gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package com.netbrasoft.gnuob.application.order;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Locale;

import javax.xml.datatype.XMLGregorianCalendar;

import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.authorization.Action;
import org.apache.wicket.authroles.authorization.strategies.role.annotations.AuthorizeAction;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.NumberTextField;
import org.apache.wicket.markup.html.form.RequiredTextField;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.util.convert.IConverter;
import org.apache.wicket.validation.validator.RangeValidator;
import org.apache.wicket.validation.validator.StringValidator;

import com.netbrasoft.gnuob.api.Order;
import com.netbrasoft.gnuob.api.Payment;
import com.netbrasoft.gnuob.api.generic.converter.XmlGregorianCalendarConverter;
import com.netbrasoft.gnuob.application.NetbrasoftApplicationConstants;
import com.netbrasoft.gnuob.application.security.AppRoles;

import de.agilecoders.wicket.core.markup.html.bootstrap.button.BootstrapAjaxButton;
import de.agilecoders.wicket.core.markup.html.bootstrap.button.Buttons;
import de.agilecoders.wicket.core.markup.html.bootstrap.button.LoadingBehavior;
import de.agilecoders.wicket.core.markup.html.bootstrap.form.BootstrapForm;
import de.agilecoders.wicket.core.markup.html.bootstrap.form.FormBehavior;
import de.agilecoders.wicket.core.markup.html.bootstrap.form.FormType;
import de.agilecoders.wicket.core.markup.html.bootstrap.table.TableBehavior;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.datetime.DatetimePicker;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.datetime.DatetimePickerConfig;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.validation.TooltipValidation;

/**
 * Wicket panel that presents a single {@link Payment} belonging to an {@link Order}'s invoice,
 * either as a read-only form (fragment rendered for MANAGER and EMPLOYEE roles) or as an
 * editable form with a save button (fragment rendered for MANAGER role only).
 *
 * The payment being shown/edited is held in {@link #selectedModel}; the panel's own default
 * model is the enclosing {@link Order}.
 *
 * NOTE(review): the class names below spell "Fragement" (sic) — the typo is part of the public
 * type names and is kept to avoid breaking callers.
 */
@SuppressWarnings("unchecked")
@AuthorizeAction(action = Action.RENDER, roles = {AppRoles.MANAGER, AppRoles.EMPLOYEE})
public class OrderInvoicePaymentViewOrEditPanel extends Panel {

  /** Editable variant of the payment form; only rendered for users with the MANAGER role. */
  @AuthorizeAction(action = Action.RENDER, roles = {AppRoles.MANAGER})
  class OrderInvoicePaymentEditFragement extends Fragment {

    private static final String PAYMENT_EDIT_TABLE_ID = "paymentEditTable";
    private static final String ORDER_INVOICE_PAYMENT_EDIT_FRAGMENT_MARKUP_ID = "orderInvoicePaymentEditFragment";
    private static final String ORDER_INVOICE_PAYMENT_VIEW_OR_EDIT_FRAGMENT_ID = "orderInvoicePaymentViewOrEditFragment";

    /** Table container holding the editable payment form and its save button. */
    @AuthorizeAction(action = Action.RENDER, roles = {AppRoles.MANAGER})
    class PaymentEditTable extends WebMarkupContainer {

      // Wicket component ids; each maps to a property of Payment via CompoundPropertyModel.
      private static final String TAX_AMOUNT_ID = "taxAmount";
      private static final String FEE_AMOUNT_ID = "feeAmount";
      private static final String SETTLE_AMOUNT_ID = "settleAmount";
      private static final String GROSS_AMOUNT_ID = "grossAmount";
      private static final String INSTALLMENT_COUNT_ID = "installmentCount";
      private static final String EXCHANGE_RATE_ID = "exchangeRate";
      // NOTE(review): "YYYY" is week-year in java.text patterns ("yyyy" is calendar year);
      // DatetimePickerConfig formats may follow moment.js conventions instead — confirm which
      // applies here, since the two interpret these tokens differently.
      private static final String DD_MM_YYYY_FORMAT = "dd-MM-YYYY";
      private static final String PAYMENT_DATE_ID = "paymentDate";
      private static final String REASON_CODE_ID = "reasonCode";
      private static final String HOLD_DECISION_ID = "holdDecision";
      private static final String PENDING_REASON_ID = "pendingReason";
      private static final String PAYMENT_STATUS_ID = "paymentStatus";
      private static final String PAYMENT_TYPE_ID = "paymentType";
      private static final String PROTECTION_ELIGIBILITY_TYPE_ID = "protectionEligibilityType";
      private static final String TRANSACTION_TYPE_ID = "transactionType";
      private static final String TRANSACTION_ID_ID = "transactionId";
      private static final String TERMINAL_ID_ID = "terminalId";
      private static final String STORE_ID_ID = "storeId";
      private static final String PAYMENT_REQUEST_ID_ID = "paymentRequestId";
      private static final String SAVE_ID = "save";
      private static final String PAYMENT_EDIT_FORM_COMPONENT_ID = "paymentEditForm";

      /**
       * Submits the payment edit form. On success, a brand-new payment (id == 0) is appended
       * to the order's invoice; on validation error, tooltip validation feedback is attached.
       */
      @AuthorizeAction(action = Action.RENDER, roles = {AppRoles.MANAGER})
      class SaveAjaxButton extends BootstrapAjaxButton {

        private static final long serialVersionUID = 2695394292963384938L;

        public SaveAjaxButton(final String id, final IModel<String> model, final Form<Payment> form, final Buttons.Type type) {
          super(id, model, form, type);
          setSize(Buttons.Size.Small);
          add(new LoadingBehavior(Model.of(OrderInvoicePaymentViewOrEditPanel.this.getString(NetbrasoftApplicationConstants.SAVE_MESSAGE_KEY))));
        }

        @Override
        protected void onError(final AjaxRequestTarget target, final Form<?> form) {
          // Re-render the form with tooltip-style validation messages attached.
          target.add(form.add(new TooltipValidation()));
          target.add(SaveAjaxButton.this.add(new LoadingBehavior(Model.of(OrderInvoicePaymentViewOrEditPanel.this.getString(NetbrasoftApplicationConstants.SAVE_MESSAGE_KEY)))));
        }

        @Override
        protected void onSubmit(final AjaxRequestTarget target, final Form<?> form) {
          // id == 0 means the payment has not been persisted yet; attach it to the invoice.
          if (((Payment) form.getDefaultModelObject()).getId() == 0) {
            ((Order) OrderInvoicePaymentViewOrEditPanel.this.getDefaultModelObject()).getInvoice().getPayments().add((Payment) form.getDefaultModelObject());
          }
          target.add(form.setOutputMarkupId(true));
          target.add(SaveAjaxButton.this.add(new LoadingBehavior(Model.of(OrderInvoicePaymentViewOrEditPanel.this.getString(NetbrasoftApplicationConstants.SAVE_MESSAGE_KEY)))));
          // Also refresh the parent container so surrounding payment lists pick up the change.
          target.add(OrderInvoicePaymentViewOrEditPanel.this.getParent().setOutputMarkupId(true));
        }
      }

      private static final long serialVersionUID = -317942480731012722L;

      private final BootstrapForm<Payment> paymentEditForm;

      private final SaveAjaxButton saveAjaxButton;

      public PaymentEditTable(final String id, final IModel<Order> model) {
        super(id, model);
        // The form binds directly to the currently selected payment, not to the order model.
        paymentEditForm = new BootstrapForm<Payment>(PAYMENT_EDIT_FORM_COMPONENT_ID, new CompoundPropertyModel<Payment>(OrderInvoicePaymentViewOrEditPanel.this.selectedModel));
        saveAjaxButton = new SaveAjaxButton(SAVE_ID, Model.of(OrderInvoicePaymentViewOrEditPanel.this.getString(NetbrasoftApplicationConstants.SAVE_MESSAGE_KEY)), paymentEditForm, Buttons.Type.Primary);
      }

      @Override
      protected void onInitialize() {
        // Text fields with per-field maximum-length validation; Required* fields are mandatory.
        paymentEditForm.add(new TextField<String>(PAYMENT_REQUEST_ID_ID).add(StringValidator.maximumLength(20)).setOutputMarkupId(true));
        paymentEditForm.add(new TextField<String>(STORE_ID_ID).add(StringValidator.maximumLength(20)).setOutputMarkupId(true));
        paymentEditForm.add(new TextField<String>(TERMINAL_ID_ID).add(StringValidator.maximumLength(128)).setOutputMarkupId(true));
        paymentEditForm.add(new RequiredTextField<String>(TRANSACTION_ID_ID).add(StringValidator.maximumLength(64)).setOutputMarkupId(true));
        paymentEditForm.add(new RequiredTextField<String>(TRANSACTION_TYPE_ID).add(StringValidator.maximumLength(62)).setOutputMarkupId(true));
        paymentEditForm.add(new TextField<String>(PROTECTION_ELIGIBILITY_TYPE_ID).add(StringValidator.maximumLength(128)).setOutputMarkupId(true));
        paymentEditForm.add(new RequiredTextField<String>(PAYMENT_TYPE_ID).add(StringValidator.maximumLength(20)).setOutputMarkupId(true));
        paymentEditForm.add(new RequiredTextField<String>(PAYMENT_STATUS_ID).add(StringValidator.maximumLength(20)).setOutputMarkupId(true));
        paymentEditForm.add(new TextField<String>(PENDING_REASON_ID).add(StringValidator.maximumLength(20)).setOutputMarkupId(true));
        paymentEditForm.add(new TextField<String>(HOLD_DECISION_ID).add(StringValidator.maximumLength(20)).setOutputMarkupId(true));
        paymentEditForm.add(new TextField<String>(REASON_CODE_ID).add(StringValidator.maximumLength(20)).setOutputMarkupId(true));
        // Date picker whose model is an XMLGregorianCalendar; a custom converter bridges it.
        paymentEditForm.add(new DatetimePicker(PAYMENT_DATE_ID, new DatetimePickerConfig().useLocale(Locale.getDefault().toString()).withFormat(DD_MM_YYYY_FORMAT)) {

          private static final long serialVersionUID = 1209354725150726556L;

          @Override
          public <C> IConverter<C> getConverter(final Class<C> type) {
            if (XMLGregorianCalendar.class.isAssignableFrom(type)) {
              return (IConverter<C>) new XmlGregorianCalendarConverter();
            } else {
              return super.getConverter(type);
            }
          }
        }.setOutputMarkupId(true));
        paymentEditForm.add(new TextField<String>(EXCHANGE_RATE_ID).add(StringValidator.maximumLength(17)).setOutputMarkupId(true));
        // All numeric fields must be non-negative; only grossAmount is required.
        paymentEditForm.add(new NumberTextField<BigInteger>(INSTALLMENT_COUNT_ID).add(RangeValidator.minimum(BigDecimal.ZERO)).setOutputMarkupId(true));
        paymentEditForm.add(new NumberTextField<BigDecimal>(GROSS_AMOUNT_ID).setRequired(true).add(RangeValidator.minimum(BigDecimal.ZERO)).setOutputMarkupId(true));
        paymentEditForm.add(new NumberTextField<BigDecimal>(SETTLE_AMOUNT_ID).add(RangeValidator.minimum(BigDecimal.ZERO)).setOutputMarkupId(true));
        paymentEditForm.add(new NumberTextField<BigDecimal>(FEE_AMOUNT_ID).add(RangeValidator.minimum(BigDecimal.ZERO)).setOutputMarkupId(true));
        paymentEditForm.add(new NumberTextField<BigDecimal>(TAX_AMOUNT_ID).add(RangeValidator.minimum(BigDecimal.ZERO)).setOutputMarkupId(true));
        paymentEditForm.add(saveAjaxButton.setOutputMarkupId(true));
        add(paymentEditForm.add(new FormBehavior(FormType.Horizontal)).setOutputMarkupId(true));
        super.onInitialize();
      }
    }

    private static final long serialVersionUID = 3709791409078428685L;

    private final WebMarkupContainer paymentEditTable;

    public OrderInvoicePaymentEditFragement() {
      super(ORDER_INVOICE_PAYMENT_VIEW_OR_EDIT_FRAGMENT_ID, ORDER_INVOICE_PAYMENT_EDIT_FRAGMENT_MARKUP_ID, OrderInvoicePaymentViewOrEditPanel.this, OrderInvoicePaymentViewOrEditPanel.this.getDefaultModel());
      paymentEditTable = new PaymentEditTable(PAYMENT_EDIT_TABLE_ID, (IModel<Order>) OrderInvoicePaymentEditFragement.this.getDefaultModel());
    }

    @Override
    protected void onInitialize() {
      add(paymentEditTable.add(new TableBehavior()).setOutputMarkupId(true));
      super.onInitialize();
    }
  }

  /** Read-only variant of the payment form; rendered for MANAGER and EMPLOYEE roles. */
  @AuthorizeAction(action = Action.RENDER, roles = {AppRoles.MANAGER, AppRoles.EMPLOYEE})
  class OrderInvoicePaymentViewFragement extends Fragment {

    private static final String PAYMENT_VIEW_TABLE_ID = "paymentViewTable";
    private static final String ORDER_INVOICE_PAYMENT_VIEW_FRAGMENT_MARKUP_ID = "orderInvoicePaymentViewFragment";
    private static final String ORDER_INVOICE_PAYMENT_VIEW_OR_EDIT_FRAGMENT_ID = "orderInvoicePaymentViewOrEditFragment";

    /** Table container holding the read-only payment form (no validators, no save button). */
    @AuthorizeAction(action = Action.ENABLE, roles = {AppRoles.MANAGER, AppRoles.EMPLOYEE})
    class PaymentViewTable extends WebMarkupContainer {

      // Same component ids as the edit table; they bind to the same Payment properties.
      private static final String TAX_AMOUNT_ID = "taxAmount";
      private static final String FEE_AMOUNT_ID = "feeAmount";
      private static final String SETTLE_AMOUNT_ID = "settleAmount";
      private static final String GROSS_AMOUNT_ID = "grossAmount";
      private static final String INSTALLMENT_COUNT_ID = "installmentCount";
      private static final String EXCHANGE_RATE_ID = "exchangeRate";
      // NOTE(review): see the edit table — "YYYY" vs "yyyy" format-token semantics to confirm.
      private static final String DD_MM_YYYY_FORMAT = "dd-MM-YYYY";
      private static final String PAYMENT_DATE_ID = "paymentDate";
      private static final String REASON_CODE_ID = "reasonCode";
      private static final String HOLD_DECISION_ID = "holdDecision";
      private static final String PENDING_REASON_ID = "pendingReason";
      private static final String PAYMENT_STATUS_ID = "paymentStatus";
      private static final String PAYMENT_TYPE_ID = "paymentType";
      private static final String PROTECTION_ELIGIBILITY_TYPE_ID = "protectionEligibilityType";
      private static final String TRANSACTION_TYPE_ID = "transactionType";
      private static final String TRANSACTION_ID_ID = "transactionId";
      private static final String TERMINAL_ID_ID = "terminalId";
      private static final String STORE_ID_ID = "storeId";
      private static final String PAYMENT_REQUEST_ID_ID = "paymentRequestId";
      private static final String PAYMENT_VIEW_FORM_COMPONENT_ID = "paymentViewForm";

      private static final long serialVersionUID = 3485437486331806341L;

      private final BootstrapForm<Payment> paymentViewForm;

      public PaymentViewTable(final String id, final IModel<Order> model) {
        super(id, model);
        paymentViewForm = new BootstrapForm<Payment>(PAYMENT_VIEW_FORM_COMPONENT_ID, new CompoundPropertyModel<Payment>(OrderInvoicePaymentViewOrEditPanel.this.selectedModel));
      }

      @Override
      protected void onInitialize() {
        paymentViewForm.add(new TextField<String>(PAYMENT_REQUEST_ID_ID).setOutputMarkupId(true));
        paymentViewForm.add(new TextField<String>(STORE_ID_ID).setOutputMarkupId(true));
        paymentViewForm.add(new TextField<String>(TERMINAL_ID_ID).setOutputMarkupId(true));
        paymentViewForm.add(new RequiredTextField<String>(TRANSACTION_ID_ID).setOutputMarkupId(true));
        paymentViewForm.add(new RequiredTextField<String>(TRANSACTION_TYPE_ID).setOutputMarkupId(true));
        paymentViewForm.add(new TextField<String>(PROTECTION_ELIGIBILITY_TYPE_ID).setOutputMarkupId(true));
        paymentViewForm.add(new RequiredTextField<String>(PAYMENT_TYPE_ID).setOutputMarkupId(true));
        paymentViewForm.add(new RequiredTextField<String>(PAYMENT_STATUS_ID).setOutputMarkupId(true));
        paymentViewForm.add(new TextField<String>(PENDING_REASON_ID).setOutputMarkupId(true));
        paymentViewForm.add(new TextField<String>(HOLD_DECISION_ID).setOutputMarkupId(true));
        paymentViewForm.add(new TextField<String>(REASON_CODE_ID).setOutputMarkupId(true));
        paymentViewForm.add(new DatetimePicker(PAYMENT_DATE_ID, new DatetimePickerConfig().useLocale(Locale.getDefault().toString()).withFormat(DD_MM_YYYY_FORMAT)) {

          private static final long serialVersionUID = 1209354725150726556L;

          @Override
          public <C> IConverter<C> getConverter(final Class<C> type) {
            if (XMLGregorianCalendar.class.isAssignableFrom(type)) {
              return (IConverter<C>) new XmlGregorianCalendarConverter();
            } else {
              return super.getConverter(type);
            }
          }
        }.setOutputMarkupId(true));
        paymentViewForm.add(new TextField<String>(EXCHANGE_RATE_ID).setOutputMarkupId(true));
        paymentViewForm.add(new NumberTextField<BigInteger>(INSTALLMENT_COUNT_ID).setOutputMarkupId(true));
        paymentViewForm.add(new NumberTextField<BigDecimal>(GROSS_AMOUNT_ID).setOutputMarkupId(true));
        paymentViewForm.add(new NumberTextField<BigDecimal>(SETTLE_AMOUNT_ID).setOutputMarkupId(true));
        paymentViewForm.add(new NumberTextField<BigDecimal>(FEE_AMOUNT_ID).setOutputMarkupId(true));
        paymentViewForm.add(new NumberTextField<BigDecimal>(TAX_AMOUNT_ID).setOutputMarkupId(true));
        add(paymentViewForm.add(new FormBehavior(FormType.Horizontal)).setOutputMarkupId(true));
        super.onInitialize();
      }
    }

    private static final long serialVersionUID = 6927997909191615786L;

    private final WebMarkupContainer paymentViewTable;

    public OrderInvoicePaymentViewFragement() {
      super(ORDER_INVOICE_PAYMENT_VIEW_OR_EDIT_FRAGMENT_ID, ORDER_INVOICE_PAYMENT_VIEW_FRAGMENT_MARKUP_ID, OrderInvoicePaymentViewOrEditPanel.this, OrderInvoicePaymentViewOrEditPanel.this.getDefaultModel());
      paymentViewTable = new PaymentViewTable(PAYMENT_VIEW_TABLE_ID, (IModel<Order>) OrderInvoicePaymentViewFragement.this.getDefaultModel());
    }

    @Override
    protected void onInitialize() {
      add(paymentViewTable.add(new TableBehavior()).setOutputMarkupId(true));
      super.onInitialize();
    }
  }

  private static final long serialVersionUID = -7002701340914975498L;

  // The payment currently selected for viewing/editing; defaults to a fresh Payment.
  private IModel<Payment> selectedModel;

  public OrderInvoicePaymentViewOrEditPanel(final String id, final IModel<Order> model) {
    super(id, model);
    selectedModel = Model.of(new Payment());
  }

  /**
   * Replaces the payment shown by both fragments' forms.
   *
   * @param selectedModel model of the payment to view or edit
   */
  public void setSelectedModel(final IModel<Payment> selectedModel) {
    this.selectedModel = selectedModel;
  }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.core.transform.transforms;

import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.TransformField;

import java.io.IOException;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
 * Transform settings: paging size, throttling, and date-output format.
 *
 * Each setting distinguishes three states: unset ({@code null}), explicitly reset to the
 * default (the sentinel {@code DEFAULT_*} value, written on the wire and in updates), and an
 * explicit user value. {@code datesAsEpochMillis} is a tri-state boolean stored as an
 * {@code Integer}: -1 = default, 0 = false, 1 = true.
 */
public class SettingsConfig implements Writeable, ToXContentObject {

    public static final ConstructingObjectParser<SettingsConfig, Void> STRICT_PARSER = createParser(false);
    public static final ConstructingObjectParser<SettingsConfig, Void> LENIENT_PARSER = createParser(true);

    // Sentinel values meaning "explicitly reset to default"; filtered out of toXContent.
    private static final int DEFAULT_MAX_PAGE_SEARCH_SIZE = -1;
    private static final float DEFAULT_DOCS_PER_SECOND = -1F;
    private static final int DEFAULT_DATES_AS_EPOCH_MILLIS = -1;

    private static ConstructingObjectParser<SettingsConfig, Void> createParser(boolean lenient) {
        ConstructingObjectParser<SettingsConfig, Void> parser = new ConstructingObjectParser<>(
            "transform_config_settings",
            lenient,
            args -> new SettingsConfig((Integer) args[0], (Float) args[1], (Integer) args[2])
        );
        parser.declareIntOrNull(optionalConstructorArg(), DEFAULT_MAX_PAGE_SEARCH_SIZE, TransformField.MAX_PAGE_SEARCH_SIZE);
        parser.declareFloatOrNull(optionalConstructorArg(), DEFAULT_DOCS_PER_SECOND, TransformField.DOCS_PER_SECOND);
        // this boolean requires 4 possible values: true, false, not_specified, default, therefore using a custom parser
        parser.declareField(
            optionalConstructorArg(),
            p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? DEFAULT_DATES_AS_EPOCH_MILLIS : p.booleanValue() ? 1 : 0,
            TransformField.DATES_AS_EPOCH_MILLIS,
            ValueType.BOOLEAN_OR_NULL
        );
        return parser;
    }

    private final Integer maxPageSearchSize;
    private final Float docsPerSecond;
    private final Integer datesAsEpochMillis;

    /** Creates a config with every setting unset. */
    public SettingsConfig() {
        this(null, null, (Integer) null);
    }

    public SettingsConfig(Integer maxPageSearchSize, Float docsPerSecond, Boolean datesAsEpochMillis) {
        this(maxPageSearchSize, docsPerSecond, datesAsEpochMillis == null ? null : datesAsEpochMillis ? 1 : 0);
    }

    public SettingsConfig(Integer maxPageSearchSize, Float docsPerSecond, Integer datesAsEpochMillis) {
        this.maxPageSearchSize = maxPageSearchSize;
        this.docsPerSecond = docsPerSecond;
        this.datesAsEpochMillis = datesAsEpochMillis;
    }

    /** Reads from the wire; datesAsEpochMillis only exists on streams from 7.11 onwards. */
    public SettingsConfig(final StreamInput in) throws IOException {
        this.maxPageSearchSize = in.readOptionalInt();
        this.docsPerSecond = in.readOptionalFloat();
        if (in.getVersion().onOrAfter(Version.V_7_11_0)) {
            this.datesAsEpochMillis = in.readOptionalInt();
        } else {
            this.datesAsEpochMillis = DEFAULT_DATES_AS_EPOCH_MILLIS;
        }
    }

    public Integer getMaxPageSearchSize() {
        return maxPageSearchSize;
    }

    public Float getDocsPerSecond() {
        return docsPerSecond;
    }

    /** @return the tri-state setting collapsed to a Boolean ({@code null} when unset). */
    public Boolean getDatesAsEpochMillis() {
        return datesAsEpochMillis != null ? datesAsEpochMillis > 0 : null;
    }

    /** @return the raw tri-state integer, preserved for partial updates. */
    public Integer getDatesAsEpochMillisForUpdate() {
        return datesAsEpochMillis;
    }

    /**
     * Validates the settings, accumulating errors onto the given exception.
     *
     * @param validationException the exception to append to (may be {@code null})
     * @return the (possibly newly created) validation exception, or {@code null} if valid
     */
    public ActionRequestValidationException validate(ActionRequestValidationException validationException) {
        // TODO: make this dependent on search.max_buckets
        if (maxPageSearchSize != null && (maxPageSearchSize < 10 || maxPageSearchSize > 10_000)) {
            // The check above accepts the inclusive range [10, 10_000]; the message must say so.
            validationException = addValidationError(
                "settings.max_page_search_size [" + maxPageSearchSize + "] must be between 10 and 10,000",
                validationException
            );
        }
        return validationException;
    }

    public boolean isValid() {
        return true;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalInt(maxPageSearchSize);
        out.writeOptionalFloat(docsPerSecond);
        if (out.getVersion().onOrAfter(Version.V_7_11_0)) {
            out.writeOptionalInt(datesAsEpochMillis);
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        // do not write default values
        if (maxPageSearchSize != null && (maxPageSearchSize.equals(DEFAULT_MAX_PAGE_SEARCH_SIZE) == false)) {
            builder.field(TransformField.MAX_PAGE_SEARCH_SIZE.getPreferredName(), maxPageSearchSize);
        }
        if (docsPerSecond != null && (docsPerSecond.equals(DEFAULT_DOCS_PER_SECOND) == false)) {
            builder.field(TransformField.DOCS_PER_SECOND.getPreferredName(), docsPerSecond);
        }
        if (datesAsEpochMillis != null && (datesAsEpochMillis.equals(DEFAULT_DATES_AS_EPOCH_MILLIS) == false)) {
            builder.field(TransformField.DATES_AS_EPOCH_MILLIS.getPreferredName(), datesAsEpochMillis > 0);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null || other.getClass() != getClass()) {
            return false;
        }
        SettingsConfig that = (SettingsConfig) other;
        return Objects.equals(maxPageSearchSize, that.maxPageSearchSize)
            && Objects.equals(docsPerSecond, that.docsPerSecond)
            && Objects.equals(datesAsEpochMillis, that.datesAsEpochMillis);
    }

    @Override
    public int hashCode() {
        return Objects.hash(maxPageSearchSize, docsPerSecond, datesAsEpochMillis);
    }

    @Override
    public String toString() {
        return Strings.toString(this, true, true);
    }

    public static SettingsConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException {
        return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null);
    }

    public static class Builder {
        private Integer maxPageSearchSize;
        private Float docsPerSecond;
        private Integer datesAsEpochMillis;

        /**
         * Default builder
         */
        public Builder() {}

        /**
         * Builder starting from existing settings as base, for the purpose of partially updating settings.
         *
         * @param base base settings
         */
        public Builder(SettingsConfig base) {
            this.maxPageSearchSize = base.maxPageSearchSize;
            this.docsPerSecond = base.docsPerSecond;
            this.datesAsEpochMillis = base.datesAsEpochMillis;
        }

        /**
         * Sets the paging maximum paging maxPageSearchSize that transform can use when
         * pulling the data from the source index.
         *
         * If OOM is triggered, the paging maxPageSearchSize is dynamically reduced so that the transform can continue to gather data.
         *
         * @param maxPageSearchSize Integer value between 10 and 10_000
         * @return the {@link Builder} with the paging maxPageSearchSize set.
         */
        public Builder setMaxPageSearchSize(Integer maxPageSearchSize) {
            this.maxPageSearchSize = maxPageSearchSize == null ? DEFAULT_MAX_PAGE_SEARCH_SIZE : maxPageSearchSize;
            return this;
        }

        /**
         * Sets the docs per second that transform can use when pulling the data from the source index.
         *
         * This setting throttles transform by issuing queries less often, however processing still happens in
         * batches. A value of 0 disables throttling (default).
         *
         * @param docsPerSecond Integer value
         * @return the {@link Builder} with requestsPerSecond set.
         */
        public Builder setRequestsPerSecond(Float docsPerSecond) {
            this.docsPerSecond = docsPerSecond == null ? DEFAULT_DOCS_PER_SECOND : docsPerSecond;
            return this;
        }

        /**
         * Whether to write the output of a date aggregation as millis since epoch or as formatted string (ISO format).
         *
         * Transforms created before 7.11 write dates as epoch_millis. The new default is ISO string.
         * You can use this setter to configure the old style writing as epoch millis.
         *
         * An explicit `null` resets to default.
         *
         * @param datesAsEpochMillis true if dates should be written as epoch_millis.
         * @return the {@link Builder} with datesAsEpochMilli set.
         */
        public Builder setDatesAsEpochMillis(Boolean datesAsEpochMillis) {
            this.datesAsEpochMillis = datesAsEpochMillis == null ? DEFAULT_DATES_AS_EPOCH_MILLIS : datesAsEpochMillis ? 1 : 0;
            return this;
        }

        /**
         * Update settings according to given settings config.
         *
         * @param update update settings
         * @return the {@link Builder} with applied updates.
         */
        public Builder update(SettingsConfig update) {
            // if explicit {@code null}s have been set in the update, we do not want to carry the default, but get rid
            // of the setting
            if (update.getDocsPerSecond() != null) {
                this.docsPerSecond = update.getDocsPerSecond().equals(DEFAULT_DOCS_PER_SECOND) ? null : update.getDocsPerSecond();
            }
            if (update.getMaxPageSearchSize() != null) {
                this.maxPageSearchSize = update.getMaxPageSearchSize().equals(DEFAULT_MAX_PAGE_SEARCH_SIZE)
                    ? null
                    : update.getMaxPageSearchSize();
            }
            if (update.getDatesAsEpochMillisForUpdate() != null) {
                this.datesAsEpochMillis = update.getDatesAsEpochMillisForUpdate().equals(DEFAULT_DATES_AS_EPOCH_MILLIS)
                    ? null
                    : update.getDatesAsEpochMillisForUpdate();
            }
            return this;
        }

        public SettingsConfig build() {
            return new SettingsConfig(maxPageSearchSize, docsPerSecond, datesAsEpochMillis);
        }
    }
}
/*
 * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.emm.agent.api;

import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.IBinder;
import android.util.Log;

import org.json.JSONException;
import org.json.JSONObject;
import org.wso2.emm.agent.proxy.IDPTokenManagerException;
import org.wso2.emm.agent.proxy.beans.EndPointInfo;
import org.wso2.emm.agent.proxy.utils.ServerUtilities;
import org.wso2.emm.agent.utils.Constants;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;

/**
 * This class handles all the functionalities related to retrieving device
 * current location.
 */
public class GPSTracker extends Service implements LocationListener {

    private Location location;
    private double latitude;
    private double longitude;
    // Reverse-geocoded address parts, populated asynchronously by setReversGeoCoordinates().
    private String street1;
    private String street2;
    private String city;
    private String state;
    private String zip;
    private String country;
    // Minimum movement (in the units requestLocationUpdates expects for its distance
    // parameter) before a location update is delivered.
    private static final long MIN_DISTANCE_CHANGE_FOR_UPDATES = 10;
    // Minimum interval between location updates: 1 minute in milliseconds.
    private static final long MIN_TIME_BW_UPDATES = 1000 * 60 * 1;
    protected LocationManager locationManager;
    private static final String TAG = GPSTracker.class.getName();

    // NOTE(review): instantiating a Service subclass directly via this constructor bypasses
    // the normal Android service lifecycle — confirm this is intentional for this agent.
    public GPSTracker(Context context) {
        locationManager = (LocationManager) context.getSystemService(LOCATION_SERVICE);
        getLocation();
    }

    /**
     * Function to get device location using GPS/network providers and trigger a
     * reverse-geocoding lookup. Results are stored in the fields of this class;
     * nothing is returned.
     */
    private void getLocation() {
        if (locationManager != null) {
            try {
                boolean isGpsEnabled = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);
                boolean isNetworkEnabled = locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
                // Network provider is tried first; GPS is only consulted if it yielded nothing.
                if (isNetworkEnabled) {
                    locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER,
                            MIN_TIME_BW_UPDATES, MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
                    if (locationManager != null) {
                        location = locationManager.getLastKnownLocation(
                                LocationManager.NETWORK_PROVIDER);
                        if (location != null) {
                            latitude = location.getLatitude();
                            longitude = location.getLongitude();
                        }
                    }
                }
                if (isGpsEnabled) {
                    if (location == null) {
                        locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,
                                MIN_TIME_BW_UPDATES, MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
                        if (locationManager != null) {
                            location = locationManager.getLastKnownLocation(
                                    LocationManager.GPS_PROVIDER);
                            if (location != null) {
                                latitude = location.getLatitude();
                                longitude = location.getLongitude();
                            }
                        }
                    }
                }
                setReversGeoCoordinates();
            } catch (RuntimeException e) {
                Log.e(TAG, "No network/GPS Switched off.", e);
            } catch (InterruptedException e) {
                Log.e(TAG, "Error occured while calling reverse geo coordination API.", e);
            } catch (ExecutionException e) {
                Log.e(TAG, "Error occured while calling reverse geo coordination API.", e);
            }
        }
    }

    /**
     * Stop using GPS listener.
     * Calling this function will stop using GPS the agent.
     */
    public void stopUsingGps() {
        if (locationManager != null) {
            locationManager.removeUpdates(GPSTracker.this);
        }
    }

    /**
     * Function to get latitude.
     * @return - Device current latitude (last cached value if no fix is available).
     */
    public double getLatitude() {
        if (location != null) {
            latitude = location.getLatitude();
        }
        return latitude;
    }

    /**
     * Function to get longitude.
     * @return - Device current longitude (last cached value if no fix is available).
     */
    public double getLongitude() {
        if (location != null) {
            longitude = location.getLongitude();
        }
        return longitude;
    }

    // LocationListener callbacks are intentionally no-ops: this tracker only polls
    // getLastKnownLocation rather than reacting to updates.
    @Override
    public void onLocationChanged(Location location) {
    }

    @Override
    public void onProviderDisabled(String provider) {
    }

    @Override
    public void onProviderEnabled(String provider) {
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
    }

    @Override
    public IBinder onBind(Intent arg0) {
        return null;
    }

    public String getStreet1() {
        return street1;
    }

    public String getStreet2() {
        return street2;
    }

    public String getCity() {
        return city;
    }

    public String getState() {
        return state;
    }

    public String getZip() {
        return zip;
    }

    public String getCountry() {
        return country;
    }

    /**
     * In this method, it calls reverse geo coordination API and set relevant values.
     *
     * NOTE(review): the trailing .get() blocks the calling thread until the AsyncTask
     * completes, which defeats the purpose of the async task and would block the UI
     * thread if called from it — confirm callers only invoke this off the main thread.
     */
    private void setReversGeoCoordinates() throws ExecutionException, InterruptedException {
        StringBuilder endPoint = new StringBuilder();
        endPoint.append(Constants.Location.GEO_ENDPOINT);
        endPoint.append("?" + Constants.Location.RESULT_FORMAT);
        endPoint.append("&" + Constants.Location.ACCEPT_LANGUAGE + "=" + Constants.Location.LANGUAGE_CODE);
        endPoint.append("&" + Constants.Location.LATITUDE + "=" + latitude);
        endPoint.append("&" + Constants.Location.LONGITUDE + "=" + longitude);
        EndPointInfo endPointInfo = new EndPointInfo();
        endPointInfo.setHttpMethod(org.wso2.emm.agent.proxy.utils.Constants.HTTP_METHODS.GET);
        endPointInfo.setEndPoint(endPoint.toString());
        SendRequest sendRequestTask = new SendRequest();
        sendRequestTask.execute(endPointInfo).get();
    }

    /**
     * This class is used to send requests to reverse geo coordination API.
     * The reason to use this private class because the function which is already
     * available for sending requests is secured with token. Therefore this async task can be used
     * to send requests without tokens.
     */
    private class SendRequest extends AsyncTask<EndPointInfo, Void, Map<String, String>> {

        @Override
        protected Map<String, String> doInBackground(EndPointInfo... params) {
            EndPointInfo endPointInfo = params[0];
            Map<String, String> responseParams = null;
            Map<String, String> headers = new HashMap<String, String>();
            headers.put("User-Agent", Constants.USER_AGENT);
            try {
                responseParams = ServerUtilities.postData(endPointInfo, headers);
                if (Constants.DEBUG_MODE_ENABLED) {
                    Log.d(TAG, "Response Code: " + responseParams.get(org.wso2.emm.agent.proxy.utils.Constants.SERVER_RESPONSE_STATUS));
                    Log.d(TAG, "Response Payload: " + responseParams.get(org.wso2.emm.agent.proxy.utils.Constants.SERVER_RESPONSE_BODY));
                }
            } catch (IDPTokenManagerException e) {
                Log.e(TAG, "Failed to contact server", e);
            }
            // May be null when the request failed; onPostExecute guards against that.
            return responseParams;
        }

        @Override
        protected void onPostExecute(Map<String, String> result) {
            if (result != null) {
                String responseCode = result.get(org.wso2.emm.agent.proxy.utils.Constants.SERVER_RESPONSE_STATUS);
                if (Constants.Status.SUCCESSFUL.equals(responseCode)) {
                    String resultPayload = result.get(org.wso2.emm.agent.proxy.utils.Constants.SERVER_RESPONSE_BODY);
                    try {
                        JSONObject data = new JSONObject(resultPayload);
                        if (!data.isNull(Constants.Location.ADDRESS)) {
                            JSONObject address = data.getJSONObject(Constants.Location.ADDRESS);
                            // "city" is preferred; "town" is the fallback key for smaller places.
                            if (!address.isNull(Constants.Location.CITY)) {
                                city = address.getString(Constants.Location.CITY);
                            } else if (!address.isNull(Constants.Location.TOWN)) {
                                city = address.getString(Constants.Location.TOWN);
                            }
                            if (!address.isNull(Constants.Location.COUNTRY)) {
                                country = address.getString(Constants.Location.COUNTRY);
                            }
                            if (!address.isNull(Constants.Location.STREET1)) {
                                street1 = address.getString(Constants.Location.STREET1);
                            }
                            if (!address.isNull(Constants.Location.STREET2)) {
                                street2 = address.getString(Constants.Location.STREET2);
                            }
                            if (!address.isNull(Constants.Location.STATE)) {
                                state = address.getString(Constants.Location.STATE);
                            }
                            if (!address.isNull(Constants.Location.ZIP)) {
                                zip = address.getString(Constants.Location.ZIP);
                            }
                        }
                        if (Constants.DEBUG_MODE_ENABLED) {
                            Log.d(TAG, "Address: " + street1 + ", " + street2 + ", " + city + ", "
                                    + state + ", " + zip + ", " + country);
                        }
                    } catch (JSONException e) {
                        Log.e(TAG, "Error occurred while parsing the result payload", e);
                    }
                }
            }
        }
    }
}
/** * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package models; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.Table; import org.joda.time.DateTime; import org.joda.time.Period; import com.avaje.ebean.Ebean; import com.avaje.ebean.Query; import com.avaje.ebean.RawSql; import com.avaje.ebean.RawSqlBuilder; import com.avaje.ebean.SqlUpdate; /** * This Class is used to save all Actions on the Mailserver * * @author Patrick Thum, Xceptance Software Technologies GmbH, Germany */ @Entity @Table(name = "mailtransactions") public class MailTransaction { @Id private Long id; private Long ts; private int status; private String sourceaddr; private String relayaddr; private String targetaddr; /** * the Default-Constructor which initializes all Fields with Default-values */ public MailTransaction() { id = 0L; ts = DateTime.now().getMillis(); status = 0; targetaddr = ""; sourceaddr = ""; } /** * Creates an MailTransaction-Object, with Parameters<br/> * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern<br/> * 100 - Mail does not exist<br/> * 200 - Mail exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive</br> * * @param 
stat * Statuscode of the Transaction * @param source * the Sender's - Address * @param relay * Relay-Address of the Mail (the mail which is virtually created on this app) * @param target * Original Recipients-Address of the Mail */ public MailTransaction(int stat, String source, String relay, String target) { ts = DateTime.now().getMillis(); this.status = stat; this.targetaddr = target; this.sourceaddr = source; this.relayaddr = relay; } /** * @return the ID of this Transaction */ public Long getId() { return id; } /** * @param id * the ID of this Transaction to set */ public void setId(Long id) { this.id = id; } /** * @return the Timestamp of this Transaction */ public Long getTs() { return ts; } /** * @return the Timestamp as String in the Format "dd.MM.yyyy hh:mm" */ public String getTsAsString() { return new SimpleDateFormat("dd.MM.yyyy HH:mm").format(new Date(this.ts)).toString(); } /** * @param ts * sets the Timestamp in Milliseconds */ public void setTs(Long ts) { this.ts = ts; } /** * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern <br/> * 100 - Mail does not exist<br/> * 200 - Mail exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive</br> * * @return a Statuscode */ public int getStatus() { return status; } /** * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern<br/> * 100 - Mail does not exist<br/> * 200 - Mail exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive</br> * * @param status * the Status to set */ public void setStatus(int status) { this.status = status; } /** * @return the Target-Address of this Transaction */ public String getTargetaddr() { 
return targetaddr; } /** * @param targetaddr * the Target-Address to set */ public void setTargetaddr(String targetaddr) { this.targetaddr = targetaddr; } /** * @return the Source-Address of this transaction */ public String getSourceaddr() { return sourceaddr; } /** * @param sourceaddr * the Source-Address to set */ public void setSourceaddr(String sourceaddr) { this.sourceaddr = sourceaddr; } /** * @return the Relay-Address of this transaction (if existent) */ public String getRelayaddr() { return relayaddr; } /** * @param relayaddr * the Relay-Address of this transaction (if existent) */ public void setRelayaddr(String relayaddr) { this.relayaddr = relayaddr; } // ------------------------------------------------------- // E-Bean Functions // ------------------------------------------------------- /** * @return all Transactions which were stored in the Database */ public static List<MailTransaction> all() { return Ebean.find(MailTransaction.class).findList(); } /** * @param sortage * a String which indicates the sortage of the returned list, the string should be in the form "fieldname * asc" or "fieldname desc" * @return a sorted list of all MailTransactions */ public static List<MailTransaction> all(String sortage) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().orderBy(sortage).findList(); return list; } /** * Gets all Mail-Transactions in the last "Period" * * @param period * Joda-Time Period * @return a List of Mail-Transactions */ public static List<MailTransaction> getAllInPeriod(Period period) { return Ebean.find(MailTransaction.class).where().gt("ts", DateTime.now().minus(period).getMillis()).findList(); } /** * returns a list of MailTransactions sorted descending and limited by the given number * * @param limit * the maximal row number * @return a sorted list of all MailTransactions */ public static List<MailTransaction> getSortedAndLimitedList(int limit) { List<MailTransaction> list = 
Ebean.find(MailTransaction.class).where().orderBy("ts desc").setMaxRows(limit) .findList(); return list; } /** * returns a list of MailTransactions with the given target address * * @param targetAddr * the target address * @return sorted list of MailTransactions with given target address */ public static List<MailTransaction> getForTarget(final String targetAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("targetaddr", targetAddr) .orderBy("ts desc").findList(); return list; } /** * returns a list of MailTransactions with the given relay address * * @param relayAddr * the relay address * @return sorted list of MailTransactions with given target address */ public static List<MailTransaction> getForRelay(final String relayAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("relayaddr", relayAddr) .orderBy("ts desc").findList(); return list; } /** * returns a list of MailTransactions with the given source address * * @param sourceAddr * the source address * @return sorted list of MailTransactions with given target address */ public static List<MailTransaction> getForSource(final String sourceAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("sourceaddr", sourceAddr) .orderBy("ts desc").findList(); return list; } /** * Deletes all Transactions that have been stored before the given Timestamp * * @param ts * the Timestamp in milliseconds */ public static void deleteTxInPeriod(Long ts) { String sql = "DELETE FROM MAILTRANSACTIONS"; if (ts != null) { // there's a timestamp, add sql += " WHERE ts < " + ts; } SqlUpdate down = Ebean.createSqlUpdate(sql); down.execute(); } /** * returns a specific MailTransaction that belongs to the ID * * @param id * the ID of an MailTransaction * @return a MailTransaction */ public static MailTransaction getById(long id) { return Ebean.find(MailTransaction.class, id); } /** * Generates a List of Status-Numbers and the Number of their occurrences * 
* @return a List of Status-Elements (as an aggregate of Transactions) * @see Status */ public static List<Status> getStatusList() { // create a sql-query that contains the statuscode and their number of occurences String sql = "SELECT mtx.status, COUNT(mtx.status) AS count FROM mailtransactions mtx GROUP BY mtx.status"; RawSql rawSql = RawSqlBuilder.parse(sql).columnMapping("mtx.status", "statuscode").create(); Query<Status> query = Ebean.find(Status.class); query.setRawSql(rawSql); List<Status> list = query.findList(); return list; } /** * Saves the Transaction in the Database */ public void save() { Ebean.save(this); } /** * saves multiple elements * * @param mtxList */ public static void saveMultipleTx(List<MailTransaction> mtxList) { Ebean.save(mtxList); } }
package com.parrot.sdksample.drone;

import android.content.Context;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.util.Log;

import com.parrot.arsdk.arcommands.ARCOMMANDS_ARDRONE3_MEDIARECORDEVENT_PICTUREEVENTCHANGED_ERROR_ENUM;
import com.parrot.arsdk.arcommands.ARCOMMANDS_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE_ENUM;
import com.parrot.arsdk.arcontroller.ARCONTROLLER_DEVICE_STATE_ENUM;
import com.parrot.arsdk.arcontroller.ARCONTROLLER_DICTIONARY_KEY_ENUM;
import com.parrot.arsdk.arcontroller.ARCONTROLLER_ERROR_ENUM;
import com.parrot.arsdk.arcontroller.ARControllerArgumentDictionary;
import com.parrot.arsdk.arcontroller.ARControllerCodec;
import com.parrot.arsdk.arcontroller.ARControllerDictionary;
import com.parrot.arsdk.arcontroller.ARControllerException;
import com.parrot.arsdk.arcontroller.ARDeviceController;
import com.parrot.arsdk.arcontroller.ARDeviceControllerListener;
import com.parrot.arsdk.arcontroller.ARDeviceControllerStreamListener;
import com.parrot.arsdk.arcontroller.ARFeatureARDrone3;
import com.parrot.arsdk.arcontroller.ARFeatureCommon;
import com.parrot.arsdk.arcontroller.ARFeatureSkyController;
import com.parrot.arsdk.arcontroller.ARFrame;
import com.parrot.arsdk.ardiscovery.ARDISCOVERY_PRODUCT_ENUM;
import com.parrot.arsdk.ardiscovery.ARDiscoveryDevice;
import com.parrot.arsdk.ardiscovery.ARDiscoveryDeviceNetService;
import com.parrot.arsdk.ardiscovery.ARDiscoveryDeviceService;
import com.parrot.arsdk.ardiscovery.ARDiscoveryException;
import com.parrot.arsdk.ardiscovery.ARDiscoveryService;
import com.parrot.arsdk.arutils.ARUTILS_DESTINATION_ENUM;
import com.parrot.arsdk.arutils.ARUTILS_FTP_TYPE_ENUM;
import com.parrot.arsdk.arutils.ARUtilsException;
import com.parrot.arsdk.arutils.ARUtilsManager;

import java.util.ArrayList;
import java.util.List;

/**
 * Wrapper around a Parrot drone that is reached THROUGH a SkyController remote:
 * there are two connections (app -> SkyController, SkyController -> drone) and two
 * corresponding device states. Piloting commands are only issued when BOTH the
 * SkyController connection and its "extension" (the drone) are RUNNING.
 * Listener callbacks are marshalled onto the main thread via {@code mHandler}
 * unless their Javadoc says otherwise.
 */
public class SkyControllerDrone {
    private static final String TAG = "SkyControllerDrone";

    /** Observer interface for connection, battery, piloting, media and video events. */
    public interface Listener {
        /**
         * Called when the connection to the SkyController changes
         * Called in the main thread
         * @param state the state of the SkyController
         */
        void onSkyControllerConnectionChanged(ARCONTROLLER_DEVICE_STATE_ENUM state);

        /**
         * Called when the connection to the drone changes
         * Called in the main thread
         * @param state the state of the drone
         */
        void onDroneConnectionChanged(ARCONTROLLER_DEVICE_STATE_ENUM state);

        /**
         * Called when the SkyController battery charge changes
         * Called in the main thread
         * @param batteryPercentage the battery remaining (in percent)
         */
        void onSkyControllerBatteryChargeChanged(int batteryPercentage);

        /**
         * Called when the battery charge changes
         * Called in the main thread
         * @param batteryPercentage the battery remaining (in percent)
         */
        void onDroneBatteryChargeChanged(int batteryPercentage);

        /**
         * Called when the piloting state changes
         * Called in the main thread
         * @param state the piloting state of the drone
         */
        void onPilotingStateChanged(ARCOMMANDS_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE_ENUM state);

        /**
         * Called when a picture is taken
         * Called on a separate thread
         * @param error ERROR_OK if picture has been taken, otherwise describe the error
         */
        void onPictureTaken(ARCOMMANDS_ARDRONE3_MEDIARECORDEVENT_PICTUREEVENTCHANGED_ERROR_ENUM error);

        /**
         * Called when the video decoder should be configured
         * Called on a separate thread
         * @param codec the codec to configure the decoder with
         */
        void configureDecoder(ARControllerCodec codec);

        /**
         * Called when a video frame has been received
         * Called on a separate thread
         * @param frame the video frame
         */
        void onFrameReceived(ARFrame frame);

        /**
         * Called before medias will be downloaded
         * Called in the main thread
         * @param nbMedias the number of medias that will be downloaded
         */
        void onMatchingMediasFound(int nbMedias);

        /**
         * Called each time the progress of a download changes
         * Called in the main thread
         * @param mediaName the name of the media
         * @param progress the progress of its download (from 0 to 100)
         */
        void onDownloadProgressed(String mediaName, int progress);

        /**
         * Called when a media download has ended
         * Called in the main thread
         * @param mediaName the name of the media
         */
        void onDownloadComplete(String mediaName);
    }

    private final List<Listener> mListeners;

    // posts listener notifications onto the main (UI) thread
    private final Handler mHandler;
    private final Context mContext;

    private ARDeviceController mDeviceController;
    private SDCardModule mSDCardModule;
    // state of the app -> SkyController link
    private ARCONTROLLER_DEVICE_STATE_ENUM mSkyControllerState;
    // state of the SkyController -> drone link (the controller's "extension")
    private ARCONTROLLER_DEVICE_STATE_ENUM mDroneState;
    private ARCOMMANDS_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE_ENUM mFlyingState;
    // run id of the current flight, used to select which medias to download
    private String mCurrentRunId;
    private ARDiscoveryDeviceService mDeviceService;
    private ARUtilsManager mFtpListManager;
    private ARUtilsManager mFtpQueueManager;

    public SkyControllerDrone(Context context, @NonNull ARDiscoveryDeviceService deviceService) {
        mContext = context;
        mListeners = new ArrayList<>();
        mDeviceService = deviceService;

        // needed because some callbacks will be called on the main thread
        mHandler = new Handler(context.getMainLooper());

        mSkyControllerState = ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_STOPPED;
        mDroneState = ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_STOPPED;

        // if the product type of the deviceService match with the types supported
        ARDISCOVERY_PRODUCT_ENUM productType = ARDiscoveryService.getProductFromProductID(deviceService.getProductID());
        if (ARDISCOVERY_PRODUCT_ENUM.ARDISCOVERY_PRODUCT_SKYCONTROLLER.equals(productType)) {
            ARDiscoveryDevice discoveryDevice = createDiscoveryDevice(deviceService);
            if (discoveryDevice != null) {
                mDeviceController = createDeviceController(discoveryDevice);
                // the controller keeps its own reference; the discovery device can be released
                discoveryDevice.dispose();
            }

            try {
                // two FTP managers: one for listing medias, one for the download queue
                mFtpListManager = new ARUtilsManager();
                mFtpQueueManager = new ARUtilsManager();

                mFtpListManager.initFtp(mContext, deviceService, ARUTILS_DESTINATION_ENUM.ARUTILS_DESTINATION_DRONE, ARUTILS_FTP_TYPE_ENUM.ARUTILS_FTP_TYPE_GENERIC);
                mFtpQueueManager.initFtp(mContext, deviceService, ARUTILS_DESTINATION_ENUM.ARUTILS_DESTINATION_DRONE, ARUTILS_FTP_TYPE_ENUM.ARUTILS_FTP_TYPE_GENERIC);
                mSDCardModule = new SDCardModule(mFtpListManager, mFtpQueueManager);
                mSDCardModule.addListener(mSDCardModuleListener);
            } catch (ARUtilsException e) {
                Log.e(TAG, "Exception", e);
            }
        } else {
            Log.e(TAG, "DeviceService type is not supported by SkyControllerDrone");
        }
    }

    /** Releases the device controller and closes both FTP connections. */
    public void dispose() {
        if (mDeviceController != null)
            mDeviceController.dispose();
        if (mFtpListManager != null)
            mFtpListManager.closeFtp(mContext, mDeviceService);
        if (mFtpQueueManager != null)
            mFtpQueueManager.closeFtp(mContext, mDeviceService);
    }

    //region Listener functions
    public void addListener(Listener listener) {
        mListeners.add(listener);
    }

    public void removeListener(Listener listener) {
        mListeners.remove(listener);
    }
    //endregion Listener

    /**
     * Connect to the drone
     * @return true if operation was successful.
     * Returning true doesn't mean that device is connected.
     * You can be informed of the actual connection through {@link Listener#onSkyControllerConnectionChanged}
     */
    public boolean connect() {
        boolean success = false;
        // only start if the controller exists and is currently stopped
        if ((mDeviceController != null) && (ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_STOPPED.equals(mSkyControllerState))) {
            ARCONTROLLER_ERROR_ENUM error = mDeviceController.start();
            if (error == ARCONTROLLER_ERROR_ENUM.ARCONTROLLER_OK) {
                success = true;
            }
        }
        return success;
    }

    /**
     * Disconnect from the drone
     * @return true if operation was successful.
     * Returning true doesn't mean that device is disconnected.
     * You can be informed of the actual disconnection through {@link Listener#onSkyControllerConnectionChanged}
     */
    public boolean disconnect() {
        boolean success = false;
        // only stop if the controller exists and is currently running
        if ((mDeviceController != null) && (ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING.equals(mSkyControllerState))) {
            ARCONTROLLER_ERROR_ENUM error = mDeviceController.stop();
            if (error == ARCONTROLLER_ERROR_ENUM.ARCONTROLLER_OK) {
                success = true;
            }
        }
        return success;
    }

    /**
     * Get the current connection state
     * @return the connection state of the SkyController
     */
    public ARCONTROLLER_DEVICE_STATE_ENUM getSkyControllerConnectionState() {
        return mSkyControllerState;
    }

    /**
     * Get the current connection state
     * @return the connection state of the drone
     */
    public ARCONTROLLER_DEVICE_STATE_ENUM getDroneConnectionState() {
        return mDroneState;
    }

    /**
     * Get the current flying state
     * @return the flying state
     */
    public ARCOMMANDS_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE_ENUM getFlyingState() {
        return mFlyingState;
    }

    // Piloting commands below require BOTH links (controller and drone extension) to be RUNNING.

    public void takeOff() {
        if ((mDeviceController != null)
                && (mSkyControllerState.equals(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING))
                && (mDeviceController.getExtensionState().equals(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING))) {
            mDeviceController.getFeatureARDrone3().sendPilotingTakeOff();
        }
    }

    public void land() {
        if ((mDeviceController != null)
                && (mSkyControllerState.equals(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING))
                && (mDeviceController.getExtensionState().equals(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING))) {
            mDeviceController.getFeatureARDrone3().sendPilotingLanding();
        }
    }

    public void emergency() {
        if ((mDeviceController != null)
                && (mSkyControllerState.equals(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING))
                && (mDeviceController.getExtensionState().equals(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING))) {
            mDeviceController.getFeatureARDrone3().sendPilotingEmergency();
        }
    }

    public void takePicture() {
        if ((mDeviceController != null)
                && (mSkyControllerState.equals(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING))
                && (mDeviceController.getExtensionState().equals(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING))) {
            mDeviceController.getFeatureARDrone3().sendMediaRecordPictureV2();
        }
    }

    /**
     * Download the last flight medias
     * Uses the run id to download all medias related to the last flight
     * If no run id is available, download all medias of the day
     */
    public void getLastFlightMedias() {
        String runId = mCurrentRunId;
        if ((runId != null) && !runId.isEmpty()) {
            mSDCardModule.getFlightMedias(runId);
        } else {
            Log.e(TAG, "RunID not available, fallback to the day's medias");
            mSDCardModule.getTodaysFlightMedias();
        }
    }

    public void cancelGetLastFlightMedias() {
        mSDCardModule.cancelGetFlightMedias();
    }

    /** Builds an ARDiscoveryDevice from the service; returns null on discovery error. */
    private ARDiscoveryDevice createDiscoveryDevice(@NonNull ARDiscoveryDeviceService service) {
        ARDiscoveryDevice device = null;
        try {
            device = new ARDiscoveryDevice(mContext, service);
        } catch (ARDiscoveryException e) {
            Log.e(TAG, "Exception", e);
            Log.e(TAG, "Error: " + e.getError());
        }
        return device;
    }

    /** Builds the device controller and registers the state and stream listeners. */
    private ARDeviceController createDeviceController(@NonNull ARDiscoveryDevice discoveryDevice) {
        ARDeviceController deviceController = null;
        try {
            deviceController = new ARDeviceController(discoveryDevice);

            deviceController.addListener(mDeviceControllerListener);
            deviceController.addStreamListener(mStreamListener);
        } catch (ARControllerException e) {
            Log.e(TAG, "Exception", e);
        }
        return deviceController;
    }

    //region notify listener block
    // Each notify* method iterates over a copy so listeners may add/remove themselves
    // during the callback without a ConcurrentModificationException.
    private void notifySkyControllerConnectionChanged(ARCONTROLLER_DEVICE_STATE_ENUM state) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onSkyControllerConnectionChanged(state);
        }
    }

    private void notifyDroneConnectionChanged(ARCONTROLLER_DEVICE_STATE_ENUM state) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onDroneConnectionChanged(state);
        }
    }

    private void notifySkyControllerBatteryChanged(int battery) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onSkyControllerBatteryChargeChanged(battery);
        }
    }

    private void notifyDroneBatteryChanged(int battery) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onDroneBatteryChargeChanged(battery);
        }
    }

    private void notifyPilotingStateChanged(ARCOMMANDS_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE_ENUM state) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onPilotingStateChanged(state);
        }
    }

    private void notifyPictureTaken(ARCOMMANDS_ARDRONE3_MEDIARECORDEVENT_PICTUREEVENTCHANGED_ERROR_ENUM error) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onPictureTaken(error);
        }
    }

    private void notifyConfigureDecoder(ARControllerCodec codec) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.configureDecoder(codec);
        }
    }

    private void notifyFrameReceived(ARFrame frame) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onFrameReceived(frame);
        }
    }

    private void notifyMatchingMediasFound(int nbMedias) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onMatchingMediasFound(nbMedias);
        }
    }

    private void notifyDownloadProgressed(String mediaName, int progress) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onDownloadProgressed(mediaName, progress);
        }
    }

    private void notifyDownloadComplete(String mediaName) {
        List<Listener> listenersCpy = new ArrayList<>(mListeners);
        for (Listener listener : listenersCpy) {
            listener.onDownloadComplete(mediaName);
        }
    }
    //endregion notify listener block

    // Relays SD-card module callbacks (arriving on a worker thread) to the main thread.
    private final SDCardModule.Listener mSDCardModuleListener = new SDCardModule.Listener() {
        @Override
        public void onMatchingMediasFound(final int nbMedias) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    notifyMatchingMediasFound(nbMedias);
                }
            });
        }

        @Override
        public void onDownloadProgressed(final String mediaName, final int progress) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    notifyDownloadProgressed(mediaName, progress);
                }
            });
        }

        @Override
        public void onDownloadComplete(final String mediaName) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    notifyDownloadComplete(mediaName);
                }
            });
        }
    };

    private final ARDeviceControllerListener mDeviceControllerListener = new ARDeviceControllerListener() {
        @Override
        public void onStateChanged(ARDeviceController deviceController, ARCONTROLLER_DEVICE_STATE_ENUM newState, ARCONTROLLER_ERROR_ENUM error) {
            // state of the app -> SkyController link changed
            mSkyControllerState = newState;
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    notifySkyControllerConnectionChanged(mSkyControllerState);
                }
            });
        }

        @Override
        public void onExtensionStateChanged(ARDeviceController deviceController, ARCONTROLLER_DEVICE_STATE_ENUM newState, ARDISCOVERY_PRODUCT_ENUM product, String name, ARCONTROLLER_ERROR_ENUM error) {
            // state of the SkyController -> drone link changed
            mDroneState = newState;
            if (ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING.equals(mDroneState)) {
                // drone is reachable: start the video stream
                mDeviceController.startVideoStream();
            } else if (ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_STOPPED.equals(mDroneState)) {
                // drone lost: abort any media download in progress
                mSDCardModule.cancelGetFlightMedias();
            }
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    notifyDroneConnectionChanged(mDroneState);
                }
            });
        }

        @Override
        public void onCommandReceived(ARDeviceController deviceController, ARCONTROLLER_DICTIONARY_KEY_ENUM commandKey, ARControllerDictionary elementDictionary) {
            // if event received is the battery update
            if ((commandKey == ARCONTROLLER_DICTIONARY_KEY_ENUM.ARCONTROLLER_DICTIONARY_KEY_COMMON_COMMONSTATE_BATTERYSTATECHANGED) && (elementDictionary != null)) {
                ARControllerArgumentDictionary<Object> args = elementDictionary.get(ARControllerDictionary.ARCONTROLLER_DICTIONARY_SINGLE_KEY);

                if (args != null) {
                    final int battery = (Integer) args.get(ARFeatureCommon.ARCONTROLLER_DICTIONARY_KEY_COMMON_COMMONSTATE_BATTERYSTATECHANGED_PERCENT);
                    mHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            notifyDroneBatteryChanged(battery);
                        }
                    });
                }
            }
            // if event received is the skyController battery update
            if ((commandKey == ARCONTROLLER_DICTIONARY_KEY_ENUM.ARCONTROLLER_DICTIONARY_KEY_SKYCONTROLLER_SKYCONTROLLERSTATE_BATTERYCHANGED) && (elementDictionary != null)) {
                ARControllerArgumentDictionary<Object> args = elementDictionary.get(ARControllerDictionary.ARCONTROLLER_DICTIONARY_SINGLE_KEY);

                if (args != null) {
                    final int battery = (Integer) args.get(ARFeatureSkyController.ARCONTROLLER_DICTIONARY_KEY_SKYCONTROLLER_SKYCONTROLLERSTATE_BATTERYCHANGED_PERCENT);
                    mHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            notifySkyControllerBatteryChanged(battery);
                        }
                    });
                }
            }
            // if event received is the flying state update
            // NOTE(review): this "else if" chains to the skyController-battery "if" above,
            // not to the drone-battery one — confirm that is intentional.
            else if ((commandKey == ARCONTROLLER_DICTIONARY_KEY_ENUM.ARCONTROLLER_DICTIONARY_KEY_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED) && (elementDictionary != null)) {
                ARControllerArgumentDictionary<Object> args = elementDictionary.get(ARControllerDictionary.ARCONTROLLER_DICTIONARY_SINGLE_KEY);

                if (args != null) {
                    final ARCOMMANDS_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE_ENUM state = ARCOMMANDS_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE_ENUM.getFromValue((Integer) args.get(ARFeatureARDrone3.ARCONTROLLER_DICTIONARY_KEY_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE));

                    mHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            mFlyingState = state;
                            notifyPilotingStateChanged(state);
                        }
                    });
                }
            }
            // if event received is the picture notification
            else if ((commandKey == ARCONTROLLER_DICTIONARY_KEY_ENUM.ARCONTROLLER_DICTIONARY_KEY_ARDRONE3_MEDIARECORDEVENT_PICTUREEVENTCHANGED) && (elementDictionary != null)){
                ARControllerArgumentDictionary<Object> args = elementDictionary.get(ARControllerDictionary.ARCONTROLLER_DICTIONARY_SINGLE_KEY);
                if (args != null) {
                    final ARCOMMANDS_ARDRONE3_MEDIARECORDEVENT_PICTUREEVENTCHANGED_ERROR_ENUM error = ARCOMMANDS_ARDRONE3_MEDIARECORDEVENT_PICTUREEVENTCHANGED_ERROR_ENUM.getFromValue((Integer)args.get(ARFeatureARDrone3.ARCONTROLLER_DICTIONARY_KEY_ARDRONE3_MEDIARECORDEVENT_PICTUREEVENTCHANGED_ERROR));

                    mHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            notifyPictureTaken(error);
                        }
                    });
                }
            }
            // if event received is the run id
            else if ((commandKey == ARCONTROLLER_DICTIONARY_KEY_ENUM.ARCONTROLLER_DICTIONARY_KEY_COMMON_RUNSTATE_RUNIDCHANGED) && (elementDictionary != null)){
                ARControllerArgumentDictionary<Object> args = elementDictionary.get(ARControllerDictionary.ARCONTROLLER_DICTIONARY_SINGLE_KEY);
                if (args != null) {
                    final String runID = (String) args.get(ARFeatureCommon.ARCONTROLLER_DICTIONARY_KEY_COMMON_RUNSTATE_RUNIDCHANGED_RUNID);
                    mHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            // remember the run id so getLastFlightMedias() can filter downloads
                            mCurrentRunId = runID;
                        }
                    });
                }
            }
        }
    };

    // Video stream callbacks; these are invoked on a separate (non-UI) thread.
    private final ARDeviceControllerStreamListener mStreamListener = new ARDeviceControllerStreamListener() {
        @Override
        public ARCONTROLLER_ERROR_ENUM configureDecoder(ARDeviceController deviceController, final ARControllerCodec codec) {
            notifyConfigureDecoder(codec);
            return ARCONTROLLER_ERROR_ENUM.ARCONTROLLER_OK;
        }

        @Override
        public ARCONTROLLER_ERROR_ENUM onFrameReceived(ARDeviceController deviceController, final ARFrame frame) {
            notifyFrameReceived(frame);
            return ARCONTROLLER_ERROR_ENUM.ARCONTROLLER_OK;
        }

        @Override
        public void onFrameTimeout(ARDeviceController deviceController) {}
    };
}
import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.net.Socket; import java.net.UnknownHostException; import java.util.Hashtable; import java.util.Iterator; import java.util.Map.Entry; import java.util.NoSuchElementException; import java.util.Random; import java.util.Scanner; import java.util.concurrent.Semaphore; public class Client { public static void main(String[] args) throws UnknownHostException, IOException, InterruptedException { if(args.length > 0) new Client(args[0]).Run(); else new Client().Run(); } private static Random mRandom = new Random(); private String mName; private int mID; private Socket mSocket; private Hashtable<String, Integer> mNameTable; private Hashtable<Integer, String> mIDTable; private PacketSender mSender; private Semaphore mSemaphore; public Client(){ this("unknown" + Integer.toString(mRandom.nextInt() % 1000)); } public Client(String name){ mName = name; mID = -1; mNameTable = new Hashtable<>(); mIDTable = new Hashtable<>(); mSemaphore = new Semaphore(1); } private void Run() throws UnknownHostException, IOException, InterruptedException { mSocket = new Socket("127.0.0.1", 12345); mSender = new PacketSender(mSocket.getOutputStream()); Scanner keyboard = new Scanner(System.in); System.out.println("The client connected to the server!"); mSender.sendRegisterClientCommand(mName); new Thread(new ServerHandler()).start(); while (keyboard.hasNextLine()) { Scanner scanner = new Scanner(keyboard.nextLine()); scanner.useDelimiter(" "); buildCommand(scanner); } keyboard.close(); mSender.close(); mSocket.close(); } private void buildCommand(Scanner scanner) throws InterruptedException, IOException{ try{ String command = scanner.next(); if(command.equals("send")){ buildSendMessageCommand(scanner); } else if(command.equals("list")){ listClients(); }else{ throw new NoSuchElementException("Invalid command."); } } catch(NoSuchElementException e){ String message = e.getLocalizedMessage(); if(message 
== null) message = "Invalid command."; System.out.println(message); } } private void listClients() throws InterruptedException{ mSemaphore.acquire(); if(mIDTable.size() <= 2) System.out.println("No other users online."); else System.out.println("Online users:"); for(Entry<Integer, String> entry : mIDTable.entrySet()){ if(entry.getKey() != mID && entry.getKey() != Server.SERVER_ID){ System.out.println(entry.getValue()); } } mSemaphore.release(); } private void buildSendMessageCommand(Scanner scanner) throws IOException, InterruptedException{ String token = scanner.next(); mSemaphore.acquire(); try{ if(mNameTable.containsKey(token)){ String message = scanner.nextLine().trim(); if(!message.isEmpty()) mSender.sendMessageCommand(message, mNameTable.get(token)); } else{ throw new NoSuchElementException("Invalid destination."); } } catch(NoSuchElementException e){ String message = e.getLocalizedMessage(); System.out.println(message); } mSemaphore.release(); } private class ServerHandler implements Runnable { @Override public void run() { try{ InputStream inputStream = mSocket.getInputStream(); ObjectInputStream stream = new ObjectInputStream(inputStream); Packet packet; while ((packet = (Packet)stream.readObject()) != null) { handlePacket(packet); } stream.close(); } catch(ClassNotFoundException | IOException e){ } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } private void handlePacket(Packet packet) throws InterruptedException{ switch(packet.getType()){ case COMMAND: handleCommand((Command) packet); break; case EVENT: handleEvent((Event) packet); break; default: break; } } private void handleCommand(Command command){ switch(command.mType){ case SEND_MESSAGE: handleSendMessageCommand((SendMessageCommand)command); break; case REGISTER_CLIENT: break; default: break; } } private void handleSendMessageCommand(SendMessageCommand command){ System.out.println(mIDTable.get(command.getSource()) + " said: " + command.getMessage()); } 
private void handleEvent(Event event) throws InterruptedException{ switch(event.getEventType()){ case NEW_CLIENT: handleNewClientEvent((NewClientEvent) event); break; case ID_NOTIFY: handleIDNotifyEvent((IDNotifyEvent) event); break; case CLIENT_TABLE_NOTIFY: handleClientTableNotifyEvent((ClientTableNotifyEvent) event); break; case CONNECTION_LOST_NOTIFY: handleConnectionLostNotify((ConnectionLostNotify) event); break; case COMMAND_ERROR: handleCommandError((CommandErrorNotification) event); default: break; } } private void handleNewClientEvent(NewClientEvent event) throws InterruptedException{ Logger.clientOnline(event.getName()); mSemaphore.acquire(); mNameTable.put(event.getName(), event.getID()); mIDTable.put(event.getID(), event.getName()); mSemaphore.release(); } private void handleIDNotifyEvent(IDNotifyEvent event){ mID = event.getID(); mSender.setSenderID(mID); } private void handleClientTableNotifyEvent(ClientTableNotifyEvent event) throws InterruptedException{ Iterator<Entry<String, Integer>> iterator = event.getNameTable().entrySet().iterator(); mSemaphore.acquire(); while(iterator.hasNext()){ Entry<String, Integer> entry = iterator.next(); mNameTable.put(entry.getKey(), entry.getValue()); mIDTable.put(entry.getValue(), entry.getKey()); } mSemaphore.release(); for(Entry<Integer, String> entry : mIDTable.entrySet()){ if(entry.getKey() != Server.SERVER_ID && entry.getKey() != mID) Logger.clientOnline(entry.getValue()); } } private void handleConnectionLostNotify(ConnectionLostNotify event){ int ID = event.getID(); String name = mIDTable.get(ID); Logger.clientLeaved(name); mNameTable.remove(name); mIDTable.remove(ID); } private void handleCommandError(CommandErrorNotification error){ switch(error.getErrorType()){ case REGISTER_CLIENT: handleRegisterClientError((RegisterClientError) error); break; } } private void handleRegisterClientError(RegisterClientError error){ } } }
/*************************GO-LICENSE-START********************************* * Copyright 2015 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.cruise.page; import com.thoughtworks.cruise.Urls; import com.thoughtworks.cruise.client.TalkToCruise; import com.thoughtworks.cruise.client.TalkToCruise.CruiseResponse; import com.thoughtworks.cruise.state.CurrentPageState; import com.thoughtworks.cruise.state.ScenarioState; import com.thoughtworks.cruise.utils.Assertions; import com.thoughtworks.cruise.utils.Assertions.Predicate; import com.thoughtworks.cruise.utils.Timeout; import net.sf.sahi.client.Browser; import net.sf.sahi.client.ElementStub; import org.apache.commons.lang.StringUtils; import org.hamcrest.Matchers; import org.hamcrest.core.Is; import org.junit.Assert; import static junit.framework.Assert.assertTrue; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; public class AlreadyOnPipelineHistoryPage extends CruisePage { private final TalkToCruise talkToCruise; private String currentLabel; public AlreadyOnPipelineHistoryPage(CurrentPageState currentPageState, ScenarioState scenarioState, TalkToCruise talkToCruise, Browser browser) { super(scenarioState, true, browser); this.talkToCruise = talkToCruise; 
currentPageState.assertCurrentPageIs(CurrentPageState.Page.PIPELINE_HISTORY); } @com.thoughtworks.gauge.Step("Looking at pipeline with label <currentLabel>") public void lookingAtPipelineWithLabel(String currentLabel) throws Exception { this.currentLabel = currentLabel; } public void triggerNewPipeline() throws Exception { browser.button("force-run-pipeline").click(); } @com.thoughtworks.gauge.Step("Approve stage <stageName>") public void approveStage(String stageName) throws Exception { approveStageWithLabel(stageName, currentLabel()); } @com.thoughtworks.gauge.Step("Approve stage <stageName> with label <label>") public void approveStageWithLabel(String stageName, String label) throws Exception { boolean javascriptAlertBoxesStillExistAndAreUntestableWithWebDriver = true; // that sucks. if (javascriptAlertBoxesStillExistAndAreUntestableWithWebDriver) { String url = Urls.urlFor(String.format("/run/%s/%s/%s", scenarioState.currentRuntimePipelineName(), label, stageName)); System.err.println("posting to " + url); CruiseResponse response = talkToCruise.post(url); Assert.assertThat(String.format("Got back return code %s-%s from url %s", response.getStatus(), response.getBody(), url), response.isSuccess(), Is.is(true)); } else { browser.button(String.format("approve-%s-%s", label, stageName)).click(); } } private String currentLabel() { if (this.currentLabel == null) { throw new IllegalStateException("Must look at a particular pipeline instance"); } return this.currentLabel; } private void showBuildCauseMessage(final int row) { Assertions.waitUntil(Timeout.FIVE_SECONDS, new Predicate() { public boolean call() throws Exception { ElementStub pipelineRow = browser.row(row).in(browser.table("pipeline-history-group")); ElementStub link = browser.link(1).in(pipelineRow); link.click(); String buildCauseMessage = getBuildCauseMessage(row); return !org.apache.commons.lang.StringUtils.isEmpty(buildCauseMessage); } }); } private String getBuildCauseMessage(int row) { return 
browser.div("/build-cause-summary-container/").in(browser.row(row).in(browser.table("pipeline-history-group"))).getText(); } @com.thoughtworks.gauge.Step("Verify build cause message on row <row> contains <text> and not <text2>") public void verifyBuildCauseMessageOnRowContainsAndNot(Integer row, String text, String text2) throws Exception { showBuildCauseMessage(row); String msg = getBuildCauseMessage(row); org.junit.Assert.assertThat(msg, Matchers.containsString(scenarioState.expand(text))); org.junit.Assert.assertThat(msg, Matchers.not(Matchers.containsString(scenarioState.expand(text2)))); } //TODO: Get rid of this stupid duplication @Override protected String url() { return browserWrapper.getCurrentUrl(); } @com.thoughtworks.gauge.Step("Verify build cause message contains <shouldExist>") public void verifyBuildCauseMessageContains(String shouldExist) throws Exception { String msg = buildCauseText(); // String runtime_matcher = ".*(\\$\\{runtime_name:.*\\}).*"; // String variable_matcher = ".*:(.*)\\}"; // String replaceableVariable = ""; // Pattern pattern = Pattern.compile(runtime_matcher); // Pattern variablePattern = Pattern.compile(variable_matcher); // Matcher matcher = pattern.matcher(shouldExist); // String stateVariable; // // if (matcher.find()) // { // replaceableVariable = matcher.group(1); // Matcher stringMatcher = variablePattern.matcher(replaceableVariable); // System.out.println("below"); // System.out.println(replaceableVariable); // stateVariable = ""; // if(stringMatcher.find()) // { // stateVariable = stringMatcher.group(1); // } // // // System.out.println(stateVariable); // System.out.print("above"); org.junit.Assert.assertThat(msg, Matchers.containsString(scenarioState.expand(shouldExist))); } public void verifyBuildCauseMessageDoesNotContain(String shouldNotExist) throws Exception { String msg = buildCauseText(); org.junit.Assert.assertThat(msg, Matchers.not(Matchers.containsString(shouldNotExist))); } private String buildCauseText() { 
showBuildCauseMessage(); return getBuildCauseMessage(); } private String getBuildCauseMessage() { return browser.div("/build-cause-summary-container/").in(pipelineSelection()).getText(); } private void showBuildCauseMessage() { Assertions.waitUntil(Timeout.TWENTY_SECONDS, new Predicate() { public boolean call() throws Exception { browser.link("/Triggered by/").in(pipelineSelection()).click(); String buildCauseMessage = getBuildCauseMessage(); return !StringUtils.isEmpty(buildCauseMessage); } }); } private ElementStub pipelineSelection() { return browser.tableHeader("/^" + currentLabel + "/"); } @com.thoughtworks.gauge.Step("Verify on pipeline history page for <pipelineName>") public void verifyOnPipelineHistoryPageFor(String pipelineName) throws Exception { assertThat(browserWrapper.getCurrentUrl(), containsString(scenarioState.pipelineNamed(pipelineName))); } @com.thoughtworks.gauge.Step("Open changes section for counter <counter> - Already on pipeline history page") public void openChangesSectionForCounter(final String counter) throws Exception { Assertions.waitUntil(Timeout.TWENTY_SECONDS, new Predicate() { @Override public boolean call() throws Exception { ElementStub showChangesElement = browser.link("Triggered by view").near(browser.link(counter)); if (!showChangesElement.exists()) return false; showChangesElement.click(); return true; } }); } @com.thoughtworks.gauge.Step("Verify stage <stageName> of pipeline can be rerun") public void verifyStageOfPipelineCanBeRerun(final String stageName){ Assertions.waitUntil(Timeout.THIRTY_SECONDS, new Predicate(){ @Override public boolean call() throws Exception { String pipelineName = scenarioState.currentRuntimePipelineName(); return browser.byId("rerun-"+pipelineName+"-"+currentLabel()+"-"+stageName).exists(); } }); } @com.thoughtworks.gauge.Step("Verify stage <stageName> of pipeline cannot be rerun") public void verifyStageOfPipelineCannotBeRerun(String stageName){ String pipelineName = 
scenarioState.currentRuntimePipelineName(); assertTrue("Rerun link found for run "+currentLabel()+" for pipeline "+pipelineName, !browser.byId("rerun-"+pipelineName+"-"+currentLabel()+"-"+stageName).exists()); } @com.thoughtworks.gauge.Step("Rerun stage <stageName> - Already On Pipeline History Page") public void rerunStage(String stageName) { String pipelineName = scenarioState.currentRuntimePipelineName(); browser.byId("rerun-"+pipelineName+"-"+currentLabel()+"-"+stageName).click(); } @com.thoughtworks.gauge.Step("Verify <stageName> stage can be cancelled") public void verifyStageCanBeCancelled(String stageName) throws Exception { String pipelineName = scenarioState.currentRuntimePipelineName(); Assert.assertThat(browser.byId("cancel-"+pipelineName+"-"+currentLabel()+"-"+stageName).exists(), Is.is(true)); } @com.thoughtworks.gauge.Step("Cancel stage <stageName>") public void cancelStage(String stageName){ String pipelineName = scenarioState.currentRuntimePipelineName(); browser.byId("cancel-"+pipelineName+"-"+currentLabel()+"-"+stageName).click(); } @com.thoughtworks.gauge.Step("Pause pipeline on activity page") public void pausePipelineOnActivityPage() throws Exception { browser.byId("pause-"+scenarioState.currentRuntimePipelineName()).click(); } @com.thoughtworks.gauge.Step("Verify pipeline is paused on pipeline activity page") public void verifyPipelineIsPausedOnPipelineActivityPage() throws Exception { reloadPage(); assertEquals("Pipeline not paused", "unpause", browser.byId("pause-"+scenarioState.currentRuntimePipelineName()).getText().toLowerCase()); } @com.thoughtworks.gauge.Step("Unpause pipeline on pipeline activity page") public void unpausePipelineOnPipelineActivityPage() throws Exception { browser.byId("pause-"+scenarioState.currentRuntimePipelineName()).click(); reloadPage(); } @com.thoughtworks.gauge.Step("Verify pipeline cannot be paused") public void verifyPipelineCannotBePaused() throws Exception { 
Assert.assertThat(!browser.byId("pause-"+scenarioState.currentRuntimePipelineName()).exists(),Is.is(true)); } @com.thoughtworks.gauge.Step("Verify <stageName> can be approved") public void verifyCanBeApproved(final String stageName) throws Exception { Assertions.waitUntil(Timeout.THIRTY_SECONDS, new Predicate() { @Override public boolean call() throws Exception { ElementStub approveElement = browser.byId("approve-"+currentLabel()+"-"+stageName); if (approveElement.exists()) return true; else return false; } }); } @com.thoughtworks.gauge.Step("Verify <stageName> cannot be approved") public void verifyCannotBeApproved(final String stageName) throws Exception { Assertions.waitUntil(Timeout.THIRTY_SECONDS, new Predicate() { @Override public boolean call() throws Exception { ElementStub approveElement = browser.byId("approve-"+currentLabel()+"-"+stageName); if (approveElement.exists()) return false; else return true; } }); } @com.thoughtworks.gauge.Step("Verify pipeline is triggered by <user>") public void verifyPipelineIsTriggeredBy(final String user) throws Exception { Assertions.waitUntil(Timeout.THIRTY_SECONDS, new Predicate() { @Override public boolean call() throws Exception { return browser.link("/Triggered by/").in(pipelineSelection()).getText().equals("Triggered by "+user); } }); } public void waitForStageToPass(final String stageName) throws Exception { Assertions.waitUntil(Timeout.TWENTY_SECONDS, new Predicate(){ @Override public boolean call() throws Exception { reloadPage(); return browser.div("passed-stage").in(browser.byId("stage-detail-"+currentLabel()+"-"+stageName)).exists(); } }); } }
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package net.starlark.java.cmd;

import static java.nio.charset.StandardCharsets.UTF_8;

import java.io.BufferedReader;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.time.Duration;
import net.starlark.java.eval.EvalException;
import net.starlark.java.eval.Module;
import net.starlark.java.eval.Mutability;
import net.starlark.java.eval.Starlark;
import net.starlark.java.eval.StarlarkSemantics;
import net.starlark.java.eval.StarlarkThread;
import net.starlark.java.syntax.FileOptions;
import net.starlark.java.syntax.ParserInput;
import net.starlark.java.syntax.SyntaxError;

/**
 * Main is a standalone interpreter for the core Starlark language. It does not yet support load
 * statements.
 *
 * <p>The sad class name is due to the linting tool, which forbids lowercase "starlark", and Java's
 * lack of renaming imports, which makes the name "Starlark" impractical due to conflicts with
 * eval.Starlark.
 */
class Main {
  private static final String START_PROMPT = ">> ";
  private static final String CONTINUATION_PROMPT = ".. ";

  private static final BufferedReader reader =
      new BufferedReader(new InputStreamReader(System.in, UTF_8));
  private static final StarlarkThread thread;
  private static final Module module = Module.create();

  // TODO(adonovan): set load-binds-globally option when we support load,
  // so that loads bound in one REPL chunk are visible in the next.
  private static final FileOptions OPTIONS = FileOptions.DEFAULT;

  static {
    Mutability mu = Mutability.create("interpreter");
    thread = new StarlarkThread(mu, StarlarkSemantics.DEFAULT);
    thread.setPrintHandler((th, msg) -> System.out.println(msg));
  }

  /**
   * Reads one REPL chunk: the first line plus continuation lines up to a blank
   * line. Returns null on EOF or read error.
   */
  private static String prompt() {
    StringBuilder input = new StringBuilder();
    System.out.print(START_PROMPT);
    try {
      String lineSeparator = "";
      while (true) {
        String line = reader.readLine();
        if (line == null) {
          return null; // EOF
        }
        if (line.isEmpty()) {
          return input.toString(); // blank line terminates the chunk
        }
        input.append(lineSeparator).append(line);
        lineSeparator = "\n";
        System.out.print(CONTINUATION_PROMPT);
      }
    } catch (IOException e) {
      System.err.format("Error reading line: %s\n", e);
      return null;
    }
  }

  /** Provide a REPL evaluating Starlark code. */
  @SuppressWarnings("CatchAndPrintStackTrace")
  private static void readEvalPrintLoop() {
    System.err.println("Welcome to Starlark (java.starlark.net)");
    String line;

    // TODO(adonovan): parse a compound statement, like the Python and
    // go.starlark.net REPLs. This requires a new grammar production, and
    // integration with the lexer so that it consumes new
    // lines only until the parse is complete.

    while ((line = prompt()) != null) {
      ParserInput input = ParserInput.fromString(line, "<stdin>");
      try {
        Object result = Starlark.execFile(input, OPTIONS, module, thread);
        if (result != Starlark.NONE) {
          System.out.println(Starlark.repr(result));
        }
      } catch (SyntaxError.Exception ex) {
        for (SyntaxError error : ex.errors()) {
          System.err.println(error);
        }
      } catch (EvalException ex) {
        // TODO(adonovan): provide a SourceReader. Requires that we buffer the
        // entire history so that line numbers don't reset in each chunk.
        System.err.println(ex.getMessageWithStack());
      } catch (InterruptedException ex) {
        System.err.println("Interrupted");
      }
    }
  }

  /** Execute a Starlark file; returns a process exit code (0 on success). */
  private static int execute(ParserInput input) {
    try {
      Starlark.execFile(input, OPTIONS, module, thread);
      return 0;
    } catch (SyntaxError.Exception ex) {
      for (SyntaxError error : ex.errors()) {
        System.err.println(error);
      }
      return 1;
    } catch (EvalException ex) {
      System.err.println(ex.getMessageWithStack());
      return 1;
    } catch (InterruptedException e) {
      System.err.println("Interrupted");
      return 1;
    }
  }

  public static void main(String[] args) throws IOException {
    String file = null;
    String cmd = null;
    String cpuprofile = null;

    // parse flags
    int i;
    for (i = 0; i < args.length; i++) {
      if (!args[i].startsWith("-")) {
        break;
      }
      if (args[i].equals("--")) {
        i++;
        break;
      }
      if (args[i].equals("-c")) {
        if (i + 1 == args.length) {
          throw new IOException("-c <cmd> flag needs an argument");
        }
        cmd = args[++i];
      } else if (args[i].equals("-cpuprofile")) {
        if (i + 1 == args.length) {
          throw new IOException("-cpuprofile <file> flag needs an argument");
        }
        cpuprofile = args[++i];
      } else {
        throw new IOException("unknown flag: " + args[i]);
      }
    }

    // positional arguments
    if (i < args.length) {
      if (i + 1 < args.length) {
        throw new IOException("too many positional arguments");
      }
      file = args[i];
    }

    // FIX: keep a reference to the profile stream so it can be closed below;
    // previously the FileOutputStream was never closed.
    FileOutputStream profileOut = null;
    if (cpuprofile != null) {
      profileOut = new FileOutputStream(cpuprofile);
      Starlark.startCpuProfile(profileOut, Duration.ofMillis(10));
    }

    int exit;
    if (file == null) {
      if (cmd != null) {
        exit = execute(ParserInput.fromString(cmd, "<command-line>"));
      } else {
        readEvalPrintLoop();
        exit = 0;
      }
    } else if (cmd == null) {
      try {
        exit = execute(ParserInput.readFile(file));
      } catch (IOException e) {
        // This results in such lame error messages as:
        // "Error reading a.star: java.nio.file.NoSuchFileException: a.star"
        System.err.format("Error reading %s: %s\n", file, e);
        exit = 1;
      }
    } else {
      System.err.println("usage: Starlark [-cpuprofile file] [-c cmd | file]");
      exit = 1;
    }

    if (cpuprofile != null) {
      Starlark.stopCpuProfile();
      // FIX: flush and release the profile file before exiting.
      profileOut.close();
    }

    System.exit(exit);
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.collect;

import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.UnmodifiableIterator;
import org.apache.lucene.util.mutable.MutableValueInt;

import java.lang.reflect.Array;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collection;
import java.util.Deque;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * An immutable map whose writes result in a new copy of the map to be created.
 *
 * This is essentially a hash array mapped trie: inner nodes use a bitmap in
 * order to map hashes to slots by counting ones. In case of a collision (two
 * values having the same 32-bits hash), a leaf node is created which stores
 * and searches for values sequentially.
 *
 * Reads and writes both perform in logarithmic time. Null keys and values are
 * not supported.
 *
 * This structure might need to perform several object creations per write so
 * it is better suited for work-loads that are not too write-intensive.
 *
 * @see <a href="http://en.wikipedia.org/wiki/Hash_array_mapped_trie">the wikipedia page</a>
 */
public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {

    private static final int TOTAL_HASH_BITS = 32;
    private static final Object[] EMPTY_ARRAY = new Object[0];

    // Hashes are consumed 6 bits at a time: each inner node maps a 6-bit slice
    // of the hash to at most 64 slots.
    private static final int HASH_BITS = 6;
    private static final int HASH_MASK = 0x3F;

    /**
     * Return a copy of the provided map.
     */
    public static <K, V> CopyOnWriteHashMap<K, V> copyOf(Map<? extends K, ? extends V> map) {
        if (map instanceof CopyOnWriteHashMap) {
            // no need to copy in that case
            @SuppressWarnings("unchecked")
            final CopyOnWriteHashMap<K, V> cowMap = (CopyOnWriteHashMap<K, V>) map;
            return cowMap;
        } else {
            return new CopyOnWriteHashMap<K, V>().copyAndPutAll(map);
        }
    }

    /**
     * Abstraction of a node, implemented by both inner and leaf nodes.
     */
    private abstract static class Node<K, V> {

        /**
         * Recursively get the key with the given hash.
         */
        abstract V get(Object key, int hash);

        /**
         * Recursively add a new entry to this node. <code>hashBits</code> is
         * the number of bits that are still set in the hash. When this value
         * reaches a number that is less than or equal to {@code 0}, a leaf
         * node needs to be created since it means that a collision occurred
         * on the 32 bits of the hash.
         */
        abstract Node<K, V> put(K key, int hash, int hashBits, V value, MutableValueInt newValue);

        /**
         * Recursively remove an entry from this node.
         */
        abstract Node<K, V> remove(Object key, int hash);

        /**
         * For the current node only, append entries that are stored on this
         * node to <code>entries</code> and sub nodes to <code>nodes</code>.
         */
        abstract void visit(Deque<Map.Entry<K, V>> entries, Deque<Node<K, V>> nodes);

        /**
         * Whether this node stores nothing under it.
         */
        abstract boolean isEmpty();
    }

    /**
     * A leaf of the tree where all hashes are equal. Values are added and
     * retrieved in linear time.
     */
    private static class Leaf<K, V> extends Node<K, V> {

        private final K[] keys;
        private final V[] values;

        Leaf(K[] keys, V[] values) {
            this.keys = keys;
            this.values = values;
        }

        @SuppressWarnings("unchecked")
        Leaf() {
            this((K[]) EMPTY_ARRAY, (V[]) EMPTY_ARRAY);
        }

        @Override
        boolean isEmpty() {
            return keys.length == 0;
        }

        @Override
        void visit(Deque<Map.Entry<K, V>> entries, Deque<Node<K, V>> nodes) {
            for (int i = 0; i < keys.length; ++i) {
                entries.add(new AbstractMap.SimpleImmutableEntry<>(keys[i], values[i]));
            }
        }

        @Override
        V get(Object key, int hash) {
            // linear scan: all keys in a leaf share the same 32-bit hash
            for (int i = 0; i < keys.length; i++) {
                if (key.equals(keys[i])) {
                    return values[i];
                }
            }
            return null;
        }

        private static <T> T[] replace(T[] array, int index, T value) {
            final T[] copy = Arrays.copyOf(array, array.length);
            copy[index] = value;
            return copy;
        }

        @Override
        Leaf<K, V> put(K key, int hash, int hashBits, V value, MutableValueInt newValue) {
            assert hashBits <= 0 : hashBits;
            int slot = -1;
            for (int i = 0; i < keys.length; i++) {
                if (key.equals(keys[i])) {
                    slot = i;
                    break;
                }
            }

            final K[] keys2;
            final V[] values2;
            if (slot < 0) {
                keys2 = appendElement(keys, key);
                values2 = appendElement(values, value);
                newValue.value = 1; // tell the caller the map grew by one entry
            } else {
                keys2 = replace(keys, slot, key);
                values2 = replace(values, slot, value);
            }

            return new Leaf<>(keys2, values2);
        }

        @Override
        Leaf<K, V> remove(Object key, int hash) {
            int slot = -1;
            for (int i = 0; i < keys.length; i++) {
                if (key.equals(keys[i])) {
                    slot = i;
                    break;
                }
            }
            if (slot < 0) {
                return this;
            }
            final K[] keys2 = removeArrayElement(keys, slot);
            final V[] values2 = removeArrayElement(values, slot);
            return new Leaf<>(keys2, values2);
        }
    }

    // FIX: the (T[]) cast below is unchecked; it is safe because the new array
    // is created with the same component type via Array.newInstance, so the
    // warning is suppressed explicitly instead of leaking to callers.
    @SuppressWarnings("unchecked")
    private static <T> T[] removeArrayElement(T[] array, int index) {
        final Object result = Array.newInstance(array.getClass().getComponentType(), array.length - 1);
        System.arraycopy(array, 0, result, 0, index);
        if (index < array.length - 1) {
            System.arraycopy(array, index + 1, result, index, array.length - index - 1);
        }
        return (T[]) result;
    }

    public static <T> T[] appendElement(final T[] array, final T element) {
        final T[] newArray = Arrays.copyOf(array, array.length + 1);
        newArray[newArray.length - 1] = element;
        return newArray;
    }

    public static <T> T[] insertElement(final T[] array, final T element, final int index) {
        final T[] result = Arrays.copyOf(array, array.length + 1);
        System.arraycopy(array, 0, result, 0, index);
        result[index] = element;
        if (index < array.length) {
            System.arraycopy(array, index, result, index + 1, array.length - index);
        }
        return result;
    }

    /**
     * An inner node in this trie. Inner nodes store up to 64 key-value pairs
     * and use a bitmap in order to associate hashes to them. For example, if
     * an inner node contains 5 values, then 5 bits will be set in the bitmap
     * and the ordinal of the bit set in this bit map will be the slot number.
     *
     * As a consequence, the number of slots in an inner node is equal to the
     * number of one bits in the bitmap.
     */
    private static class InnerNode<K, V> extends Node<K, V> {

        private final long mask; // the bitmap
        private final K[] keys;
        final Object[] subNodes; // subNodes[slot] is either a value or a sub node in case of a hash collision

        InnerNode(long mask, K[] keys, Object[] subNodes) {
            this.mask = mask;
            this.keys = keys;
            this.subNodes = subNodes;
            assert consistent();
        }

        // only used in assert
        private boolean consistent() {
            assert Long.bitCount(mask) == keys.length;
            assert Long.bitCount(mask) == subNodes.length;
            for (int i = 0; i < keys.length; ++i) {
                if (subNodes[i] instanceof Node) {
                    assert keys[i] == null;
                } else {
                    assert keys[i] != null;
                }
            }
            return true;
        }

        @Override
        boolean isEmpty() {
            return mask == 0;
        }

        @SuppressWarnings("unchecked")
        InnerNode() {
            this(0, (K[]) EMPTY_ARRAY, EMPTY_ARRAY);
        }

        @Override
        void visit(Deque<Map.Entry<K, V>> entries, Deque<Node<K, V>> nodes) {
            for (int i = 0; i < keys.length; ++i) {
                final Object sub = subNodes[i];
                if (sub instanceof Node) {
                    @SuppressWarnings("unchecked")
                    final Node<K, V> subNode = (Node<K, V>) sub;
                    assert keys[i] == null;
                    nodes.add(subNode);
                } else {
                    @SuppressWarnings("unchecked")
                    final V value = (V) sub;
                    entries.add(new AbstractMap.SimpleImmutableEntry<>(keys[i], value));
                }
            }
        }

        /**
         * For a given hash on 6 bits, its value is set if the bitmap has a one
         * at the corresponding index.
         */
        private boolean exists(int hash6) {
            return (mask & (1L << hash6)) != 0;
        }

        /**
         * For a given hash on 6 bits, the slot number is the number of one
         * bits on the right of the <code>hash6</code>-th bit.
         */
        private int slot(int hash6) {
            return Long.bitCount(mask & ((1L << hash6) - 1));
        }

        @Override
        V get(Object key, int hash) {
            final int hash6 = hash & HASH_MASK;
            if (!exists(hash6)) {
                return null;
            }
            final int slot = slot(hash6);
            final Object sub = subNodes[slot];
            assert sub != null;
            if (sub instanceof Node) {
                assert keys[slot] == null; // keys don't make sense on inner nodes
                @SuppressWarnings("unchecked")
                final Node<K, V> subNode = (Node<K, V>) sub;
                return subNode.get(key, hash >>> HASH_BITS);
            } else {
                if (keys[slot].equals(key)) {
                    @SuppressWarnings("unchecked")
                    final V v = (V) sub;
                    return v;
                } else {
                    // we have an entry for this hash, but the value is different
                    return null;
                }
            }
        }

        private Node<K, V> newSubNode(int hashBits) {
            if (hashBits <= 0) {
                return new Leaf<K, V>();
            } else {
                return new InnerNode<K, V>();
            }
        }

        // FIX: the two casts on previousValue are unchecked but safe by the
        // class invariant (a non-Node slot always holds a V); suppressed here
        // instead of emitting compiler warnings.
        @SuppressWarnings("unchecked")
        private InnerNode<K, V> putExisting(K key, int hash, int hashBits, int slot, V value, MutableValueInt newValue) {
            final K[] keys2 = Arrays.copyOf(keys, keys.length);
            final Object[] subNodes2 = Arrays.copyOf(subNodes, subNodes.length);

            final Object previousValue = subNodes2[slot];
            if (previousValue instanceof Node) {
                // insert recursively
                assert keys[slot] == null;
                subNodes2[slot] = ((Node<K, V>) previousValue).put(key, hash, hashBits, value, newValue);
            } else if (keys[slot].equals(key)) {
                // replace the existing entry
                subNodes2[slot] = value;
            } else {
                // hash collision
                final K previousKey = keys[slot];
                final int previousHash = previousKey.hashCode() >>> (TOTAL_HASH_BITS - hashBits);
                Node<K, V> subNode = newSubNode(hashBits);
                subNode = subNode.put(previousKey, previousHash, hashBits, (V) previousValue, newValue);
                subNode = subNode.put(key, hash, hashBits, value, newValue);
                keys2[slot] = null;
                subNodes2[slot] = subNode;
            }
            return new InnerNode<>(mask, keys2, subNodes2);
        }

        private InnerNode<K, V> putNew(K key, int hash6, int slot, V value) {
            final long mask2 = mask | (1L << hash6);
            final K[] keys2 = insertElement(keys, key, slot);
            final Object[] subNodes2 = insertElement(subNodes, value, slot);
            return new InnerNode<>(mask2, keys2, subNodes2);
        }

        @Override
        InnerNode<K, V> put(K key, int hash, int hashBits, V value, MutableValueInt newValue) {
            final int hash6 = hash & HASH_MASK;
            final int slot = slot(hash6);

            if (exists(hash6)) {
                hash >>>= HASH_BITS;
                hashBits -= HASH_BITS;
                return putExisting(key, hash, hashBits, slot, value, newValue);
            } else {
                newValue.value = 1;
                return putNew(key, hash6, slot, value);
            }
        }

        private InnerNode<K, V> removeSlot(int hash6, int slot) {
            final long mask2 = mask & ~(1L << hash6);
            final K[] keys2 = removeArrayElement(keys, slot);
            final Object[] subNodes2 = removeArrayElement(subNodes, slot);
            return new InnerNode<>(mask2, keys2, subNodes2);
        }

        @Override
        InnerNode<K, V> remove(Object key, int hash) {
            final int hash6 = hash & HASH_MASK;
            if (!exists(hash6)) {
                return this;
            }
            final int slot = slot(hash6);
            final Object previousValue = subNodes[slot];
            if (previousValue instanceof Node) {
                @SuppressWarnings("unchecked")
                final Node<K, V> subNode = (Node<K, V>) previousValue;
                final Node<K, V> removed = subNode.remove(key, hash >>> HASH_BITS);
                if (removed == subNode) {
                    // not in sub-nodes
                    return this;
                }
                if (removed.isEmpty()) {
                    return removeSlot(hash6, slot);
                }
                final K[] keys2 = Arrays.copyOf(keys, keys.length);
                final Object[] subNodes2 = Arrays.copyOf(subNodes, subNodes.length);
                subNodes2[slot] = removed;
                return new InnerNode<>(mask, keys2, subNodes2);
            } else if (keys[slot].equals(key)) {
                // remove entry
                return removeSlot(hash6, slot);
            } else {
                // hash collision, nothing to remove
                return this;
            }
        }
    }

    /**
     * Depth-first iterator over the trie: entries found on a node are served
     * before its sub-nodes are expanded.
     */
    private static class EntryIterator<K, V> extends UnmodifiableIterator<Map.Entry<K, V>> {

        private final Deque<Map.Entry<K, V>> entries;
        private final Deque<Node<K, V>> nodes;

        public EntryIterator(Node<K, V> node) {
            entries = new ArrayDeque<>();
            nodes = new ArrayDeque<>();
            node.visit(entries, nodes);
        }

        @Override
        public boolean hasNext() {
            return !entries.isEmpty() || !nodes.isEmpty();
        }

        @Override
        public Map.Entry<K, V> next() {
            while (entries.isEmpty()) {
                if (nodes.isEmpty()) {
                    throw new NoSuchElementException();
                }
                final Node<K, V> nextNode = nodes.pop();
                nextNode.visit(entries, nodes);
            }
            return entries.pop();
        }
    }

    private final InnerNode<K, V> root;
    private final int size;

    /**
     * Create a new empty map.
     */
    public CopyOnWriteHashMap() {
        this(new InnerNode<K, V>(), 0);
    }

    private CopyOnWriteHashMap(InnerNode<K, V> root, int size) {
        this.root = root;
        this.size = size;
    }

    @Override
    public boolean containsKey(Object key) {
        // works fine since null values are not supported
        return get(key) != null;
    }

    @Override
    public V get(Object key) {
        Preconditions.checkArgument(key != null, "Null keys are not supported");
        final int hash = key.hashCode();
        return root.get(key, hash);
    }

    @Override
    public int size() {
        assert size != 0 || root.isEmpty();
        return size;
    }

    /**
     * Associate <code>key</code> with <code>value</code> and return a new copy
     * of the hash table. The current hash table is not modified.
     */
    public CopyOnWriteHashMap<K, V> copyAndPut(K key, V value) {
        Preconditions.checkArgument(key != null, "null keys are not supported");
        Preconditions.checkArgument(value != null, "null values are not supported");
        final int hash = key.hashCode();
        final MutableValueInt newValue = new MutableValueInt();
        final InnerNode<K, V> newRoot = root.put(key, hash, TOTAL_HASH_BITS, value, newValue);
        final int newSize = size + newValue.value;
        return new CopyOnWriteHashMap<>(newRoot, newSize);
    }

    /**
     * Same as {@link #copyAndPut(Object, Object)} but for an arbitrary number of entries.
     */
    public CopyOnWriteHashMap<K, V> copyAndPutAll(Map<? extends K, ? extends V> other) {
        return copyAndPutAll(other.entrySet());
    }

    public <K1 extends K, V1 extends V> CopyOnWriteHashMap<K, V> copyAndPutAll(Iterable<Entry<K1, V1>> entries) {
        CopyOnWriteHashMap<K, V> result = this;
        for (Entry<K1, V1> entry : entries) {
            result = result.copyAndPut(entry.getKey(), entry.getValue());
        }
        return result;
    }

    public <K1 extends K, V1 extends V> CopyOnWriteHashMap<K, V> copyAndPutAll(Stream<Entry<K1, V1>> entries) {
        return copyAndPutAll(entries::iterator);
    }

    /**
     * Remove the given key from this map. The current hash table is not modified.
     */
    public CopyOnWriteHashMap<K, V> copyAndRemove(Object key) {
        Preconditions.checkArgument(key != null, "Null keys are not supported");
        final int hash = key.hashCode();
        final InnerNode<K, V> newRoot = root.remove(key, hash);
        if (root == newRoot) {
            return this;
        } else {
            return new CopyOnWriteHashMap<>(newRoot, size - 1);
        }
    }

    /**
     * Same as {@link #copyAndRemove(Object)} but for an arbitrary number of entries.
     */
    public CopyOnWriteHashMap<K, V> copyAndRemoveAll(Collection<?> keys) {
        CopyOnWriteHashMap<K, V> result = this;
        for (Object key : keys) {
            result = result.copyAndRemove(key);
        }
        return result;
    }

    @Override
    public Set<Map.Entry<K, V>> entrySet() {
        return new AbstractSet<Map.Entry<K, V>>() {

            @Override
            public Iterator<java.util.Map.Entry<K, V>> iterator() {
                return new EntryIterator<>(root);
            }

            @Override
            public boolean contains(Object o) {
                // FIX: instanceof is false for null, so the explicit null
                // check was redundant.
                if (!(o instanceof Map.Entry)) {
                    return false;
                }
                Map.Entry<?, ?> entry = (java.util.Map.Entry<?, ?>) o;
                return entry.getValue().equals(CopyOnWriteHashMap.this.get(entry.getKey()));
            }

            @Override
            public int size() {
                return CopyOnWriteHashMap.this.size();
            }
        };
    }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package generic.constraint;

import java.io.*;
import java.util.*;

import org.xml.sax.*;

import generic.jar.ResourceFile;
import ghidra.xml.*;

/**
 * A decision tree is used to find property values that are determined by traversing a tree
 * of constraints.  Each node in the tree has an associated constraint.  If the constraint is
 * satisfied for a given test object, then its child nodes are tested to find more and more
 * specific results.  When there are no children in a node, none of the children's
 * constraints are satisfied, or traversing the satisfied children did not produce
 * a property match, the current node is checked to see if it has a value for the property being
 * searched.  If so, that result is added as a Decision.
 *
 * <P> There can be multiple paths where all constraints are matched, resulting in multiple
 * possible decisions.</P>
 * <P> A non-leaf node can have properties as well, which serve as a default if its constraint
 * is satisfied but none of its children is satisfied or resulted in a decision.</P>
 *
 * @param <T> the type of object that the constraints are checked against.
 */
public class DecisionTree<T> {

	// Root of the constraint tree; its constraint is always satisfied.
	private DecisionNode<T> root;
	// Maps an xml tag name to the Constraint subclass instantiated for that tag.
	private Map<String, Class<? extends Constraint<T>>> constraintClassMap;
	// Tag names that denote property values rather than constraints.
	private Set<String> propertyNameSet;

	public DecisionTree() {
		root = new RootDecisionNode<T>();
		constraintClassMap = new HashMap<String, Class<? extends Constraint<T>>>();
		propertyNameSet = new HashSet<String>();
	}

	/**
	 * Searches the decision tree for values of given property name that match the constraints
	 * within this tree.
	 * @param testObject the object that the constraints are tested against.
	 * @param propertyName the name of the property whose values are being collected.
	 * @return a DecisionSet containing all the values of the given property whose path in the
	 * tree matched all the constraints for the given test object.
	 */
	public DecisionSet getDecisionsSet(T testObject, String propertyName) {
		DecisionSet decisionSet = new DecisionSet(propertyName);
		root.populateDecisions(testObject, decisionSet, propertyName);
		return decisionSet;
	}

	/**
	 * Registers a constraint class to be recognized from an xml constraint specification file.
	 * @param name the name of the constraint which is also the xml tag value.
	 * @param constraintClass the constraint type which will be initialized from the xml
	 * constraint specification file.
	 */
	public void registerConstraintType(String name,
			Class<? extends Constraint<T>> constraintClass) {
		constraintClassMap.put(name, constraintClass);
	}

	/**
	 * Registers a property name.  Every tag in an xml constraint file (except the root tag which
	 * is unused) must be either a constraint name or a property name.
	 * @param propertyName the name of a valid property to be expected in an xml constraints file.
	 */
	public void registerPropertyName(String propertyName) {
		propertyNameSet.add(propertyName);
	}

	/**
	 * Loads the tree from xml data contained within an input stream.  Note: this method can be
	 * called multiple times, with each call appending to the existing tree.
	 * @param name the name of the input source so that decisions can be traced back to
	 * the appropriate xml constraints source.
	 * @param stream the InputStream from which to read an xml constraints specification.
	 * @throws IOException if an I/O problem occurs reading from the stream.
	 * @throws XmlParseException if the XML is not properly formatted or a tag that is not
	 * a constraint name or property name is encountered.
	 */
	public void loadConstraints(String name, InputStream stream)
			throws IOException, XmlParseException {
		XmlPullParser parser;
		try {
			parser = new NonThreadedXmlPullParserImpl(stream, name, new XMLErrorHandler(), false);
		}
		catch (SAXException e) {
			throw new XmlParseException("Sax Exception", e);
		}
		parser.next(); // skip root element start
		processSubContraintsAndProperties(root, parser);
	}

	/**
	 * Loads the tree from an xml constraint file.  Note: this method can be called multiple times,
	 * with each call appending to the existing tree.
	 * @param file the file that contains the xml for the constraint.
	 * @throws IOException if an I/O problem occurs reading from the stream.
	 * @throws XmlParseException if the XML is not properly formatted or a tag that is not
	 * a constraint name or property name is encountered.
	 */
	public void loadConstraints(ResourceFile file)
			throws FileNotFoundException, IOException, XmlParseException {
		InputStream inputStream = file.getInputStream();
		try {
			loadConstraints(file.getName(), inputStream);
		}
		finally {
			// always release the stream; previously it was leaked if parsing threw
			inputStream.close();
		}
	}

	/**
	 * Recursively processes child elements of the given parent node.  Each element must be
	 * either a registered constraint tag (which creates/extends a child node) or a registered
	 * property tag (which stores a value on the parent node).
	 */
	private void processSubContraintsAndProperties(DecisionNode<T> parent, XmlPullParser parser)
			throws XmlParseException {
		XmlElement element = parser.next();
		while (!element.isEnd()) {
			Constraint<T> constraint = readConstraint(element);
			if (constraint != null) {
				DecisionNode<T> node = parent.getOrCreateNodeForContraint(constraint);
				processSubContraintsAndProperties(node, parser);
			}
			else if (propertyNameSet.contains(element.getName())) {
				processPropertyElement(parent, element, parser);
			}
			else {
				throw new XmlParseException("Unknown element tag: " + element.getName());
			}
			element = parser.next();
		}
	}

	/**
	 * Instantiates the constraint registered under the given tag name, or returns null
	 * if the name is not a registered constraint.
	 */
	private Constraint<T> getConstraint(String name) throws XmlParseException {
		Class<? extends Constraint<T>> constraintClass = constraintClassMap.get(name);
		if (constraintClass == null) {
			return null;
		}
		try {
			return constraintClass.newInstance();
		}
		catch (Exception e) {
			throw new XmlParseException(
				"Can't create constraint instance for class " + constraintClass.getName(), e);
		}
	}

	/**
	 * Reads a property element's text value and stores it on the given node.  The element
	 * must contain only text (its next parsed element must be its own end tag).
	 */
	private void processPropertyElement(DecisionNode<T> node, XmlElement element,
			XmlPullParser parser) throws XmlParseException {
		String propertyName = element.getName();
		XmlElement nextElement = parser.next();
		if (!nextElement.isEnd()) {
			throw new XmlParseException("Expected end tag for property " + propertyName);
		}
		node.setProperty(propertyName, nextElement.getText(), parser.getName());
	}

	/**
	 * Builds a Constraint from the given element, loading its data from the element's
	 * attributes.  Returns null if the element's tag is not a registered constraint.
	 */
	private Constraint<T> readConstraint(XmlElement element) throws XmlParseException {
		String name = element.getName();
		Constraint<T> constraint = getConstraint(name);
		if (constraint == null) {
			return null;
		}
		constraint.loadConstraintData(new ConstraintData(element.getAttributes()));
		return constraint;
	}

	/**
	 * Strict SAX error handler: every warning, error, or fatal error aborts the parse.
	 */
	private static class XMLErrorHandler implements ErrorHandler {
		@Override
		public void error(SAXParseException exception) throws SAXException {
			throw new SAXException("Error: " + exception);
		}

		@Override
		public void fatalError(SAXParseException exception) throws SAXException {
			throw new SAXException("Fatal error: " + exception);
		}

		@Override
		public void warning(SAXParseException exception) throws SAXException {
			throw new SAXException("Warning: " + exception);
		}
	}
}
/**************************************************************************
 Exchange Web Services Java API
 Copyright (c) Microsoft Corporation
 All rights reserved.
 MIT License
 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 **************************************************************************/
package microsoft.exchange.webservices.data;

import java.io.IOException;

import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.NTCredentials;
import org.apache.commons.httpclient.auth.AuthChallengeParser;
import org.apache.commons.httpclient.auth.AuthScheme;
import org.apache.commons.httpclient.auth.AuthenticationException;
import org.apache.commons.httpclient.auth.InvalidCredentialsException;
import org.apache.commons.httpclient.auth.MalformedChallengeException;

/**
 * This is a reimplementation of HTTPClient 3.x's
 * org.apache.commons.httpclient.auth.NTLMScheme.<BR/>
 * It will basically use JCIFS (v1.3.15) in order to provide added support for
 * NTLMv2 (instead of trying to create its own Type 1, 2 and 3 messages). <BR/>
 * This class has to be registered manually with HTTPClient before setting
 * NTCredentials: AuthPolicy.registerAuthScheme(AuthPolicy.NTLM,
 * JCIFS_NTLMScheme.class); <BR/>
 * Will <B>not</B> work with HttpClient 4.x which requires AuthEngine to be overriden instead of AuthScheme.
 */
public class EwsJCIFSNTLMScheme implements AuthScheme {

	/** NTLM challenge string (the base64 Type-2 payload received from the server). */
	private String ntlmchallenge = null;

	// Handshake state machine: UNINITIATED -> INITIATED -> TYPE1_MSG_GENERATED
	// -> TYPE2_MSG_RECEIVED -> TYPE3_MSG_GENERATED, with FAILED as the error state.
	private static final int UNINITIATED = 0;
	private static final int INITIATED = 1;
	private static final int TYPE1_MSG_GENERATED = 2;
	private static final int TYPE2_MSG_RECEIVED = 3;
	private static final int TYPE3_MSG_GENERATED = 4;
	private static final int FAILED = Integer.MAX_VALUE;

	/** Authentication process state */
	private int state;

	/**
	 * Fails fast at construction time if the JCIFS library is not on the
	 * classpath, since all message generation below delegates to it.
	 */
	public EwsJCIFSNTLMScheme() throws AuthenticationException {
		// Check if JCIFS is present. If not present, do not proceed.
		try {
			Class.forName("jcifs.ntlmssp.NtlmMessage", false, this.getClass().getClassLoader());
		} catch (ClassNotFoundException e) {
			throw new AuthenticationException("Unable to proceed as JCIFS library is not found.");
		}
	}

	/**
	 * Produces the next "NTLM ..." Authorization header value for the handshake:
	 * a Type-1 message on the first round (or after a failure), a Type-3 message
	 * once the server's Type-2 challenge has been received via processChallenge.
	 * Requires NTCredentials; any other Credentials implementation is rejected.
	 */
	public String authenticate(Credentials credentials, HttpMethod method)
			throws AuthenticationException {
		if (this.state == UNINITIATED) {
			throw new IllegalStateException(
					"NTLM authentication process has not been initiated");
		}

		NTCredentials ntcredentials = null;
		try {
			ntcredentials = (NTCredentials) credentials;
		} catch (ClassCastException e) {
			throw new InvalidCredentialsException(
					"Credentials cannot be used for NTLM authentication: "
							+ credentials.getClass().getName());
		}

		NTLM ntlm = new NTLM();
		ntlm.setCredentialCharset(method.getParams().getCredentialCharset());
		String response = null;
		// INITIATED or FAILED means we have no usable challenge yet: (re)start
		// the handshake with a Type-1 message; otherwise answer the stored
		// Type-2 challenge with a Type-3 message.
		if (this.state == INITIATED || this.state == FAILED) {
			response = ntlm.generateType1Msg(ntcredentials.getHost(),
					ntcredentials.getDomain());
			this.state = TYPE1_MSG_GENERATED;
		} else {
			response = ntlm.generateType3Msg(ntcredentials.getUserName(),
					ntcredentials.getPassword(), ntcredentials.getHost(),
					ntcredentials.getDomain(), this.ntlmchallenge);
			this.state = TYPE3_MSG_GENERATED;
		}
		return "NTLM " + response;
	}

	// Deprecated AuthScheme variant; intentionally unsupported.
	public String authenticate(Credentials credentials, String method, String uri)
			throws AuthenticationException {
		throw new RuntimeException(
				"Not implemented as it is deprecated anyway in Httpclient 3.x");
	}

	// Deprecated AuthScheme method; intentionally unsupported.
	public String getID() {
		throw new RuntimeException(
				"Not implemented as it is deprecated anyway in Httpclient 3.x");
	}

	/**
	 * Returns the authentication parameter with the given name, if available.
	 *
	 * <p>
	 * There are no valid parameters for NTLM authentication so this method
	 * always returns <tt>null</tt>.
	 * </p>
	 *
	 * @param name
	 *            The name of the parameter to be returned
	 *
	 * @return the parameter with the given name
	 */
	public String getParameter(String name) {
		if (name == null) {
			throw new IllegalArgumentException("Parameter name may not be null");
		}
		return null;
	}

	/**
	 * The concept of an authentication realm is not supported by the NTLM
	 * authentication scheme. Always returns <code>null</code>.
	 *
	 * @return <code>null</code>
	 */
	public String getRealm() {
		return null;
	}

	/**
	 * Returns textual designation of the NTLM authentication scheme.
	 *
	 * @return <code>ntlm</code>
	 */
	public String getSchemeName() {
		return "ntlm";
	}

	/**
	 * Tests if the NTLM authentication process has been completed.
	 *
	 * @return <tt>true</tt> if Basic authorization has been processed,
	 *         <tt>false</tt> otherwise.
	 *
	 * @since 3.0
	 */
	public boolean isComplete() {
		return this.state == TYPE3_MSG_GENERATED || this.state == FAILED;
	}

	/**
	 * Returns <tt>true</tt>. NTLM authentication scheme is connection based.
	 *
	 * @return <tt>true</tt>.
	 *
	 * @since 3.0
	 */
	public boolean isConnectionBased() {
		return true;
	}

	/**
	 * Processes the NTLM challenge.
	 *
	 * @param challenge
	 *            the challenge string
	 *
	 * @throws MalformedChallengeException
	 *             is thrown if the authentication challenge is malformed
	 *
	 * @since 3.0
	 */
	public void processChallenge(final String challenge)
			throws MalformedChallengeException {
		String s = AuthChallengeParser.extractScheme(challenge);
		if (!s.equalsIgnoreCase(getSchemeName())) {
			throw new MalformedChallengeException("Invalid NTLM challenge: "
					+ challenge);
		}
		// "NTLM <base64-type2>": anything after the first space is the
		// Type-2 payload; a bare "NTLM" means the server is (re)starting
		// the handshake.
		int i = challenge.indexOf(' ');
		if (i != -1) {
			s = challenge.substring(i, challenge.length());
			this.ntlmchallenge = s.trim();
			this.state = TYPE2_MSG_RECEIVED;
		} else {
			this.ntlmchallenge = "";
			if (this.state == UNINITIATED) {
				this.state = INITIATED;
			} else {
				this.state = FAILED;
			}
		}
	}

	/**
	 * Thin adapter over the JCIFS Type-1/Type-3 message builders.
	 */
	private class NTLM {
		/** Character encoding */
		public static final String DEFAULT_CHARSET = "ASCII";

		/**
		 * The character was used by 3.x's NTLM to encode the username and
		 * password. Apparently, this is not needed in when passing username,
		 * password from NTCredentials to the JCIFS library
		 */
		private String credentialCharset = DEFAULT_CHARSET;

		void setCredentialCharset(String credentialCharset) {
			this.credentialCharset = credentialCharset;
		}

		// Builds the initial negotiation (Type-1) message for the given
		// host/domain and returns it base64-encoded.
		private String generateType1Msg(String host, String domain) {
			jcifs.ntlmssp.Type1Message t1m = new jcifs.ntlmssp.Type1Message(
					jcifs.ntlmssp.Type1Message.getDefaultFlags(), domain, host);
			return jcifs.util.Base64.encode(t1m.toByteArray());
		}

		// Decodes the server's base64 Type-2 challenge and answers it with a
		// base64-encoded Type-3 authentication message.
		private String generateType3Msg(String username, String password,
				String host, String domain, String challenge) {
			jcifs.ntlmssp.Type2Message t2m;
			try {
				t2m = new jcifs.ntlmssp.Type2Message(jcifs.util.Base64.decode(challenge));
			} catch (IOException e) {
				throw new RuntimeException("Invalid Type2 message", e);
			}
			jcifs.ntlmssp.Type3Message t3m = new jcifs.ntlmssp.Type3Message(
					t2m, password, domain, username, host, 0);
			return jcifs.util.Base64.encode(t3m.toByteArray());
		}
	}
}
package com.teamfrugal.budgetapp.ui;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import android.app.Activity;
import android.content.ContentValues;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Environment;
import android.preference.PreferenceManager;
import android.provider.MediaStore;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Spinner;
import com.teamfrugal.budgetapp.R;
import com.teamfrugal.budgetapp.database.DataAccess;
import com.teamfrugal.budgetapp.database.ListContent;
import com.teamfrugal.budgetapp.database.SQLiteHelper;
import com.teamfrugal.budgetapp.ui.quote.ListActivity;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Switch;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.Toast;
import static com.teamfrugal.budgetapp.ui.SettingsActivity.KEY_IMG_MODE;

/**
 * Screen for saving the newest scanned transaction (ListContent.newest) into the
 * local SQLite database.  The user can toggle expense/income (which swaps the
 * category spinner contents), correct the amount, pick a category, and save.
 * If the "store receipt image" preference is enabled, the captured image is
 * copied into the Receipts directory under the transaction id.
 */
public class AddTransactionActivity extends Activity implements OnItemSelectedListener {

	private DataAccess mDataAccess;
	// Category currently selected in the spinner.
	private String mItemSelected;
	// true when the switch is set to "expense" (the default).
	private boolean isExpense;
	private Switch mySwitch;
	// Stored in the COLUMN_type column: "expense" or "income".
	String expenseName = "expense";
	// Last successfully parsed amount; preserved while the text field is mid-edit.
	private double amount = 0.00;

	@Override
	public void onCreate(final Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_transaction);

		//Switch element
		mySwitch = (Switch) findViewById(R.id.switch1);
		mySwitch.setChecked(true);

		// Spinner element
		final Spinner spinner = (Spinner) findViewById(R.id.type_spinner);
		// Spinner click listener
		spinner.setOnItemSelectedListener(this);

		// Spinner Drop down elements
		List<String> expenseCategories = new ArrayList<String>();
		expenseCategories.add("Food/Groceries");
		expenseCategories.add("Gas/Travel");
		expenseCategories.add("Housing");
		expenseCategories.add("Utilities");
		expenseCategories.add("Healthcare");
		expenseCategories.add("Education");
		expenseCategories.add("Personal");
		expenseCategories.add("Entertainment");
		expenseCategories.add("Debt");

		List<String> incomeCategories = new ArrayList<String>();
		incomeCategories.add("Income");

		// Creating adapter for spinner
		final ArrayAdapter<String> dataAdapter = new ArrayAdapter<String>(this,
				android.R.layout.simple_spinner_item, expenseCategories);
		final ArrayAdapter<String> dataAdapter2 = new ArrayAdapter<String>(this,
				android.R.layout.simple_spinner_item, incomeCategories);

		// Drop down layout style - list view with radio button.
		// Applied to BOTH adapters so the income dropdown renders the same way
		// as the expense dropdown (previously only dataAdapter was configured).
		dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
		dataAdapter2.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);

		// attaching data adapter to spinner
		spinner.setAdapter(dataAdapter);

		mySwitch.setOnCheckedChangeListener(new OnCheckedChangeListener() {
			@Override
			public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
				// Checked == expense; swap spinner contents to match.
				if (isChecked) {
					isExpense = true;
					expenseName = "expense";
					spinner.setAdapter(dataAdapter);
				} else {
					isExpense = false;
					expenseName = "income";
					spinner.setAdapter(dataAdapter2);
				}
			}
		});

		final EditText accountBox = (EditText) findViewById(R.id.amountText);
		accountBox.setText(ListContent.newest.amount + "");
		amount = ListContent.newest.amount;

		TextWatcher textWatcher = new TextWatcher() {
			public void afterTextChanged(Editable s) {
				// Guard the parse: while the user is mid-edit the field can be
				// empty or hold a lone "-"/"." — the unguarded parseDouble used
				// to crash the activity with a NumberFormatException.
				try {
					amount = Double.parseDouble(accountBox.getText().toString());
				} catch (NumberFormatException e) {
					// keep the last valid amount until the input parses again
				}
			}

			public void beforeTextChanged(CharSequence s, int start, int count, int after) {
			}

			public void onTextChanged(CharSequence s, int start, int before, int count) {
			}
		};
		accountBox.addTextChangedListener(textWatcher);

		Button add = (Button) findViewById(R.id.add);
		add.setOnClickListener(new View.OnClickListener() {
			public void onClick(View arg0) {
				mDataAccess = new DataAccess(getApplicationContext());
				mDataAccess.open();

				// Insert the transaction row via ContentValues (parameterized —
				// avoids the SQL-injection-prone string concatenation this
				// screen originally used).
				ContentValues ins = new ContentValues();
				ins.put(SQLiteHelper.COLUMN_transID, ListContent.newest.id);
				ins.put(SQLiteHelper.COLUMN_name, ListContent.newest.store);
				ins.put(SQLiteHelper.COLUMN_amount, amount);
				ins.put(SQLiteHelper.COLUMN_account, "a");
				ins.put(SQLiteHelper.COLUMN_category, mItemSelected);
				ins.put(SQLiteHelper.COLUMN_type, expenseName);
				SimpleDateFormat sdf = new SimpleDateFormat(("yyyy/MM/dd HH:mm:ss"));
				String date = sdf.format(new Date());
				ins.put(SQLiteHelper.COLUMN_datetime, date);
				mDataAccess.getDatabase().insert(SQLiteHelper.TABLE_TRANSACTION, null, ins);
				mDataAccess.close();

				// store image if image saving setting is selected
				SharedPreferences mSharedPreferences =
						PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
				int mode2 = Integer.parseInt(mSharedPreferences.getString(KEY_IMG_MODE, "0"));
				System.out.println("MODE22222222 ------- " + mode2);
				if (mode2 == 1) {
					// Copy the captured image into /Receipts/<id>.jpeg.
					// Streams are closed in finally — previously both leaked
					// whenever the copy threw.
					InputStream file = null;
					OutputStream output = null;
					try {
						file = new FileInputStream(
								Environment.getExternalStorageDirectory() + "/image.jpeg");
						File dir = new File(
								Environment.getExternalStorageDirectory() + "/Receipts/");
						File out = new File(Environment.getExternalStorageDirectory()
								+ "/Receipts/" + ListContent.newest.id + ".jpeg");
						dir.mkdirs();
						out.createNewFile();
						output = new FileOutputStream(out, false);
						byte[] buffer = new byte[1024];
						int length;
						while ((length = file.read(buffer)) > 0) {
							output.write(buffer, 0, length);
						}
					} catch (IOException e) {
						e.printStackTrace();
					} finally {
						if (file != null) {
							try {
								file.close();
							} catch (IOException ignored) {
								// best-effort close
							}
						}
						if (output != null) {
							try {
								output.close();
							} catch (IOException ignored) {
								// best-effort close
							}
						}
					}
				}

				// Return to the transaction list once the save is complete.
				getApplicationContext().startActivity(
						new Intent(getApplicationContext(), ListActivity.class));
				finish();
			}
		});
	}

	/**
	 * Spinner callback: remember the chosen category and echo it in a toast.
	 */
	@Override
	public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
		// On selecting a spinner item
		String item = parent.getItemAtPosition(position).toString();
		// Showing selected spinner item
		Toast.makeText(parent.getContext(), "Selected: " + item, Toast.LENGTH_LONG).show();
		mItemSelected = item;
	}

	/**
	 * Spinner callback: nothing to do when no item is selected.
	 */
	public void onNothingSelected(AdapterView<?> arg0) {
		//nothing happens???
	}
}
/**
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.codehaus.mojo.jspc.compiler.tomcat6;

import java.io.BufferedReader;
import java.io.CharArrayWriter;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.Writer;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.StringTokenizer;
import java.util.Vector;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import javax.servlet.jsp.tagext.TagLibraryInfo;

import org.apache.jasper.JasperException;
import org.apache.jasper.JspC;
import org.apache.jasper.JspCompilationContext;
import org.apache.jasper.Options;
import org.apache.jasper.compiler.Compiler;
import org.apache.jasper.compiler.JspConfig;
import org.apache.jasper.compiler.JspRuntimeContext;
import org.apache.jasper.compiler.Localizer;
import org.apache.jasper.compiler.TagPluginManager;
import org.apache.jasper.compiler.TldLocationsCache;
import org.apache.jasper.servlet.JspCServletContext;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tools.ant.AntClassLoader;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.util.FileUtils;

/**
 * Shell for the jspc compiler.  Handles all options associated with the
 * command line and creates compilation contexts which it then compiles
 * according to the specified options.
 *
 * This version can process files from a _single_ webapp at once, i.e.
 * a single docbase can be specified.
 *
 * It can be used as an Ant task using:
 * <pre>
 *   &lt;taskdef classname="org.apache.jasper.JspC" name="jasper" &gt;
 *      &lt;classpath&gt;
 *          &lt;pathelement location="${java.home}/../lib/tools.jar"/&gt;
 *          &lt;fileset dir="${ENV.CATALINA_HOME}/lib"&gt;
 *              &lt;include name="*.jar"/&gt;
 *          &lt;/fileset&gt;
 *          &lt;path refid="myjars"/&gt;
 *       &lt;/classpath&gt;
 *  &lt;/taskdef&gt;
 *
 *  &lt;jasper verbose="0"
 *           package="my.package"
 *           uriroot="${webapps.dir}/${webapp.name}"
 *           webXmlFragment="${build.dir}/generated_web.xml"
 *           outputDir="${webapp.dir}/${webapp.name}/WEB-INF/src/my/package" /&gt;
 * </pre>
 *
 * @author Danno Ferrin
 * @author Pierre Delisle
 * @author Costin Manolache
 * @author Yoav Shapira
 */
public class MultiThreadedJspC extends JspC {

    // Logger
    private static final Log log = LogFactory.getLog(MultiThreadedJspC.class);

    /** Number of worker threads used to compile JSPs in parallel. */
    private int threads = 1;

    /**
     * Maximum time to wait for all compilations to finish, in MINUTES.
     * NOTE: the previous default was TimeUnit.MINUTES.toMinutes(30) (a no-op,
     * = 30) awaited with TimeUnit.MILLISECONDS — i.e. an effective wait of
     * only 30 milliseconds.  The field is now consistently treated as minutes.
     */
    private long compilationTimeout = 30;

    public int getThreads() {
        return threads;
    }

    public void setThreads(int threads) {
        this.threads = threads;
    }

    /** @return the compilation timeout in minutes. */
    public long getCompilationTimeout() {
        return compilationTimeout;
    }

    /** @param compilationTimeoutMinutes the compilation timeout in minutes. */
    public void setCompilationTimeout(long compilationTimeoutMinutes) {
        this.compilationTimeout = compilationTimeoutMinutes;
    }

    /**
     * Executes the compilation.  Resolves the uriroot, scans for JSPs when no
     * explicit pages were given, then compiles each page on a fixed thread
     * pool, collecting per-page failures and reporting them all at the end.
     *
     * @throws BuildException if compilation fails, times out, or is interrupted
     */
    @Override
    public void execute() {
        if (log.isDebugEnabled()) {
            log.debug("execute() starting for " + pages.size() + " pages.");
        }

        try {
            // Derive the uriroot from the first page when it was not set explicitly.
            if (uriRoot == null) {
                if (pages.size() == 0) {
                    throw new JasperException(
                        Localizer.getMessage("jsp.error.jspc.missingTarget"));
                }
                String firstJsp = pages.get(0).toString();
                File firstJspF = new File(firstJsp);
                if (!firstJspF.exists()) {
                    throw new JasperException(Localizer.getMessage(
                        "jspc.error.fileDoesNotExist", firstJsp));
                }
                locateUriRoot(firstJspF);
            }

            if (uriRoot == null) {
                throw new JasperException(
                    Localizer.getMessage("jsp.error.jspc.no_uriroot"));
            }

            File uriRootF = new File(uriRoot);
            if (!uriRootF.isDirectory()) {
                throw new JasperException(
                    Localizer.getMessage("jsp.error.jspc.uriroot_not_dir"));
            }

            if (context == null) {
                initServletContext();
            }

            // No explicit pages, we'll process all .jsp in the webapp
            if (pages.size() == 0) {
                scanFiles(uriRootF);
            }

            initWebXml();

            log.info("compiling with " + getThreads() + " threads");
            ExecutorService executor = Executors.newFixedThreadPool(getThreads());
            // Failures from worker threads are gathered here and reported together.
            final List<JasperException> errorCollector =
                Collections.synchronizedList(new ArrayList<JasperException>());
            for (Object p : pages) {
                String nextjsp = p.toString();

                // Normalize the page path to be relative to the uriroot.
                File fjsp = new File(nextjsp);
                if (!fjsp.isAbsolute()) {
                    fjsp = new File(uriRootF, nextjsp);
                }
                if (!fjsp.exists()) {
                    if (log.isWarnEnabled()) {
                        log.warn(Localizer.getMessage(
                            "jspc.error.fileDoesNotExist", fjsp.toString()));
                    }
                    continue;
                }
                String s = fjsp.getAbsolutePath();
                if (s.startsWith(uriRoot)) {
                    nextjsp = s.substring(uriRoot.length());
                }
                if (nextjsp.startsWith("." + File.separatorChar)) {
                    nextjsp = nextjsp.substring(2);
                }

                final String jspToCompile = nextjsp;
                executor.execute(new Runnable() {
                    public void run() {
                        try {
                            processFile(jspToCompile);
                        }
                        catch (JasperException je) {
                            errorCollector.add(je);
                        }
                    }
                });
            }
            executor.shutdown();
            // BUG FIX: the timeout is in minutes; it was previously passed with
            // TimeUnit.MILLISECONDS, so the build waited only ~30ms and could
            // proceed with compilations still running.  Also fail loudly on a
            // genuine timeout instead of silently continuing.
            if (!executor.awaitTermination(compilationTimeout, TimeUnit.MINUTES)) {
                executor.shutdownNow();
                throw new BuildException("JSP compilation did not complete within "
                    + compilationTimeout + " minutes");
            }

            if (errorCollector.size() > 0) {
                throwBuildException(errorCollector);
            }

            completeWebXml();

            if (addWebXmlMappings) {
                mergeIntoWebXml();
            }
        }
        catch (IOException ioe) {
            throw new BuildException(ioe);
        }
        catch (JasperException je) {
            throwBuildException(Arrays.asList(je));
        }
        catch (InterruptedException e) {
            // Restore the interrupt flag before surfacing the failure.
            Thread.currentThread().interrupt();
            throw new BuildException(e);
        }
        finally {
            if (loader != null) {
                LogFactory.release(loader);
            }
        }
    }

    /**
     * Aggregates all collected compilation failures into a single BuildException:
     * the message lists every root-cause message; the first exception is the cause.
     */
    private void throwBuildException(List<JasperException> errorCollector) {
        StringBuilder errOut = new StringBuilder();
        for (JasperException je : errorCollector) {
            // Unwrap nested JasperExceptions down to the underlying root cause.
            Throwable rootCause = je;
            while (rootCause instanceof JasperException
                    && ((JasperException) rootCause).getRootCause() != null) {
                rootCause = ((JasperException) rootCause).getRootCause();
            }
            // BUG FIX: was "rootCause != errorCollector" (Throwable vs List —
            // always true); only dump the stack when a nested cause was unwrapped.
            if (rootCause != je) {
                rootCause.printStackTrace();
            }
            errOut.append(rootCause.getMessage()).append('\n');
        }
        // throw exception with first error encountered as cause, but all messages
        throw new BuildException(errOut.toString(), errorCollector.get(0));
    }
}
package io.jrevolt.launcher.util;

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;

/**
 * Base64 encode/decode facade that uses {@link java.util.Base64} when running on
 * Java 8+, and otherwise falls back to encoder/decoder implementations extracted
 * from the JDK 8 sources ({@link EncoderFallback}, {@link DecoderFallback}).
 *
 * @author <a href="mailto:patrikbeno@gmail.com">Patrik Beno</a>
 */
public abstract class Base64Support {

    // True when java.util.Base64 is present on this runtime (detected once, below).
    static private boolean isBase64SupportAvailable;

    // RFC 2045 MIME encoding: maximum characters per output line, and the separator.
    private static final int MIMELINEMAX = 76;
    private static final byte[] CRLF = new byte[]{'\r', '\n'};

    static {
        try {
            // Probe for the Java 8 API; absence (pre-8 runtime) selects the fallback.
            Class.forName("java.util.Base64");
            isBase64SupportAvailable = true;
        } catch (ClassNotFoundException ignore) {
            // Intentionally ignored: fallback implementations are used instead.
        }
    }

    /** Returns a MIME (line-wrapped, RFC 2045) Base64 encoder. */
    public static IEncoder getMimeEncoder() {
        return isBase64SupportAvailable ? new Java8Encoder(true) : new EncoderFallback(true);
    }

    /** Returns a MIME (lenient, non-alphabet-skipping) Base64 decoder. */
    public static IDecoder getMimeDecoder() {
        return isBase64SupportAvailable ? new Java8Decoder(true) : new DecoderFallback(true);
    }

    /** Returns a basic (RFC 4648) Base64 encoder without line breaks. */
    public static IEncoder getEncoder() {
        return isBase64SupportAvailable ? new Java8Encoder(false) : new EncoderFallback(false);
    }

    /** Returns a basic (strict) Base64 decoder. */
    public static IDecoder getDecoder() {
        return isBase64SupportAvailable ? new Java8Decoder(false) : new DecoderFallback(false);
    }

    /** Minimal encoder contract shared by the Java 8 delegate and the fallback. */
    static public interface IEncoder {
        byte[] encode(byte[] src);

        String encodeToString(byte[] src);
    }

    /** Minimal decoder contract shared by the Java 8 delegate and the fallback. */
    static public interface IDecoder {
        byte[] decode(byte[] src);

        byte[] decode(String src);
    }

    /** Thin adapter over {@link java.util.Base64.Encoder}. */
    static private class Java8Encoder implements IEncoder {

        Base64.Encoder delegate;

        public Java8Encoder(boolean isMIME) {
            delegate = isMIME ? Base64.getMimeEncoder() : Base64.getEncoder();
        }

        @Override
        public byte[] encode(byte[] src) {
            return delegate.encode(src);
        }

        @Override
        public String encodeToString(byte[] src) {
            return delegate.encodeToString(src);
        }
    }

    /** Thin adapter over {@link java.util.Base64.Decoder}. */
    static private class Java8Decoder implements IDecoder {

        Base64.Decoder delegate;

        public Java8Decoder(boolean isMIME) {
            delegate = isMIME ? Base64.getMimeDecoder() : Base64.getDecoder();
        }

        @Override
        public byte[] decode(byte[] src) {
            return delegate.decode(src);
        }

        @Override
        public byte[] decode(String src) {
            return delegate.decode(src);
        }
    }

    /**
     * extracted from JDK8
     * <p/>
     * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. DO NOT ALTER OR REMOVE COPYRIGHT
     * NOTICES OR THIS FILE HEADER.
     * <p/>
     * This code is free software; you can redistribute it and/or modify it under the terms of the GNU General Public
     * License version 2 only, as published by the Free Software Foundation. Oracle designates this particular file as
     * subject to the "Classpath" exception as provided by Oracle in the LICENSE file that accompanied this code.
     * <p/>
     * This code is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
     * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License version 2 for
     * more details (a copy is included in the LICENSE file that accompanied this code).
     * <p/>
     * You should have received a copy of the GNU General Public License version 2 along with this work; if not, write to
     * the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
     * <p/>
     * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA or visit www.oracle.com if you need
     * additional information or have any questions.
     *
     * @author Xueming Shen
     * @see java.util.Base64
     * @see http://hg.openjdk.java.net/jdk8u/jdk8u/jdk/file/tip/src/share/classes/java/util/Base64.java
     * @see http://openjdk.java.net/legal/gplv2+ce.html
     */
    public static class EncoderFallback implements IEncoder {

        // Line separator bytes (CRLF for MIME mode, null for basic mode).
        private final byte[] newline;
        // Max output characters per line (MIME mode), or -1 for no wrapping.
        private final int linemax;
        // This extraction always emits '=' padding.
        private final boolean doPadding = true;

        private EncoderFallback(boolean isMIME) {
            this.newline = isMIME ? CRLF : null;
            this.linemax = isMIME ? MIMELINEMAX : -1;
        }

        // The standard RFC 4648 Base64 alphabet.
        private static final char[] toBase64 = {
                'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
                'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
                'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
                'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
                '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'
        };

        /**
         * Computes the exact encoded length for {@code srclen} input bytes,
         * including padding and (in MIME mode) line separators.
         */
        private int outLength(int srclen) {
            int len = 0;
            if (doPadding) {
                // Each full or partial 3-byte group becomes 4 output characters.
                len = 4 * ((srclen + 2) / 3);
            } else {
                int n = srclen % 3;
                len = 4 * (srclen / 3) + (n == 0 ? 0 : n + 1);
            }
            if (linemax > 0) // line separators
            {
                len += (len - 1) / linemax * newline.length;
            }
            return len;
        }

        /** Encodes {@code src} into a newly allocated, exactly-sized byte array. */
        public byte[] encode(byte[] src) {
            int len = outLength(src.length);          // dst array size
            byte[] dst = new byte[len];
            int ret = encode0(src, 0, src.length, dst);
            if (ret != dst.length) {
                return Arrays.copyOf(dst, ret);
            }
            return dst;
        }

        /**
         * Encodes {@code src} and converts the (pure-ASCII) result to a String.
         * Uses the deprecated hibyte String constructor, which is safe here
         * because every encoded byte is ASCII.
         */
        @SuppressWarnings("deprecation")
        public String encodeToString(byte[] src) {
            byte[] encoded = encode(src);
            return new String(encoded, 0, 0, encoded.length);
        }

        /**
         * Core encoding loop (from JDK 8): converts each 3-byte group to 4
         * output characters, inserting line separators in MIME mode and
         * handling the 1- or 2-byte tail with '=' padding. Returns the number
         * of bytes written to {@code dst}.
         */
        private int encode0(byte[] src, int off, int end, byte[] dst) {
            char[] base64 = toBase64;
            int sp = off;
            int slen = (end - off) / 3 * 3;           // length of the full-group prefix
            int sl = off + slen;
            if (linemax > 0 && slen > linemax / 4 * 3) {
                // Cap each inner pass at one output line's worth of input.
                slen = linemax / 4 * 3;
            }
            int dp = 0;
            while (sp < sl) {
                int sl0 = Math.min(sp + slen, sl);
                for (int sp0 = sp, dp0 = dp; sp0 < sl0; ) {
                    // Pack 3 bytes into 24 bits, then emit four 6-bit symbols.
                    int bits = (src[sp0++] & 0xff) << 16
                            | (src[sp0++] & 0xff) << 8
                            | (src[sp0++] & 0xff);
                    dst[dp0++] = (byte) base64[(bits >>> 18) & 0x3f];
                    dst[dp0++] = (byte) base64[(bits >>> 12) & 0x3f];
                    dst[dp0++] = (byte) base64[(bits >>> 6) & 0x3f];
                    dst[dp0++] = (byte) base64[bits & 0x3f];
                }
                int dlen = (sl0 - sp) / 3 * 4;
                dp += dlen;
                sp = sl0;
                if (dlen == linemax && sp < end) {
                    // Completed a full MIME line and more input remains: wrap.
                    for (byte b : newline) {
                        dst[dp++] = b;
                    }
                }
            }
            if (sp < end) { // 1 or 2 leftover bytes
                int b0 = src[sp++] & 0xff;
                dst[dp++] = (byte) base64[b0 >> 2];
                if (sp == end) {
                    // One leftover byte -> two symbols + "==".
                    dst[dp++] = (byte) base64[(b0 << 4) & 0x3f];
                    if (doPadding) {
                        dst[dp++] = '=';
                        dst[dp++] = '=';
                    }
                } else {
                    // Two leftover bytes -> three symbols + "=".
                    int b1 = src[sp++] & 0xff;
                    dst[dp++] = (byte) base64[(b0 << 4) & 0x3f | (b1 >> 4)];
                    dst[dp++] = (byte) base64[(b1 << 2) & 0x3f];
                    if (doPadding) {
                        dst[dp++] = '=';
                    }
                }
            }
            return dp;
        }
    }

    /**
     * extracted from JDK8
     * <p/>
     * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. DO NOT ALTER OR REMOVE COPYRIGHT
     * NOTICES OR THIS FILE HEADER.
     * <p/>
     * This code is free software; you can redistribute it and/or modify it under the terms of the GNU General Public
     * License version 2 only, as published by the Free Software Foundation. Oracle designates this particular file as
     * subject to the "Classpath" exception as provided by Oracle in the LICENSE file that accompanied this code.
     * <p/>
     * This code is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
     * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License version 2 for
     * more details (a copy is included in the LICENSE file that accompanied this code).
     * <p/>
     * You should have received a copy of the GNU General Public License version 2 along with this work; if not, write to
     * the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
     * <p/>
     * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA or visit www.oracle.com if you need
     * additional information or have any questions.
     *
     * @author Xueming Shen
     * @see java.util.Base64
     * @see http://hg.openjdk.java.net/jdk8u/jdk8u/jdk/file/tip/src/share/classes/java/util/Base64.java
     * @see http://openjdk.java.net/legal/gplv2+ce.html
     */
    public static class DecoderFallback implements IDecoder {

        // MIME mode silently skips non-alphabet bytes; basic mode rejects them.
        private final boolean isMIME;

        private DecoderFallback(boolean isMIME) {
            this.isMIME = isMIME;
        }

        // Reverse lookup: byte value -> 6-bit symbol value, -1 for non-alphabet,
        // -2 for the '=' padding character.
        private static final int[] fromBase64 = new int[256];

        static {
            Arrays.fill(fromBase64, -1);
            for (int i = 0; i < EncoderFallback.toBase64.length; i++) {
                fromBase64[EncoderFallback.toBase64[i]] = i;
            }
            fromBase64['='] = -2;
        }

        /** Decodes {@code src}, returning an exactly-sized result array. */
        public byte[] decode(byte[] src) {
            byte[] dst = new byte[outLength(src, 0, src.length)];
            int ret = decode0(src, 0, src.length, dst);
            if (ret != dst.length) {
                dst = Arrays.copyOf(dst, ret);
            }
            return dst;
        }

        /** Decodes a String by treating each char as one ISO-8859-1 byte (as the JDK does). */
        public byte[] decode(String src) {
            return decode(src.getBytes(StandardCharsets.ISO_8859_1));
        }

        /**
         * Computes an upper bound on the decoded length of {@code src[sp..sl)},
         * accounting for padding and (in MIME mode) for skipped non-alphabet bytes.
         */
        private int outLength(byte[] src, int sp, int sl) {
            int paddings = 0;
            int len = sl - sp;
            if (len == 0) {
                return 0;
            }
            if (len < 2) {
                // JDK extraction note: fromBase64[0] is always -1, so MIME mode
                // treats a single stray byte as empty input; strict mode rejects it.
                if (isMIME && fromBase64[0] == -1) {
                    return 0;
                }
                throw new IllegalArgumentException(
                        "Input byte[] should at least have 2 bytes for base64 bytes");
            }
            if (isMIME) {
                // scan all bytes to fill out all non-alphabet. a performance
                // trade-off of pre-scan or Arrays.copyOf
                int n = 0;
                while (sp < sl) {
                    int b = src[sp++] & 0xff;
                    if (b == '=') {
                        len -= (sl - sp + 1);
                        break;
                    }
                    if ((b = fromBase64[b]) == -1) {
                        n++;
                    }
                }
                len -= n;
            } else {
                // Strict mode: only trailing '=' characters count as padding.
                if (src[sl - 1] == '=') {
                    paddings++;
                    if (src[sl - 2] == '=') {
                        paddings++;
                    }
                }
            }
            if (paddings == 0 && (len & 0x3) != 0) {
                paddings = 4 - (len & 0x3);
            }
            return 3 * ((len + 3) / 4) - paddings;
        }

        /**
         * Core decoding loop (from JDK 8): accumulates 6-bit symbols into a
         * 24-bit buffer, flushing 3 bytes per complete 4-symbol unit, and
         * validates padding/tail structure. Returns the number of bytes written.
         */
        private int decode0(byte[] src, int sp, int sl, byte[] dst) {
            int dp = 0;
            int bits = 0;
            int shiftto = 18; // pos of first byte of 4-byte atom
            while (sp < sl) {
                int b = src[sp++] & 0xff;
                if ((b = fromBase64[b]) < 0) {
                    if (b == -2) { // padding byte '='
                        // =     shiftto==18 unnecessary padding
                        // x=    shiftto==12 a dangling single x
                        //       x to be handled together with non-padding case
                        // xx=   shiftto==6&&sp==sl missing last =
                        // xx=y  shiftto==6 last is not =
                        if (shiftto == 6 && (sp == sl || src[sp++] != '=')
                                || shiftto == 18) {
                            throw new IllegalArgumentException(
                                    "Input byte array has wrong 4-byte ending unit");
                        }
                        break;
                    }
                    if (isMIME) // skip if for rfc2045
                    {
                        continue;
                    } else {
                        throw new IllegalArgumentException(
                                "Illegal base64 character "
                                        + Integer.toString(src[sp - 1], 16));
                    }
                }
                bits |= (b << shiftto);
                shiftto -= 6;
                if (shiftto < 0) {
                    // Four symbols collected: emit three decoded bytes.
                    dst[dp++] = (byte) (bits >> 16);
                    dst[dp++] = (byte) (bits >> 8);
                    dst[dp++] = (byte) (bits);
                    shiftto = 18;
                    bits = 0;
                }
            }
            // reached end of byte array or hit padding '=' characters.
            if (shiftto == 6) {
                dst[dp++] = (byte) (bits >> 16);
            } else if (shiftto == 0) {
                dst[dp++] = (byte) (bits >> 16);
                dst[dp++] = (byte) (bits >> 8);
            } else if (shiftto == 12) {
                // dangling single "x", incorrectly encoded.
                throw new IllegalArgumentException(
                        "Last unit does not have enough valid bits");
            }
            // anything left is invalid, if is not MIME.
            // if MIME, ignore all non-base64 character
            while (sp < sl) {
                if (isMIME && fromBase64[src[sp++]] < 0) {
                    continue;
                }
                throw new IllegalArgumentException(
                        "Input byte array has incorrect ending byte at " + sp);
            }
            return dp;
        }
    }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.directory.model;

import java.io.Serializable;

/**
 * <p>
 * Contains information for the <a>CreateDirectory</a> operation when a Simple
 * AD directory is being created.
 * </p>
 */
public class DirectoryVpcSettings implements Serializable, Cloneable {

    /** The identifier of the VPC to create the Simple AD directory in. */
    private String vpcId;

    /**
     * The identifiers of the subnets for the directory servers. The two subnets
     * must be in different Availability Zones. AWS Directory Service creates a
     * directory server and a DNS server in each of these subnets.
     */
    private com.amazonaws.internal.SdkInternalList<String> subnetIds;

    /**
     * Sets the identifier of the VPC to create the Simple AD directory in.
     *
     * @param vpcId the VPC identifier
     */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /**
     * Returns the identifier of the VPC to create the Simple AD directory in.
     *
     * @return the VPC identifier
     */
    public String getVpcId() {
        return vpcId;
    }

    /**
     * Fluent variant of {@link #setVpcId(String)}.
     *
     * @param vpcId the VPC identifier
     * @return this object, for method chaining
     */
    public DirectoryVpcSettings withVpcId(String vpcId) {
        setVpcId(vpcId);
        return this;
    }

    /**
     * Returns the identifiers of the subnets for the directory servers,
     * lazily initializing the backing list on first access.
     *
     * @return the (possibly empty) list of subnet identifiers
     */
    public java.util.List<String> getSubnetIds() {
        if (subnetIds != null) {
            return subnetIds;
        }
        subnetIds = new com.amazonaws.internal.SdkInternalList<String>();
        return subnetIds;
    }

    /**
     * Replaces the subnet identifiers with a defensive copy of the given
     * collection; a null argument clears the list.
     *
     * @param subnetIds the subnet identifiers, or null
     */
    public void setSubnetIds(java.util.Collection<String> subnetIds) {
        this.subnetIds = (subnetIds == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<String>(subnetIds);
    }

    /**
     * Appends the given subnet identifiers to the existing list (if any).
     * Use {@link #setSubnetIds(java.util.Collection)} or
     * {@link #withSubnetIds(java.util.Collection)} to override existing values.
     *
     * @param subnetIds subnet identifiers to append
     * @return this object, for method chaining
     */
    public DirectoryVpcSettings withSubnetIds(String... subnetIds) {
        if (this.subnetIds == null) {
            // Pre-size the backing list for the values about to be appended.
            setSubnetIds(new com.amazonaws.internal.SdkInternalList<String>(
                    subnetIds.length));
        }
        for (String id : subnetIds) {
            this.subnetIds.add(id);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setSubnetIds(java.util.Collection)}.
     *
     * @param subnetIds the subnet identifiers, or null
     * @return this object, for method chaining
     */
    public DirectoryVpcSettings withSubnetIds(
            java.util.Collection<String> subnetIds) {
        setSubnetIds(subnetIds);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getVpcId() != null) {
            sb.append("VpcId: ").append(getVpcId()).append(",");
        }
        if (getSubnetIds() != null) {
            sb.append("SubnetIds: ").append(getSubnetIds());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DirectoryVpcSettings)) {
            return false;
        }
        DirectoryVpcSettings that = (DirectoryVpcSettings) obj;
        return nullSafeEquals(getVpcId(), that.getVpcId())
                && nullSafeEquals(getSubnetIds(), that.getSubnetIds());
    }

    /** Null-tolerant equality check used by {@link #equals(Object)}. */
    private static boolean nullSafeEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode
                + ((getVpcId() == null) ? 0 : getVpcId().hashCode());
        hashCode = prime * hashCode
                + ((getSubnetIds() == null) ? 0 : getSubnetIds().hashCode());
        return hashCode;
    }

    @Override
    public DirectoryVpcSettings clone() {
        try {
            return (DirectoryVpcSettings) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.mesos;

import alluxio.conf.ServerConfiguration;
import alluxio.Constants;
import alluxio.conf.PropertyKey;
import alluxio.conf.Source.Type;
import alluxio.util.FormatUtils;
import alluxio.util.io.PathUtils;

import com.google.common.base.Joiner;
import org.apache.mesos.Protos;
import org.apache.mesos.Protos.CommandInfo;
import org.apache.mesos.Protos.CommandInfo.URI;
import org.apache.mesos.Scheduler;
import org.apache.mesos.SchedulerDriver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Class for responding to Mesos offers to launch Alluxio on Mesos. Launches exactly one
 * master (optionally pinned to a required hostname) before any workers, then at most one
 * worker per distinct host.
 *
 * @deprecated since version 2.0
 */
@Deprecated
public class AlluxioScheduler implements Scheduler {
  private static final Logger LOG = LoggerFactory.getLogger(AlluxioScheduler.class);

  // If non-null, the master may only be launched on this hostname.
  private final String mRequiredMasterHostname;

  // Set once a master task is reported RUNNING; workers are only launched after this.
  private boolean mMasterLaunched = false;
  // Hostname the master was launched on; exported to workers via ALLUXIO_MASTER_HOSTNAME.
  private String mMasterHostname = "";
  private String mTaskName = "";
  // Task id assigned to the master, used to match status updates.
  private int mMasterTaskId;
  // Hosts that already run a worker (one worker per host).
  private Set<String> mWorkers = new HashSet<String>();
  int mLaunchedTasks = 0;
  int mMasterCount = 0;

  /**
   * Creates a new {@link AlluxioScheduler}.
   *
   * @param requiredMasterHostname hostname to launch the Alluxio master on; if this is null any
   *        host may be used
   */
  public AlluxioScheduler(String requiredMasterHostname) {
    mRequiredMasterHostname = requiredMasterHostname;
  }

  @Override
  public void disconnected(SchedulerDriver driver) {
    LOG.info("Disconnected from master");
  }

  @Override
  public void error(SchedulerDriver driver, String message) {
    LOG.error("Error: {}", message);
  }

  @Override
  public void executorLost(SchedulerDriver driver, Protos.ExecutorID executorId,
      Protos.SlaveID slaveId, int status) {
    LOG.info("Executor {} was lost", executorId.getValue());
  }

  @Override
  public void frameworkMessage(SchedulerDriver driver, Protos.ExecutorID executorId,
      Protos.SlaveID slaveId, byte[] data) {
    LOG.info("Executor: {}, slave: {}, data: {}", executorId.getValue(), slaveId.getValue(),
        Arrays.toString(data));
  }

  @Override
  public void offerRescinded(SchedulerDriver driver, Protos.OfferID offerId) {
    LOG.info("Offered {} rescinded", offerId.getValue());
  }

  @Override
  public void registered(final SchedulerDriver driver, Protos.FrameworkID frameworkId,
      Protos.MasterInfo masterInfo) {
    // Stop the driver cleanly when the JVM shuts down.
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
      @Override
      public void run() {
        driver.stop();
      }
    }));
    LOG.info("Registered framework {} with master {}:{}", frameworkId.getValue(),
        masterInfo.getHostname(), masterInfo.getPort());
  }

  @Override
  public void reregistered(SchedulerDriver driver, Protos.MasterInfo masterInfo) {
    LOG.info("Registered framework with master {}:{}", masterInfo.getHostname(),
        masterInfo.getPort());
  }

  /**
   * Evaluates each offer and launches at most one task per offer: a master executor if a
   * master is still needed and the offer satisfies the master's cpu/mem/port/hostname
   * requirements, otherwise a worker executor if the master is running and this host has no
   * worker yet; offers that fit neither role are declined.
   */
  @Override
  public void resourceOffers(SchedulerDriver driver, List<Protos.Offer> offers) {
    // Resource requirements from configuration; memory values are in MB.
    long masterCpu = ServerConfiguration.getInt(PropertyKey.INTEGRATION_MASTER_RESOURCE_CPU);
    long masterMem =
        ServerConfiguration.getBytes(PropertyKey.INTEGRATION_MASTER_RESOURCE_MEM) / Constants.MB;
    long workerCpu = ServerConfiguration.getInt(PropertyKey.INTEGRATION_WORKER_RESOURCE_CPU);
    long workerOverheadMem =
        ServerConfiguration.getBytes(PropertyKey.INTEGRATION_WORKER_RESOURCE_MEM) / Constants.MB;
    long ramdiskMem = ServerConfiguration.getBytes(PropertyKey.WORKER_MEMORY_SIZE) / Constants.MB;

    LOG.info("Master launched {}, master count {}, "
        + "requested master cpu {} mem {} MB and required master hostname {}",
        mMasterLaunched, mMasterCount, masterCpu, masterMem, mRequiredMasterHostname);

    for (Protos.Offer offer : offers) {
      Protos.Offer.Operation.Launch.Builder launch = Protos.Offer.Operation.Launch.newBuilder();
      // Sum the cpu and mem scalars advertised by this offer.
      double offerCpu = 0;
      double offerMem = 0;
      for (Protos.Resource resource : offer.getResourcesList()) {
        if (resource.getName().equals(Constants.MESOS_RESOURCE_CPUS)) {
          offerCpu += resource.getScalar().getValue();
        } else if (resource.getName().equals(Constants.MESOS_RESOURCE_MEM)) {
          offerMem += resource.getScalar().getValue();
        } else {
          // Other resources are currently ignored.
        }
      }

      LOG.info("Received offer {} on host {} with cpus {} and mem {} MB and hasMasterPorts {}",
          offer.getId().getValue(), offer.getHostname(), offerCpu, offerMem,
          OfferUtils.hasAvailableMasterPorts(offer));

      Protos.ExecutorInfo.Builder executorBuilder = Protos.ExecutorInfo.newBuilder();
      List<Protos.Resource> resources;
      if (!mMasterLaunched && offerCpu >= masterCpu && offerMem >= masterMem
          && mMasterCount < ServerConfiguration
              .getInt(PropertyKey.INTEGRATION_MESOS_ALLUXIO_MASTER_NODE_COUNT)
          && OfferUtils.hasAvailableMasterPorts(offer)
          && (mRequiredMasterHostname == null
              || mRequiredMasterHostname.equals(offer.getHostname()))) {
        LOG.debug("Creating Alluxio Master executor");
        executorBuilder
            .setName("Alluxio Master Executor")
            .setSource("master")
            .setExecutorId(Protos.ExecutorID.newBuilder().setValue("master"))
            .addAllResources(getExecutorResources())
            .setCommand(
                Protos.CommandInfo
                    .newBuilder()
                    .setValue(createStartAlluxioCommand("alluxio-master-mesos.sh"))
                    .addAllUris(getExecutorDependencyURIList())
                    .setEnvironment(
                        Protos.Environment
                            .newBuilder()
                            .addVariables(
                                Protos.Environment.Variable.newBuilder()
                                    .setName("ALLUXIO_MASTER_MOUNT_TABLE_ROOT_UFS")
                                    .setValue(ServerConfiguration
                                        .get(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS))
                                    .build())
                            .addVariables(
                                Protos.Environment.Variable.newBuilder()
                                    .setName("ALLUXIO_MESOS_SITE_PROPERTIES_CONTENT")
                                    .setValue(createAlluxioSiteProperties())
                                    .build())
                            .build()));
        // pre-build resource list here, then use it to build Protos.Task later.
        resources = getMasterRequiredResources(masterCpu, masterMem);
        mMasterHostname = offer.getHostname();
        mTaskName = ServerConfiguration.get(PropertyKey.INTEGRATION_MESOS_ALLUXIO_MASTER_NAME);
        mMasterCount++;
        mMasterTaskId = mLaunchedTasks;
      } else if (mMasterLaunched && !mWorkers.contains(offer.getHostname())
          && offerCpu >= workerCpu && offerMem >= (ramdiskMem + workerOverheadMem)
          && OfferUtils.hasAvailableWorkerPorts(offer)) {
        LOG.debug("Creating Alluxio Worker executor");
        final String memSize = FormatUtils.getSizeFromBytes((long) ramdiskMem * Constants.MB);
        executorBuilder
            .setName("Alluxio Worker Executor")
            .setSource("worker")
            .setExecutorId(Protos.ExecutorID.newBuilder().setValue("worker"))
            .addAllResources(getExecutorResources())
            .setCommand(
                Protos.CommandInfo
                    .newBuilder()
                    .setValue(createStartAlluxioCommand("alluxio-worker-mesos.sh"))
                    .addAllUris(getExecutorDependencyURIList())
                    .setEnvironment(
                        Protos.Environment
                            .newBuilder()
                            .addVariables(
                                Protos.Environment.Variable.newBuilder()
                                    .setName("ALLUXIO_MASTER_HOSTNAME").setValue(mMasterHostname)
                                    .build())
                            .addVariables(
                                Protos.Environment.Variable.newBuilder()
                                    .setName("ALLUXIO_WORKER_MEMORY_SIZE").setValue(memSize)
                                    .build())
                            .addVariables(
                                Protos.Environment.Variable.newBuilder()
                                    .setName("ALLUXIO_MASTER_MOUNT_TABLE_ROOT_UFS")
                                    .setValue(ServerConfiguration
                                        .get(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS))
                                    .build())
                            .addVariables(
                                Protos.Environment.Variable.newBuilder()
                                    .setName("ALLUXIO_MESOS_SITE_PROPERTIES_CONTENT")
                                    .setValue(createAlluxioSiteProperties())
                                    .build())
                            .build()));
        // pre-build resource list here, then use it to build Protos.Task later.
        resources = getWorkerRequiredResources(workerCpu, ramdiskMem + workerOverheadMem);
        mWorkers.add(offer.getHostname());
        mTaskName = ServerConfiguration.get(PropertyKey.INTEGRATION_MESOS_ALLUXIO_WORKER_NAME);
      } else {
        // The resource offer cannot be used to start either master or a worker.
        LOG.info("Declining offer {}", offer.getId().getValue());
        driver.declineOffer(offer.getId());
        continue;
      }

      Protos.TaskID taskId =
          Protos.TaskID.newBuilder().setValue(String.valueOf(mLaunchedTasks)).build();

      LOG.info("Launching task {} using offer {}", taskId.getValue(), offer.getId().getValue());

      Protos.TaskInfo task = Protos.TaskInfo
          .newBuilder()
          .setName(mTaskName)
          .setTaskId(taskId)
          .setSlaveId(offer.getSlaveId())
          .addAllResources(resources)
          .setExecutor(executorBuilder).build();

      launch.addTaskInfos(Protos.TaskInfo.newBuilder(task));
      mLaunchedTasks++;

      // NOTE: We use the new API `acceptOffers` here to launch tasks.
      // The 'launchTasks' API will be deprecated.
      List<Protos.OfferID> offerIds = new ArrayList<Protos.OfferID>();
      offerIds.add(offer.getId());
      List<Protos.Offer.Operation> operations = new ArrayList<Protos.Offer.Operation>();
      Protos.Offer.Operation operation =
          Protos.Offer.Operation.newBuilder().setType(Protos.Offer.Operation.Type.LAUNCH)
              .setLaunch(launch).build();
      operations.add(operation);
      Protos.Filters filters = Protos.Filters.newBuilder().setRefuseSeconds(1).build();
      driver.acceptOffers(offerIds, operations, filters);
    }
  }

  /**
   * Serializes every explicitly-set (non-default) configuration property into
   * "key=value" lines, passed to executors via ALLUXIO_MESOS_SITE_PROPERTIES_CONTENT.
   */
  private String createAlluxioSiteProperties() {
    StringBuilder siteProperties = new StringBuilder();
    for (PropertyKey key : ServerConfiguration.keySet()) {
      if (ServerConfiguration.isSet(key)
          && ServerConfiguration.getSource(key).getType() != Type.DEFAULT) {
        siteProperties
            .append(String.format("%s=%s%n", key.getName(), ServerConfiguration.get(key)));
      }
    }
    return siteProperties.toString();
  }

  /**
   * Builds the shell command line ("cmd1 && cmd2 && ...") that an executor runs to start
   * the given Alluxio launch script, optionally setting up a downloaded JDK and/or a
   * downloaded Alluxio tarball first.
   */
  private static String createStartAlluxioCommand(String command) {
    List<String> commands = new ArrayList<>();
    commands.add(String.format("echo 'Starting Alluxio with %s'", command));
    if (installJavaFromUrl()) {
      commands
          .add("export JAVA_HOME=" + ServerConfiguration.get(PropertyKey.INTEGRATION_MESOS_JDK_PATH));
      commands.add("export PATH=$PATH:$JAVA_HOME/bin");
    }

    commands.add("mkdir conf");
    commands.add("touch conf/alluxio-env.sh");
    // If a jar is supplied, start Alluxio from the jar. Otherwise assume that Alluxio is already
    // installed at PropertyKey.HOME.
    if (installAlluxioFromUrl()) {
      commands.add("rm *.tar.gz");
      // Handle the case where the root directory is named "alluxio" as well as the case where the
      // root directory is named alluxio-$VERSION.
      commands.add("mv alluxio* alluxio_tmp");
      commands.add("mv alluxio_tmp alluxio");
    }
    String home = installAlluxioFromUrl() ? "alluxio" : ServerConfiguration.get(PropertyKey.HOME);
    commands
        .add(String.format("cp %s conf", PathUtils.concatPath(home, "conf", "log4j.properties")));
    commands.add(PathUtils.concatPath(home, "integration", "mesos", "bin", command));
    return Joiner.on(" && ").join(commands);
  }

  @Override
  public void slaveLost(SchedulerDriver driver, Protos.SlaveID slaveId) {
    // TODO(jiri): Handle lost Mesos slaves.
    LOG.info("Executor {} was lost", slaveId.getValue());
  }

  /**
   * Tracks master lifecycle from task status updates: a failed/lost/errored master task
   * decrements the master count (allowing relaunch); a RUNNING master task enables worker
   * launches.
   */
  @Override
  public void statusUpdate(SchedulerDriver driver, Protos.TaskStatus status) {
    String taskId = status.getTaskId().getValue();
    Protos.TaskState state = status.getState();
    LOG.info("Task {} is in state {}", taskId, state);
    // TODO(jiri): Handle the case when an Alluxio master and/or worker task fails.
    // In particular, we should enable support for the fault tolerant mode of Alluxio to account
    // for Alluxio master process failures and keep track of the running number of Alluxio
    // masters.

    switch (status.getState()) {
      case TASK_FAILED: // intend to fall through
      case TASK_LOST: // intend to fall through
      case TASK_ERROR:
        if (status.getTaskId().getValue().equals(String.valueOf(mMasterTaskId))) {
          mMasterCount--;
        }
        break;
      case TASK_RUNNING:
        if (status.getTaskId().getValue().equals(String.valueOf(mMasterTaskId))) {
          mMasterLaunched = true;
        }
        break;
      default:
        break;
    }
  }

  /**
   * Builds the master task's resource list: cpu/mem scalars plus the master's RPC and
   * web UI ports as range resources.
   */
  private List<Protos.Resource> getMasterRequiredResources(long masterCpus, long masterMem) {
    List<Protos.Resource> resources = getCoreRequiredResouces(masterCpus, masterMem);
    // Set master RPC port and web UI port as range resources for this task.
    // By default, it would require 19998, 19999 ports for master process.
    resources.add(Protos.Resource.newBuilder()
        .setName(Constants.MESOS_RESOURCE_PORTS)
        .setType(Protos.Value.Type.RANGES)
        .setRanges(Protos.Value.Ranges.newBuilder()
            .addRange(Protos.Value.Range.newBuilder()
                .setBegin(ServerConfiguration.getLong(PropertyKey.MASTER_WEB_PORT))
                .setEnd(ServerConfiguration.getLong(PropertyKey.MASTER_WEB_PORT)))
            .addRange((Protos.Value.Range.newBuilder()
                .setBegin(ServerConfiguration.getLong(PropertyKey.MASTER_RPC_PORT))
                .setEnd(ServerConfiguration.getLong(PropertyKey.MASTER_RPC_PORT))))).build());
    return resources;
  }

  /**
   * Builds the worker task's resource list: cpu/mem scalars plus the worker's RPC and
   * web UI ports as range resources.
   */
  private List<Protos.Resource> getWorkerRequiredResources(long workerCpus, long workerMem) {
    List<Protos.Resource> resources = getCoreRequiredResouces(workerCpus, workerMem);
    // Set worker RPC port and web UI port as range resources for this task.
    // By default, it would require 29998, 29999, 30000 ports for worker process.
    resources.add(Protos.Resource.newBuilder()
        .setName(Constants.MESOS_RESOURCE_PORTS)
        .setType(Protos.Value.Type.RANGES)
        .setRanges(Protos.Value.Ranges.newBuilder()
            .addRange(Protos.Value.Range.newBuilder()
                .setBegin(ServerConfiguration.getLong(PropertyKey.WORKER_RPC_PORT))
                .setEnd(ServerConfiguration.getLong(PropertyKey.WORKER_RPC_PORT)))
            .addRange((Protos.Value.Range.newBuilder()
                .setBegin(ServerConfiguration.getLong(PropertyKey.WORKER_WEB_PORT))
                .setEnd(ServerConfiguration.getLong(PropertyKey.WORKER_WEB_PORT))))).build());
    return resources;
  }

  /** Builds the shared cpu/mem scalar resources requested for a task. */
  private List<Protos.Resource> getCoreRequiredResouces(long cpus, long mem) {
    // Build cpu/mem resource for task.
    List<Protos.Resource> resources = new ArrayList<Protos.Resource>();
    resources.add(Protos.Resource.newBuilder()
        .setName(Constants.MESOS_RESOURCE_CPUS)
        .setType(Protos.Value.Type.SCALAR)
        .setScalar(Protos.Value.Scalar.newBuilder().setValue(cpus)).build());
    resources.add(Protos.Resource.newBuilder()
        .setName(Constants.MESOS_RESOURCE_MEM)
        .setType(Protos.Value.Type.SCALAR)
        .setScalar(Protos.Value.Scalar.newBuilder().setValue(mem)).build());
    return resources;
  }

  /** Builds the minimal resources reserved for the executor process itself. */
  private List<Protos.Resource> getExecutorResources() {
    // JIRA: https://issues.apache.org/jira/browse/MESOS-1807
    // From Mesos 0.22.0, executors must set CPU resources to at least 0.01 and
    // memory resources to at least 32MB.
    List<Protos.Resource> resources = new ArrayList<Protos.Resource>(2);
    // Both cpus/mem are "scalar" type, which means a double value should be used.
    // The resource name is "cpus", type is scalar and the value is 0.1 to tell Mesos
    // this executor would allocate 0.1 cpu for itself.
    resources.add(Protos.Resource.newBuilder().setName(Constants.MESOS_RESOURCE_CPUS)
        .setType(Protos.Value.Type.SCALAR)
        .setScalar(Protos.Value.Scalar.newBuilder().setValue(0.1d)).build());
    // The resource name is "mem", type is scalar and the value is 32.0MB to tell Mesos
    // this executor would allocate 32.0MB mem for itself.
    resources.add(Protos.Resource.newBuilder().setName(Constants.MESOS_RESOURCE_MEM)
        .setType(Protos.Value.Type.SCALAR)
        .setScalar(Protos.Value.Scalar.newBuilder().setValue(32.0d)).build());
    return resources;
  }

  /**
   * Returns the URIs (JDK and/or Alluxio tarball) Mesos should fetch and extract into the
   * executor sandbox before launch; empty when both are installed locally.
   */
  private static List<CommandInfo.URI> getExecutorDependencyURIList() {
    List<URI> dependencies = new ArrayList<>();
    if (installJavaFromUrl()) {
      dependencies.add(CommandInfo.URI.newBuilder()
          .setValue(ServerConfiguration.get(PropertyKey.INTEGRATION_MESOS_JDK_URL)).setExtract(true)
          .build());
    }
    if (installAlluxioFromUrl()) {
      dependencies.add(CommandInfo.URI.newBuilder()
          .setValue(ServerConfiguration.get(PropertyKey.INTEGRATION_MESOS_ALLUXIO_JAR_URL))
          .setExtract(true).build());
    }
    return dependencies;
  }

  /** True when a JDK download URL is configured (i.e. not the "LOCAL" marker). */
  private static boolean installJavaFromUrl() {
    return ServerConfiguration.isSet(PropertyKey.INTEGRATION_MESOS_JDK_URL) && !ServerConfiguration
        .get(PropertyKey.INTEGRATION_MESOS_JDK_URL).equalsIgnoreCase(Constants.MESOS_LOCAL_INSTALL);
  }

  /** True when an Alluxio tarball download URL is configured (i.e. not the "LOCAL" marker). */
  private static boolean installAlluxioFromUrl() {
    return ServerConfiguration.isSet(PropertyKey.INTEGRATION_MESOS_ALLUXIO_JAR_URL)
        && !ServerConfiguration.get(PropertyKey.INTEGRATION_MESOS_ALLUXIO_JAR_URL)
            .equalsIgnoreCase(Constants.MESOS_LOCAL_INSTALL);
  }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.gradle.model;

import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.util.*;

/**
 * Default, mutable implementation of {@link ExternalProject}/{@link ExternalProjectPreview}:
 * a plain data holder describing one Gradle project — its coordinates (id, name, qualified
 * name, group, version), directories, child projects, tasks, source sets, plugins,
 * properties and artifacts.
 *
 * @author Vladislav.Soroka
 */
public class DefaultExternalProject implements ExternalProject, ExternalProjectPreview {
  private static final long serialVersionUID = 1L;

  @NotNull
  private String myId;
  @NotNull
  private String myName;
  @NotNull
  private String myQName;
  @Nullable
  private String myDescription;
  @NotNull
  private String myGroup;
  @NotNull
  private String myVersion;
  // Keyed by child project name.
  @NotNull
  private Map<String, ExternalProject> myChildProjects;
  @NotNull
  private File myProjectDir;
  @NotNull
  private File myBuildDir;
  @Nullable
  private File myBuildFile;
  // Keyed by task name.
  @NotNull
  private Map<String, ExternalTask> myTasks;
  @NotNull
  private Map<String, ?> myProperties;
  // Keyed by source set name.
  @NotNull
  private Map<String, ExternalSourceSet> mySourceSets;
  @NotNull
  private String myExternalSystemId;
  // Keyed by plugin id.
  @NotNull
  private Map<String, ExternalPlugin> myPlugins;
  @NotNull
  private List<File> myArtifacts;
  // Configuration name -> artifact files produced for that configuration.
  @NotNull
  private Map<String, Set<File>> myArtifactsByConfiguration;

  /** Creates an empty project; all collection-valued fields start as empty, mutable collections. */
  public DefaultExternalProject() {
    myChildProjects = new HashMap<String, ExternalProject>();
    myTasks = new HashMap<String, ExternalTask>();
    myProperties = new HashMap<String, Object>();
    mySourceSets = new HashMap<String, ExternalSourceSet>();
    myPlugins = new HashMap<String, ExternalPlugin>();
    myArtifacts = new ArrayList<File>();
    myArtifactsByConfiguration = new HashMap<String, Set<File>>();
  }

  /**
   * Deep-copy constructor: scalar fields are copied by reference, while child projects,
   * tasks, source sets and plugins are wrapped in their Default* counterparts recursively,
   * so the copy is independent of the source's graph.
   * <p>
   * NOTE(review): {@code externalProject.getProperties()} is NOT copied here — the copy's
   * properties map stays empty. Presumably intentional (properties may hold arbitrary,
   * non-serializable Gradle objects), but confirm against upstream before relying on it.
   *
   * @param externalProject the project to copy; must not be null
   */
  public DefaultExternalProject(@NotNull ExternalProject externalProject) {
    this();
    myId = externalProject.getId();
    myName = externalProject.getName();
    myQName = externalProject.getQName();
    myVersion = externalProject.getVersion();
    myGroup = externalProject.getGroup();
    myDescription = externalProject.getDescription();
    myProjectDir = externalProject.getProjectDir();
    myBuildDir = externalProject.getBuildDir();
    myBuildFile = externalProject.getBuildFile();
    myExternalSystemId = externalProject.getExternalSystemId();
    for (Map.Entry<String, ExternalProject> entry : externalProject.getChildProjects().entrySet()) {
      myChildProjects.put(entry.getKey(), new DefaultExternalProject(entry.getValue()));
    }
    for (Map.Entry<String, ExternalTask> entry : externalProject.getTasks().entrySet()) {
      myTasks.put(entry.getKey(), new DefaultExternalTask(entry.getValue()));
    }
    for (Map.Entry<String, ExternalSourceSet> entry : externalProject.getSourceSets().entrySet()) {
      mySourceSets.put(entry.getKey(), new DefaultExternalSourceSet(entry.getValue()));
    }
    for (Map.Entry<String, ExternalPlugin> entry : externalProject.getPlugins().entrySet()) {
      myPlugins.put(entry.getKey(), new DefaultExternalPlugin(entry.getValue()));
    }
    myArtifacts.addAll(externalProject.getArtifacts());
    myArtifactsByConfiguration.putAll(externalProject.getArtifactsByConfiguration());
  }

  // ---------------------------------------------------------------------
  // Trivial accessors. Getters return the live internal references (no
  // defensive copies); setters replace the field with the given reference.
  // ---------------------------------------------------------------------

  @NotNull
  @Override
  public String getExternalSystemId() {
    return myExternalSystemId;
  }

  @NotNull
  @Override
  public String getId() {
    return myId;
  }

  public void setId(@NotNull String id) {
    myId = id;
  }

  public void setExternalSystemId(@NotNull String externalSystemId) {
    myExternalSystemId = externalSystemId;
  }

  @NotNull
  @Override
  public String getName() {
    return myName;
  }

  public void setName(@NotNull String name) {
    myName = name;
  }

  @NotNull
  @Override
  public String getQName() {
    return myQName;
  }

  public void setQName(@NotNull String QName) {
    myQName = QName;
  }

  @Nullable
  @Override
  public String getDescription() {
    return myDescription;
  }

  public void setDescription(@Nullable String description) {
    myDescription = description;
  }

  @NotNull
  @Override
  public String getGroup() {
    return myGroup;
  }

  public void setGroup(@NotNull String group) {
    myGroup = group;
  }

  @NotNull
  @Override
  public String getVersion() {
    return myVersion;
  }

  public void setVersion(@NotNull String version) {
    myVersion = version;
  }

  @NotNull
  @Override
  public Map<String, ExternalProject> getChildProjects() {
    return myChildProjects;
  }

  public void setChildProjects(@NotNull Map<String, ExternalProject> childProjects) {
    myChildProjects = childProjects;
  }

  @NotNull
  @Override
  public File getProjectDir() {
    return myProjectDir;
  }

  public void setProjectDir(@NotNull File projectDir) {
    myProjectDir = projectDir;
  }

  @NotNull
  @Override
  public File getBuildDir() {
    return myBuildDir;
  }

  public void setBuildDir(@NotNull File buildDir) {
    myBuildDir = buildDir;
  }

  @Nullable
  @Override
  public File getBuildFile() {
    return myBuildFile;
  }

  public void setBuildFile(@Nullable File buildFile) {
    myBuildFile = buildFile;
  }

  @NotNull
  @Override
  public Map<String, ExternalTask> getTasks() {
    return myTasks;
  }

  public void setTasks(@NotNull Map<String, ExternalTask> tasks) {
    myTasks = tasks;
  }

  @NotNull
  @Override
  public Map<String, ExternalPlugin> getPlugins() {
    return myPlugins;
  }

  public void setPlugins(@NotNull Map<String, ExternalPlugin> plugins) {
    myPlugins = plugins;
  }

  @NotNull
  @Override
  public Map<String, ?> getProperties() {
    return myProperties;
  }

  public void setProperties(@NotNull Map<String, ?> properties) {
    myProperties = properties;
  }

  /** Looks up a single property by name; null if absent (or mapped to null). */
  @Nullable
  @Override
  public Object getProperty(String name) {
    return myProperties.get(name);
  }

  @NotNull
  @Override
  public Map<String, ExternalSourceSet> getSourceSets() {
    return mySourceSets;
  }

  public void setSourceSets(@NotNull Map<String, ExternalSourceSet> sourceSets) {
    mySourceSets = sourceSets;
  }

  @NotNull
  @Override
  public List<File> getArtifacts() {
    return myArtifacts;
  }

  public void setArtifacts(@NotNull List<File> artifacts) {
    this.myArtifacts = artifacts;
  }

  public void setArtifactsByConfiguration(@NotNull Map<String, Set<File>> artifactsByConfiguration) {
    myArtifactsByConfiguration = artifactsByConfiguration;
  }

  @NotNull
  @Override
  public Map<String, Set<File>> getArtifactsByConfiguration() {
    return myArtifactsByConfiguration;
  }

  /** Short diagnostic form; identifies the project by id only. */
  @Override
  public String toString() {
    return "project '" + myId + "'";
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 */
package org.apache.polygene.api.common;

import org.apache.polygene.api.association.Association;
import org.apache.polygene.api.composite.TransientBuilder;
import org.apache.polygene.api.composite.TransientComposite;
import org.apache.polygene.api.constraint.ConstraintViolationException;
import org.apache.polygene.api.entity.EntityBuilder;
import org.apache.polygene.api.entity.EntityComposite;
import org.apache.polygene.api.mixin.Mixins;
import org.apache.polygene.api.property.Property;
import org.apache.polygene.api.unitofwork.UnitOfWork;
import org.apache.polygene.bootstrap.AssemblyException;
import org.apache.polygene.bootstrap.ModuleAssembly;
import org.apache.polygene.test.AbstractPolygeneTest;
import org.apache.polygene.test.EntityTestAssembler;
import org.junit.jupiter.api.Test;

import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

/**
 * Tests for @Optional: verifies that method parameters, Properties and
 * Associations marked @Optional may be left null, while their unannotated
 * (mandatory) counterparts raise ConstraintViolationException when null.
 */
public class OptionalTest
    extends AbstractPolygeneTest
{
    // Registers the test composites/entities with the module.
    // NOTE(review): presumably overrides AbstractPolygeneTest.assemble —
    // the @Override annotation is missing; confirm against the superclass.
    public void assemble( ModuleAssembly module )
    {
        module.transients( TestComposite.class );
        module.transients( TestComposite2.class );
        module.entities( TestComposite3.class, TestComposite4.class );
        new EntityTestAssembler().assemble( module );
    }

    // Both parameters non-null: the call must succeed.
    // (The "WOrld" typo in the test input is harmless — any non-null string works.)
    @Test
    public void givenOptionalMethodWhenCorrectInvokeThenNoException()
    {
        TestComposite instance = transientBuilderFactory.newTransient( TestComposite.class );
        instance.doStuff( "Hello WOrld", "Hello World" );
    }

    // Null for the mandatory (second) parameter must be rejected.
    @Test
    public void givenOptionalMethodWhenMandatoryMissingThenException()
    {
        assertThrows( ConstraintViolationException.class, () -> {
            TestComposite instance = transientBuilderFactory.newTransient( TestComposite.class );
            instance.doStuff( "Hello World", null );
        } );
    }

    // Null for the @Optional (first) parameter is allowed.
    @Test
    public void givenOptionalMethodWhenOptionalMissingThenNoException()
    {
        TestComposite instance = transientBuilderFactory.newTransient( TestComposite.class );
        instance.doStuff( null, "Hello World" );
    }

    // Building with only the mandatory property set must succeed
    // (constructing the instance IS the assertion; the local is unused on purpose).
    @Test
    public void givenOptionalPropertyWhenOptionalMissingThenNoException()
    {
        TransientBuilder<TestComposite2> builder = transientBuilderFactory.newTransientBuilder( TestComposite2.class );
        builder.prototype().mandatoryProperty().set( "Hello World" );
        TestComposite2 testComposite2 = builder.newInstance();
    }

    // Building with both properties set must also succeed.
    @Test
    public void givenOptionalPropertyWhenOptionalSetThenNoException()
    {
        TransientBuilder<TestComposite2> builder = transientBuilderFactory.newTransientBuilder( TestComposite2.class );
        builder.prototype().mandatoryProperty().set( "Hello World" );
        builder.prototype().optionalProperty().set( "Hello World" );
        TestComposite2 testComposite2 = builder.newInstance();
    }

    // Instantiating without setting the mandatory property must fail.
    @Test
    public void givenMandatoryPropertyWhenMandatoryMissingThenException()
    {
        assertThrows( ConstraintViolationException.class, () -> transientBuilderFactory.newTransient( TestComposite2.class ) );
    }

    // On a built instance: nulling the optional property is allowed,
    // nulling the mandatory one must throw.
    @Test
    public void givenMandatoryPropertyWhenSettingPropertyToNullOnBuiltInstanceThenException()
    {
        TransientBuilder<TestComposite2> builder = transientBuilderFactory.newTransientBuilder( TestComposite2.class );
        builder.prototype().mandatoryProperty().set( "Hello" );
        builder.prototype().optionalProperty().set( "World" );
        TestComposite2 testComposite2 = builder.newInstance();
        testComposite2.optionalProperty().set( null );
        assertThrows( ConstraintViolationException.class, () -> {
            testComposite2.mandatoryProperty().set( null );
        } );
    }

    // Entity with only the mandatory association set: UoW must complete.
    @Test
    public void givenOptionalAssociationWhenOptionalMissingThenNoException()
        throws Exception
    {
        UnitOfWork unitOfWork = unitOfWorkFactory.newUnitOfWork();
        try
        {
            TestComposite4 ref = unitOfWork.newEntity( TestComposite4.class );
            EntityBuilder<TestComposite3> builder = unitOfWork.newEntityBuilder( TestComposite3.class );
            builder.instance().mandatoryAssociation().set( ref );
            TestComposite3 testComposite3 = builder.newInstance();
            unitOfWork.complete();
        }
        finally
        {
            // discard() after complete() is a no-op; it only rolls back on failure.
            unitOfWork.discard();
        }
    }

    // Entity with both associations set: UoW must complete.
    @Test
    public void givenOptionalAssociationWhenOptionalSetThenNoException()
        throws Exception
    {
        UnitOfWork unitOfWork = unitOfWorkFactory.newUnitOfWork();
        try
        {
            TestComposite4 ref = unitOfWork.newEntity( TestComposite4.class );
            EntityBuilder<TestComposite3> builder = unitOfWork.newEntityBuilder( TestComposite3.class );
            builder.instance().mandatoryAssociation().set( ref );
            builder.instance().optionalAssociation().set( ref );
            TestComposite3 testComposite3 = builder.newInstance();
            unitOfWork.complete();
        }
        finally
        {
            unitOfWork.discard();
        }
    }

    // Entity missing its mandatory association must be rejected.
    @Test
    public void givenMandatoryAssociationWhenMandatoryMissingThenException()
        throws Exception
    {
        assertThrows( ConstraintViolationException.class, () -> {
            UnitOfWork unitOfWork = unitOfWorkFactory.newUnitOfWork();
            try
            {
                TestComposite4 ref = unitOfWork.newEntity( TestComposite4.class );
                EntityBuilder<TestComposite3> builder = unitOfWork.newEntityBuilder( TestComposite3.class );
                builder.instance().optionalAssociation().set( ref );
                TestComposite3 testComposite3 = builder.newInstance();
                unitOfWork.complete();
            }
            finally
            {
                unitOfWork.discard();
            }
        } );
    }

    // Transient with one @Optional and one mandatory method parameter.
    @Mixins( TestComposite.TestMixin.class )
    public interface TestComposite
        extends TransientComposite
    {
        void doStuff( @Optional String optional, String mandatory );

        abstract class TestMixin
            implements TestComposite
        {
            public void doStuff( @Optional String optional, String mandatory )
            {
                assertThat( "Mandatory is not null", mandatory, notNullValue() );
            }
        }
    }

    // Transient with one @Optional and one mandatory Property.
    public interface TestComposite2
        extends TransientComposite
    {
        @Optional
        Property<String> optionalProperty();

        Property<String> mandatoryProperty();
    }

    // Entity with one @Optional and one mandatory Association.
    public interface TestComposite3
        extends EntityComposite
    {
        @Optional
        Association<TestComposite4> optionalAssociation();

        Association<TestComposite4> mandatoryAssociation();
    }

    // Target entity for the associations above.
    public interface TestComposite4
        extends EntityComposite
    {
    }
}
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
 * University of Zurich, Switzerland.
 * <p>
 */
package org.olat.presentation.campusmgnt;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.olat.data.basesecurity.BaseSecurity;
import org.olat.data.basesecurity.Identity;
import org.olat.data.user.UserConstants;
import org.olat.lms.commons.util.BulkAction;
import org.olat.lms.security.BaseSecurityEBL;
import org.olat.lms.user.UserService;
import org.olat.presentation.framework.core.UserRequest;
import org.olat.presentation.framework.core.components.Component;
import org.olat.presentation.framework.core.components.htmlheader.jscss.JSAndCSSComponent;
import org.olat.presentation.framework.core.components.link.Link;
import org.olat.presentation.framework.core.components.link.LinkFactory;
import org.olat.presentation.framework.core.components.table.DefaultColumnDescriptor;
import org.olat.presentation.framework.core.components.table.GenericObjectArrayTableDataModel;
import org.olat.presentation.framework.core.components.table.TableController;
import org.olat.presentation.framework.core.components.table.TableGuiConfiguration;
import org.olat.presentation.framework.core.components.velocity.VelocityContainer;
import org.olat.presentation.framework.core.control.Controller;
import org.olat.presentation.framework.core.control.WindowControl;
import org.olat.presentation.framework.core.control.controller.BasicController;
import org.olat.presentation.framework.core.control.generic.wizard.WizardController;
import org.olat.presentation.framework.core.translator.HeaderColumnTranslator;
import org.olat.presentation.framework.core.translator.PackageTranslator;
import org.olat.system.event.Event;
import org.olat.system.spring.CoreSpringFactory;

/**
 * Four-step wizard that lets a user paste separated-value (e.g. CSV) rows,
 * map one column to an OLAT identity key (matriculation number, username or
 * email), pick a bulk action to run per matched identity, and finally view
 * (and download) the rows with the bulk-action results appended.
 * <p>
 * Step flow (driven by the two event(...) dispatch methods below):
 * 1. SeparatedValueInputForm — raw input; 2. KeyWithColumnForm — column/key
 * mapping; 3. ColWithBulkActionForm — bulk-action selection; 4. result table.
 * <P>
 * Initial Date: 19.12.2005 <br>
 *
 * @author Alexander Schneider
 */
public class InOutWizardController extends BasicController {

    /** Velocity command fired from the "finished" template on step 4. */
    private static final String CMD_FINISHED = "finished";

    private HeaderColumnTranslator hcTranslator;
    private final WizardController wc;
    // Table of the parsed input rows (rebuilt for steps 2 and 3).
    private TableController sepValTableCtr;
    private GenericObjectArrayTableDataModel sepValModel;
    // Final (step 4) table including the bulk-action result column.
    private TableController sepValFinTableCtr;
    private final VelocityContainer sepValVC;
    private VelocityContainer keywithcolVC;
    private VelocityContainer colwithbulkactionVC;
    private VelocityContainer finishedVC;
    private final SeparatedValueInputForm sepValInForm;
    private KeyWithColumnForm kwcForm;
    private ColWithBulkActionForm cwbForm;
    // "Back" links for steps 2, 3 and 4 respectively.
    private Link backLinkKey;
    private Link backLinkCol;
    private Link backLinkFin;
    private List columnNames;
    // Display names of the selectable OLAT keys; list ORDER is significant —
    // the index is matched against selectedOlatKey in the step-3 handler.
    private List olatKeys;
    private final List bulkActions;
    private final int steps = 4;
    private TableGuiConfiguration tableConfig;
    // Index of the input column mapped to the OLAT key, and which key it is.
    private int selectedColForOlatKey;
    private int selectedOlatKey;
    private String selectedValueOfOlatKey;
    // Parsed input: one Object[] per input line.
    private List rows;
    private int numOfValuesPerLine;
    private int numOfLines;

    /**
     * Builds the wizard shell and shows step 1 (the raw-input form).
     *
     * @param ureq the current user request
     * @param bulkActions the available {@link BulkAction}s offered in step 3
     * @param wControl the window control
     */
    public InOutWizardController(final UserRequest ureq, final List bulkActions, final WindowControl wControl) {
        super(ureq, wControl);
        this.bulkActions = bulkActions;
        sepValVC = createVelocityContainer("inout");
        wc = new WizardController(ureq, wControl, steps);
        listenTo(wc);
        sepValInForm = new SeparatedValueInputForm(ureq, wControl);
        listenTo(sepValInForm);
        wc.setWizardTitle(translate("wizard.step1.title"));
        wc.setNextWizardStep(translate("wizard.step1.howto"), sepValInForm.getInitialComponent());
        sepValVC.put("wc", wc.getInitialComponent());
        // Stylesheet for the inline spreadsheet example shown on step 1.
        final JSAndCSSComponent xls_eg = new JSAndCSSComponent("xls_eg", this.getClass(), null, "xls_eg.css", true);
        sepValVC.put("xls_eg", xls_eg);
        putInitialPanel(sepValVC);
    }

    /**
     * Dispatches component events: the three "back" links (each re-shows the
     * previous wizard step) and the "finished" command from the step-4 template.
     */
    @Override
    protected void event(final UserRequest ureq, final Component source, final Event event) {
        // Back from step 2 to step 1.
        if (source == backLinkKey) {
            wc.setWizardTitle(translate("wizard.step1.title"));
            wc.setBackWizardStep(translate("wizard.step1.howto"), sepValInForm.getInitialComponent());
            // events from step 3
            // preparing step 4
        } else if (source == backLinkCol) {
            // Back from step 3 to step 2.
            wc.setWizardTitle(translate("wizard.step2.title"));
            wc.setBackWizardStep(translate("wizard.step2.howto"), keywithcolVC);
            // events from 4. step
        } else if (source == finishedVC) {
            if (event.getCommand().equals(CMD_FINISHED)) {
                fireEvent(ureq, Event.DONE_EVENT);
            }
        } else if (source == backLinkFin) {
            // Back from step 4 to step 3.
            wc.setWizardTitle(translate("wizard.step3.title"));
            wc.setBackWizardStep(translate("wizard.step3.howto"), colwithbulkactionVC);
        }
    }

    /**
     * Dispatches controller events: each form's DONE event finishes the
     * current step and prepares the next one.
     */
    @Override
    protected void event(final UserRequest ureq, final Controller source, final Event event) {
        // event from 1. step
        // preparing step 2
        if (source == sepValInForm) {
            if (event == Event.DONE_EVENT) {
                keywithcolVC = createVelocityContainer("keywithcol");
                backLinkKey = LinkFactory.createLinkBack(keywithcolVC, this);
                this.rows = new ArrayList(); // contains every input line as Object array
                rows = sepValInForm.getInputRows();
                numOfValuesPerLine = sepValInForm.getNumOfValPerLine();
                numOfLines = sepValInForm.getNumOfLines();
                // convert user input to an OLAT table
                columnNames = new ArrayList();
                tableConfig = new TableGuiConfiguration();
                tableConfig.setDownloadOffered(false);
                tableConfig.setSortingEnabled(false);
                tableConfig.setColumnMovingOffered(false);
                hcTranslator = new HeaderColumnTranslator(getTranslator());
                removeAsListenerAndDispose(sepValTableCtr);
                sepValTableCtr = new TableController(tableConfig, ureq, getWindowControl(), hcTranslator);
                listenTo(sepValTableCtr);
                for (int i = 0; i < numOfValuesPerLine + 1; i++) {
                    // length+1 since adding the delimiter above
                    sepValTableCtr.addColumnDescriptor(new DefaultColumnDescriptor("ccc" + (i + 1), i, null, ureq.getLocale()));
                    // add every name of a column to a list deployed as pulldown to the user for matching column with olat key
                    columnNames.add(translate("column", new String[] { "" + (i + 1) }));
                }
                sepValModel = new GenericObjectArrayTableDataModel(rows, numOfLines);
                sepValTableCtr.setTableDataModel(sepValModel);
                keywithcolVC.put("sepValTable", sepValTableCtr.getInitialComponent());
                final PackageTranslator userTrans = new PackageTranslator("org.olat.presentation.user", ureq.getLocale());
                final PackageTranslator properyHandlersTrans = new PackageTranslator("org.olat.lms.user.propertyhandler", ureq.getLocale());
                olatKeys = new ArrayList();
                // adding order is relevant for the "if-else if"-statement below at events from step 3
                olatKeys.add(properyHandlersTrans.translate("form.name.institutionalUserIdentifier"));
                olatKeys.add(userTrans.translate("form.username"));
                olatKeys.add(userTrans.translate("form.email"));
                // add olatKeys and columnsNames to the form which displays it as pulldown menus
                removeAsListenerAndDispose(kwcForm);
                kwcForm = new KeyWithColumnForm(ureq, getWindowControl(), olatKeys, columnNames);
                listenTo(kwcForm);
                keywithcolVC.put("kwcForm", kwcForm.getInitialComponent());
                wc.setWizardTitle(translate("wizard.step2.title"));
                wc.setNextWizardStep(translate("wizard.step2.howto"), keywithcolVC);
            }
        }
        // events from step 2
        // preparing step 3
        else if (source == kwcForm) {
            if (event == Event.DONE_EVENT) {
                // user clicked 'next'-button !!!!!!!
                selectedColForOlatKey = Integer.parseInt(kwcForm.getSelectedColumn());
                selectedOlatKey = Integer.parseInt(kwcForm.getSelectedOlatKey());
                selectedValueOfOlatKey = (String) olatKeys.get(selectedOlatKey);
                colwithbulkactionVC = createVelocityContainer("colwithbulkaction");
                backLinkCol = LinkFactory.createLinkBack(colwithbulkactionVC, this);
                // Rebuild the table: the key column now shows the chosen key name
                // ("hhh" prefix) instead of a generic column header ("ccc" prefix).
                removeAsListenerAndDispose(sepValTableCtr);
                sepValTableCtr = new TableController(tableConfig, ureq, getWindowControl(), hcTranslator);
                listenTo(sepValTableCtr);
                columnNames = null;
                columnNames = new ArrayList();
                for (int i = 0; i < numOfValuesPerLine + 1; i++) {
                    // length+1 since adding the delimiter above
                    if (i != selectedColForOlatKey) {
                        sepValTableCtr.addColumnDescriptor(new DefaultColumnDescriptor("ccc" + (i + 1), i, null, ureq.getLocale()));
                        // add every name of a column to a list deployed as pulldown to the user for matching column with olat key
                        columnNames.add(translate("column", new String[] { "" + (i + 1) }));
                    } else {
                        sepValTableCtr.addColumnDescriptor(new DefaultColumnDescriptor("hhh" + selectedValueOfOlatKey, i, null, ureq.getLocale()));
                    }
                }
                sepValTableCtr.setTableDataModel(sepValModel);
                colwithbulkactionVC.put("sepValTable", sepValTableCtr.getInitialComponent());
                removeAsListenerAndDispose(cwbForm);
                cwbForm = new ColWithBulkActionForm(ureq, getWindowControl(), columnNames, bulkActions);
                listenTo(cwbForm);
                colwithbulkactionVC.put("cwbForm", cwbForm.getInitialComponent());
                wc.setWizardTitle(translate("wizard.step3.title"));
                wc.setNextWizardStep(translate("wizard.step3.howto"), colwithbulkactionVC);
            }
        } else if (source == cwbForm) {
            // events from step 3 — run the bulk action and prepare step 4.
            if (event == Event.DONE_EVENT) {
                // user clicked 'next'-button !!!!!!!
                // Deep-copy the rows so the bulk results don't mutate the step-2/3 model.
                final List rowsFourthStep = new ArrayList(rows.size());
                for (final Iterator iter = rows.iterator(); iter.hasNext();) {
                    final Object[] values = (Object[]) iter.next();
                    rowsFourthStep.add(values.clone());
                }
                final String selectedColForBulk = cwbForm.getSelectedColumn();
                int colForBulk = Integer.parseInt(selectedColForBulk);
                // the selected column for the OLAT key was not more shown in the pulldownmenu for
                // for choosing the bulkaction, but it is not removed, therefore we have to increment
                // the colForBulk in certain cases
                if (selectedColForOlatKey <= colForBulk) {
                    colForBulk++;
                }
                final String selectedBulk = cwbForm.getSelectedBulkAction();
                final int bulk = Integer.parseInt(selectedBulk);
                finishedVC = createVelocityContainer("finished");
                backLinkFin = LinkFactory.createLinkBack(finishedVC, this);
                final GenericObjectArrayTableDataModel sepValFinModel = new GenericObjectArrayTableDataModel(rowsFourthStep, numOfLines);
                final BulkAction ba = (BulkAction) bulkActions.get(bulk);
                final List identities = new ArrayList(sepValFinModel.getRowCount());
                // read values from the column which the user has defined as olat key (e.g. username)
                // and add them to a list.
                // Unmatched rows contribute a null entry, keeping list indexes
                // aligned with table rows.
                for (int i = 0; i < sepValFinModel.getRowCount(); i++) {
                    String val = (String) sepValFinModel.getValueAt(i, selectedColForOlatKey);
                    val = val.trim();
                    Identity identity = null;
                    if (selectedOlatKey == 0) { // matrikelnumber
                        final Map<String, String> searchValue = new HashMap<String, String>();
                        searchValue.put(UserConstants.INSTITUTIONAL_MATRICULATION_NUMBER, val);
                        final List<Identity> identitiesFoundByInst = getBaseSecurityEBL().getIdentitiesByInstitutialUserIdentifier(searchValue);
                        // FIXME:as:b error handling if there is more than one identity found by institutionalUserIdentifier
                        // see also in BulkAssessmentWizardController
                        if (identitiesFoundByInst.size() == 1) {
                            identity = (Identity) identitiesFoundByInst.get(0);
                        }
                    } else if (selectedOlatKey == 1) { // username
                        identity = getBaseSecurity().findIdentityByName(val);
                    } else if (selectedOlatKey == 2) { // email
                        identity = getUserService().findIdentityByEmail(val);
                    }
                    identities.add(identity);
                }
                // get results from the user chosen bulk action for every identity
                final List bulkResults = ba.doAction(identities);
                // add the bulk results to the data model
                for (int i = 0; i < sepValFinModel.getRowCount(); i++) {
                    final String result = (String) bulkResults.get(i);
                    sepValFinModel.setValueAt(result, i, colForBulk);
                }
                // Step 4 table: downloadable, with named headers for the key column
                // and the bulk-result column.
                tableConfig.setDownloadOffered(true);
                removeAsListenerAndDispose(sepValFinTableCtr);
                sepValFinTableCtr = new TableController(tableConfig, ureq, getWindowControl(), hcTranslator);
                listenTo(sepValFinTableCtr);
                columnNames = null;
                columnNames = new ArrayList();
                for (int i = 0; i < numOfValuesPerLine + 1; i++) {
                    if (i == selectedColForOlatKey) {
                        sepValFinTableCtr.addColumnDescriptor(new DefaultColumnDescriptor("hhh" + selectedValueOfOlatKey, i, null, ureq.getLocale()));
                    } else if (i == colForBulk) {
                        sepValFinTableCtr.addColumnDescriptor(new DefaultColumnDescriptor("hhh" + ba.getDisplayName(), i, null, ureq.getLocale()));
                    } else {
                        sepValFinTableCtr.addColumnDescriptor(new DefaultColumnDescriptor("ccc" + (i + 1), i, null, ureq.getLocale()));
                    }
                }
                sepValFinTableCtr.setTableDataModel(sepValFinModel);
                finishedVC.put("sepValTable", sepValFinTableCtr.getInitialComponent());
                wc.setWizardTitle(translate("wizard.step4.title"));
                wc.setNextWizardStep(translate("wizard.step4.howto"), finishedVC);
            }
        } else if (source == wc) {
            // Wizard-level cancel: bubble up to the parent controller.
            if (event == Event.CANCELLED_EVENT) {
                fireEvent(ureq, event);
            }
        }
    }

    /** Spring-looked-up business logic for matriculation-number searches. */
    private BaseSecurityEBL getBaseSecurityEBL() {
        return (BaseSecurityEBL) CoreSpringFactory.getBean(BaseSecurityEBL.class);
    }

    /** Spring-looked-up security manager for username lookups. */
    private BaseSecurity getBaseSecurity() {
        return (BaseSecurity) CoreSpringFactory.getBean(BaseSecurity.class);
    }

    @Override
    protected void doDispose() {
        // nothing to clean up: all child controllers are auto-disposed via listenTo()
    }

    /** Spring-looked-up user service for email lookups. */
    private UserService getUserService() {
        return CoreSpringFactory.getBean(UserService.class);
    }
}
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.taobao.android.dx.ssa; import com.taobao.android.dx.rop.code.Insn; import com.taobao.android.dx.rop.code.LocalItem; import com.taobao.android.dx.rop.code.RegOps; import com.taobao.android.dx.rop.code.RegisterSpec; import com.taobao.android.dx.rop.code.RegisterSpecList; import com.taobao.android.dx.rop.code.Rop; /** * A "normal" (non-phi) instruction in SSA form. Always wraps a rop insn. */ public final class NormalSsaInsn extends SsaInsn implements Cloneable { /** {@code non-null;} rop insn that we're wrapping */ private Insn insn; /** * Creates an instance. * * @param insn Rop insn to wrap * @param block block that contains this insn */ NormalSsaInsn(final Insn insn, final SsaBasicBlock block) { super(insn.getResult(), block); this.insn = insn; } /** {@inheritDoc} */ @Override public final void mapSourceRegisters(RegisterMapper mapper) { RegisterSpecList oldSources = insn.getSources(); RegisterSpecList newSources = mapper.map(oldSources); if (newSources != oldSources) { insn = insn.withNewRegisters(getResult(), newSources); getBlock().getParent().onSourcesChanged(this, oldSources); } } /** * Changes one of the insn's sources. New source should be of same type * and category. 
* * @param index {@code >=0;} index of source to change * @param newSpec spec for new source */ public final void changeOneSource(int index, RegisterSpec newSpec) { RegisterSpecList origSources = insn.getSources(); int sz = origSources.size(); RegisterSpecList newSources = new RegisterSpecList(sz); for (int i = 0; i < sz; i++) { newSources.set(i, i == index ? newSpec : origSources.get(i)); } newSources.setImmutable(); RegisterSpec origSpec = origSources.get(index); if (origSpec.getReg() != newSpec.getReg()) { /* * If the register remains unchanged, we're only changing * the type or local var name so don't update use list */ getBlock().getParent().onSourceChanged(this, origSpec, newSpec); } insn = insn.withNewRegisters(getResult(), newSources); } /** * Changes the source list of the insn. New source list should be the * same size and consist of sources of identical types. * * @param newSources non-null new sources list. */ public final void setNewSources (RegisterSpecList newSources) { RegisterSpecList origSources = insn.getSources(); if (origSources.size() != newSources.size()) { throw new RuntimeException("Sources counts don't match"); } insn = insn.withNewRegisters(getResult(), newSources); } /** {@inheritDoc} */ @Override public NormalSsaInsn clone() { return (NormalSsaInsn) super.clone(); } /** * Like rop.Insn.getSources(). 
* * @return {@code null-ok;} sources list */ @Override public RegisterSpecList getSources() { return insn.getSources(); } /** {@inheritDoc} */ public String toHuman() { return toRopInsn().toHuman(); } /** {@inheritDoc} */ @Override public Insn toRopInsn() { return insn.withNewRegisters(getResult(), insn.getSources()); } /** * @return the Rop opcode for this insn */ @Override public Rop getOpcode() { return insn.getOpcode(); } /** {@inheritDoc} */ @Override public Insn getOriginalRopInsn() { return insn; } /** {@inheritDoc} */ @Override public RegisterSpec getLocalAssignment() { RegisterSpec assignment; if (insn.getOpcode().getOpcode() == RegOps.MARK_LOCAL) { assignment = insn.getSources().get(0); } else { assignment = getResult(); } if (assignment == null) { return null; } LocalItem local = assignment.getLocalItem(); if (local == null) { return null; } return assignment; } /** * Upgrades this insn to a version that represents the constant source * literally. If the upgrade is not possible, this does nothing. * * @see Insn#withSourceLiteral */ public void upgradeToLiteral() { RegisterSpecList oldSources = insn.getSources(); insn = insn.withSourceLiteral(); getBlock().getParent().onSourcesChanged(this, oldSources); } /** * @return true if this is a move (but not a move-operand) instruction */ @Override public boolean isNormalMoveInsn() { return insn.getOpcode().getOpcode() == RegOps.MOVE; } /** {@inheritDoc} */ @Override public boolean isMoveException() { return insn.getOpcode().getOpcode() == RegOps.MOVE_EXCEPTION; } /** {@inheritDoc} */ @Override public boolean canThrow() { return insn.canThrow(); } /** {@inheritDoc} */ @Override public void accept(Visitor v) { if (isNormalMoveInsn()) { v.visitMoveInsn(this); } else { v.visitNonMoveInsn(this); } } /** {@inheritDoc} */ @Override public boolean isPhiOrMove() { return isNormalMoveInsn(); } /** * {@inheritDoc} * * TODO: Increase the scope of this. 
*/ @Override public boolean hasSideEffect() { Rop opcode = getOpcode(); if (opcode.getBranchingness() != Rop.BRANCH_NONE) { return true; } boolean hasLocalSideEffect = Optimizer.getPreserveLocals() && getLocalAssignment() != null; switch (opcode.getOpcode()) { case RegOps.MOVE_RESULT: case RegOps.MOVE: case RegOps.CONST: return hasLocalSideEffect; default: return true; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cordova.http; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Enumeration; import javax.microedition.io.Connector; import javax.microedition.io.HttpConnection; import javax.microedition.io.file.FileConnection; import org.apache.cordova.CordovaExtension; import org.apache.cordova.api.PluginResult; import org.apache.cordova.json4j.JSONException; import org.apache.cordova.json4j.JSONObject; import org.apache.cordova.util.Logger; import net.rim.device.api.io.FileNotFoundException; import net.rim.device.api.io.IOUtilities; import net.rim.device.api.io.MIMETypeAssociations; import net.rim.device.api.io.http.HttpProtocolConstants; import net.rim.device.api.ui.UiApplication; /** * The FileUploader uses an HTTP multipart request to upload files on the * device to a remote server. It currently supports a single file per HTTP * request. 
*/ public class FileUploader { /** * Constants */ private static final String BOUNDARY = "----0x2fc1b3ef7cecbf14L"; private static final String LINE_END = "\r\n"; private static final String TD = "--"; private Integer responseCode = null; /** * Uploads the specified file to the server URL provided using an HTTP * multipart request. * @param filePath Full path of the file on the file system * @param server URL of the server to receive the file * @param fileKey Name of file request parameter * @param fileName File name to be used on server * @param mimeType Describes file content type * @param params key:value pairs of user-defined parameters * @return FileUploadResult containing result of upload request */ public FileUploadResult upload(String filePath, String server, String fileKey, String fileName, String mimeType, JSONObject params, JSONObject headers) throws FileNotFoundException, IllegalArgumentException, IOException { Logger.log(this.getClass().getName() + ": uploading " + filePath + " to " + server); FileUploadResult result = new FileUploadResult(); InputStream in = null; OutputStream out = null; FileConnection fconn = null; HttpConnection httpConn = null; try { // open connection to the file try { fconn = (FileConnection)Connector.open(filePath, Connector.READ); } catch (ClassCastException e) { // in case something really funky gets passed in throw new IllegalArgumentException("Invalid file path"); } catch (IOException e) { throw new FileNotFoundException("Failed to open source file: " + filePath); } if (!fconn.exists()) { throw new FileNotFoundException(filePath + " not found"); } // determine mime type by // 1) user-provided type // 2) retrieve from file system // 3) default to JPEG if (mimeType == null) { mimeType = MIMETypeAssociations.getMIMEType(filePath); if (mimeType == null) { mimeType = HttpProtocolConstants.CONTENT_TYPE_IMAGE_JPEG; } } // boundary messages String boundaryMsg = getBoundaryMessage(fileKey, fileName, mimeType); String lastBoundary = 
getEndBoundary(); // user-defined request parameters String customParams = (params != null) ? getParameterContent(params) : ""; Logger.log(this.getClass().getName() + ": params=" + customParams); // determine content length long fileSize = fconn.fileSize(); Logger.log(this.getClass().getName() + ": " + filePath + " size=" + fileSize + " bytes"); long contentLength = fileSize + (long)boundaryMsg.length() + (long)lastBoundary.length() + (long)customParams.length(); // get HttpConnection httpConn = HttpUtils.getHttpConnection(server); if (httpConn == null) { throw new IOException("Failed to connect to " + server); } Logger.log(this.getClass().getName() + ": server URL=" + httpConn.getURL()); // set request headers httpConn.setRequestMethod(HttpConnection.POST); httpConn.setRequestProperty( HttpProtocolConstants.HEADER_USER_AGENT, System.getProperty("browser.useragent")); httpConn.setRequestProperty( HttpProtocolConstants.HEADER_KEEP_ALIVE, "300"); httpConn.setRequestProperty( HttpProtocolConstants.HEADER_CONNECTION, "keep-alive"); httpConn.setRequestProperty( HttpProtocolConstants.HEADER_CONTENT_TYPE, HttpProtocolConstants.CONTENT_TYPE_MULTIPART_FORM_DATA + "; boundary=" + BOUNDARY); httpConn.setRequestProperty( HttpProtocolConstants.HEADER_CONTENT_LENGTH, Long.toString(contentLength)); if(headers != null){ for(Enumeration e = headers.keys(); e.hasMoreElements();){ String key = e.nextElement().toString(); String value = headers.optString(key); Logger.log(this.getClass().getName() + ": key=" + key + " value=" + value); httpConn.setRequestProperty(key, value); } } // set cookie String cookie = HttpUtils.getCookie(server); if (cookie != null) { httpConn.setRequestProperty(HttpProtocolConstants.HEADER_COOKIE, cookie); Logger.log(this.getClass().getName() + ": cookie=" + cookie); } // write... 
out = httpConn.openDataOutputStream(); // parameters out.write(customParams.getBytes()); // boundary out.write(boundaryMsg.getBytes()); // file data in = fconn.openInputStream(); byte[] data = IOUtilities.streamToBytes(in); out.write(data); in.close(); // end boundary out.write(lastBoundary.getBytes()); // send request and get response in = httpConn.openDataInputStream(); //int rc = httpConn.getResponseCode(); result.setResponse(new String(IOUtilities.streamToBytes(in))); //result.setResponseCode(rc); result.setBytesSent(contentLength); Logger.log(this.getClass().getName() + ": sent " + contentLength + " bytes"); } finally { if (httpConn != null) { result.setResponseCode(httpConn.getResponseCode()); responseCode = new Integer(httpConn.getResponseCode()); } try { if (fconn != null) fconn.close(); if (in != null) in.close(); if (out != null) out.close(); if (httpConn != null) httpConn.close(); } catch (IOException e) { Logger.log(this.getClass().getName() + ": " + e); } } return result; } /** * Sends an upload progress notification back to JavaScript engine. * @param result FileUploadResult containing bytes sent of total * @param callbackId identifier of callback function to invoke */ protected void sendProgress(FileUploadResult result, final String callbackId) { JSONObject o = null; try { o = result.toJSONObject(); } catch (JSONException e) { Logger.log(this.getClass().getName() + ": " + e); return; } // send a progress result final PluginResult r = new PluginResult(PluginResult.Status.OK, o); r.setKeepCallback(true); UiApplication.getUiApplication().invokeAndWait( new Runnable() { public void run() { CordovaExtension.invokeSuccessCallback(callbackId, r); } } ); } /** * Returns the boundary string that represents the beginning of a file * in a multipart HTTP request. 
* @param fileKey Name of file request parameter * @param fileName File name to be used on server * @param mimeType Describes file content type * @return string representing the boundary message in a multipart HTTP request */ protected String getBoundaryMessage(String fileKey, String fileName, String mimeType) { return (new StringBuffer()) .append(TD).append(BOUNDARY).append(LINE_END) .append("Content-Disposition: form-data; name=\"").append(fileKey) .append("\"; filename=\"").append(fileName).append("\"").append(LINE_END) .append("Content-Type: ").append(mimeType).append(LINE_END) .append(LINE_END) .toString(); } /** * Returns the boundary string that represents the end of a file in a * multipart HTTP request. * @return string representing the end boundary message in a multipart HTTP request */ protected String getEndBoundary() { return LINE_END + TD + BOUNDARY + TD + LINE_END; } /** * Returns HTTP form content containing specified parameters. */ protected String getParameterContent(JSONObject params) { StringBuffer buf = new StringBuffer(); for (Enumeration e = params.keys(); e.hasMoreElements();) { String key = e.nextElement().toString(); String value = params.optString(key); buf.append(TD).append(BOUNDARY).append(LINE_END) .append("Content-Disposition: form-data; name=\"").append(key).append("\"") .append(LINE_END).append(LINE_END) .append(value).append(LINE_END); } return buf.toString(); } Integer getResponseCode() { return responseCode; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.jms.processors; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import javax.jms.BytesMessage; import javax.jms.JMSException; import javax.jms.MapMessage; import javax.jms.Message; import javax.jms.ObjectMessage; import javax.jms.Session; import javax.jms.StreamMessage; import javax.jms.TextMessage; import javax.jms.Topic; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.SerializationUtils; import org.apache.nifi.jms.processors.JMSConsumer.ConsumerCallback; import org.apache.nifi.jms.processors.JMSConsumer.JMSResponse; import org.apache.nifi.logging.ComponentLog; import org.junit.Test; import 
org.springframework.jms.connection.CachingConnectionFactory;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.core.MessageCreator;
import org.springframework.jms.support.JmsHeaders;

/**
 * Integration tests for {@link JMSPublisher} and {@link JMSConsumer}: messages
 * of various JMS types are published to an embedded broker and the consumer's
 * delivered body, headers and properties are verified.
 */
public class JMSPublisherConsumerIT {

    /** An ObjectMessage body must arrive as the Java-serialized payload. */
    @Test
    public void testObjectMessage() throws Exception {
        final String destinationName = "testObjectMessage";
        MessageCreator messageCreator = session -> {
            ObjectMessage message = session.createObjectMessage();
            message.setObject("stringAsObject");
            return message;
        };
        ConsumerCallback responseChecker = response -> {
            assertEquals(
                "stringAsObject",
                SerializationUtils.deserialize(response.getMessageBody())
            );
        };
        testMessage(destinationName, messageCreator, responseChecker);
    }

    /** A StreamMessage body must arrive as its DataOutputStream-style byte encoding. */
    @Test
    public void testStreamMessage() throws Exception {
        final String destinationName = "testStreamMessage";
        MessageCreator messageCreator = session -> {
            StreamMessage message = session.createStreamMessage();
            message.writeBoolean(true);
            message.writeByte(Integer.valueOf(1).byteValue());
            message.writeBytes(new byte[] {2, 3, 4});
            message.writeShort((short)32);
            message.writeInt(64);
            message.writeLong(128L);
            message.writeFloat(1.25F);
            message.writeDouble(100.867);
            message.writeChar('c');
            message.writeString("someString");
            message.writeObject("stringAsObject");
            return message;
        };

        // Build the expected byte image with DataOutputStream, mirroring the
        // per-field writes above (strings are written as modified UTF-8).
        byte[] expected;
        try (
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream);
        ) {
            dataOutputStream.writeBoolean(true);
            dataOutputStream.writeByte(1);
            dataOutputStream.write(new byte[] {2, 3, 4});
            dataOutputStream.writeShort((short)32);
            dataOutputStream.writeInt(64);
            dataOutputStream.writeLong(128L);
            dataOutputStream.writeFloat(1.25F);
            dataOutputStream.writeDouble(100.867);
            dataOutputStream.writeChar('c');
            dataOutputStream.writeUTF("someString");
            dataOutputStream.writeUTF("stringAsObject");
            dataOutputStream.flush();
            expected = byteArrayOutputStream.toByteArray();
        }

        ConsumerCallback responseChecker = response -> {
            byte[] actual = response.getMessageBody();
            assertArrayEquals(
                expected,
                actual
            );
        };
        testMessage(destinationName, messageCreator, responseChecker);
    }

    /** A MapMessage body must arrive rendered as a JSON object. */
    @Test
    public void testMapMessage() throws Exception {
        // NOTE(review): this reuses testObjectMessage's queue name; presumably
        // it should be "testMapMessage" — verify against broker state between tests.
        final String destinationName = "testObjectMessage";
        MessageCreator messageCreator = session -> {
            MapMessage message = session.createMapMessage();
            message.setBoolean("boolean", true);
            message.setByte("byte", Integer.valueOf(1).byteValue());
            message.setBytes("bytes", new byte[] {2, 3, 4});
            message.setShort("short", (short)32);
            message.setInt("int", 64);
            message.setLong("long", 128L);
            message.setFloat("float", 1.25F);
            message.setDouble("double", 100.867);
            message.setChar("char", 'c');
            message.setString("string", "someString");
            message.setObject("object", "stringAsObject");
            return message;
        };

        String expectedJson = "{"
            + "\"boolean\":true,"
            + "\"byte\":1,"
            + "\"bytes\":[2, 3, 4],"
            + "\"short\":32,"
            + "\"int\":64,"
            + "\"long\":128,"
            + "\"float\":1.25,"
            + "\"double\":100.867,"
            + "\"char\":\"c\","
            + "\"string\":\"someString\","
            + "\"object\":\"stringAsObject\""
            + "}";

        testMapMessage(destinationName, messageCreator, expectedJson);
    }

    /**
     * Publishes via the given creator and asserts the consumed body parses to
     * the same JSON map as expectedJson (compared as maps, so key order and
     * whitespace are irrelevant).
     */
    private void testMapMessage(String destinationName, MessageCreator messageCreator, String expectedJson) {
        ConsumerCallback responseChecker = response -> {
            ObjectMapper objectMapper = new ObjectMapper();
            try {
                Map<String, Object> actual = objectMapper.readValue(response.getMessageBody(), new TypeReference<Map<String, Object>>() {});
                Map<String, Object> expected = objectMapper.readValue(expectedJson.getBytes(), new TypeReference<Map<String, Object>>() {});
                assertEquals(expected, actual);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        };
        testMessage(destinationName, messageCreator, responseChecker);
    }

    /**
     * Shared helper: sends one message to destinationName, consumes it, runs
     * responseChecker on the response, and asserts the callback actually fired.
     */
    private void testMessage(String destinationName, MessageCreator messageCreator, ConsumerCallback responseChecker) {
        JmsTemplate jmsTemplate = CommonTest.buildJmsTemplateForDestination(false);
        AtomicBoolean callbackInvoked = new AtomicBoolean();
        try {
            jmsTemplate.send(destinationName, messageCreator);

            JMSConsumer consumer = new JMSConsumer((CachingConnectionFactory) jmsTemplate.getConnectionFactory(), jmsTemplate, mock(ComponentLog.class));
            consumer.consume(destinationName, null, false, false, null, null, "UTF-8", response -> {
                callbackInvoked.set(true);
                responseChecker.accept(response);
            });
            assertTrue(callbackInvoked.get());
        } finally {
            // Always tear down the cached connections so the broker is clean
            // for the next test.
            ((CachingConnectionFactory) jmsTemplate.getConnectionFactory()).destroy();
        }
    }

    /** Raw byte payloads must be published as BytesMessage. */
    @Test
    public void validateBytesConvertedToBytesMessageOnSend() throws Exception {
        final String destinationName = "validateBytesConvertedToBytesMessageOnSend";
        JmsTemplate jmsTemplate = CommonTest.buildJmsTemplateForDestination(false);
        try {
            JMSPublisher publisher = new JMSPublisher((CachingConnectionFactory) jmsTemplate.getConnectionFactory(), jmsTemplate, mock(ComponentLog.class));
            publisher.publish(destinationName, "hellomq".getBytes());

            Message receivedMessage = jmsTemplate.receive(destinationName);
            assertTrue(receivedMessage instanceof BytesMessage);
            byte[] bytes = new byte[7];
            ((BytesMessage) receivedMessage).readBytes(bytes);
            assertEquals("hellomq", new String(bytes));
        } finally {
            ((CachingConnectionFactory) jmsTemplate.getConnectionFactory()).destroy();
        }
    }

    /** FlowFile attributes must map onto JMS headers and message properties. */
    @Test
    public void validateJmsHeadersAndPropertiesAreTransferredFromFFAttributes() throws Exception {
        final String destinationName = "validateJmsHeadersAndPropertiesAreTransferredFromFFAttributes";
        JmsTemplate jmsTemplate = CommonTest.buildJmsTemplateForDestination(false);
        try {
            JMSPublisher publisher = new JMSPublisher((CachingConnectionFactory) jmsTemplate.getConnectionFactory(), jmsTemplate, mock(ComponentLog.class));
            Map<String, String> flowFileAttributes = new HashMap<>();
            flowFileAttributes.put("foo", "foo");
            flowFileAttributes.put("hyphen-property", "value");
            flowFileAttributes.put("fullstop.property", "value");
            flowFileAttributes.put(JmsHeaders.REPLY_TO, "myTopic");
            flowFileAttributes.put(JmsHeaders.DELIVERY_MODE, "1");
            flowFileAttributes.put(JmsHeaders.PRIORITY, "1");
            // Value expected to be integer; make sure a non-integer doesn't cause problems.
            flowFileAttributes.put(JmsHeaders.EXPIRATION, "never");

            publisher.publish(destinationName, "hellomq".getBytes(), flowFileAttributes);

            Message receivedMessage = jmsTemplate.receive(destinationName);
            assertTrue(receivedMessage instanceof BytesMessage);
            assertEquals("foo", receivedMessage.getStringProperty("foo"));
            assertTrue(receivedMessage.propertyExists("hyphen-property"));
            assertTrue(receivedMessage.propertyExists("fullstop.property"));
            assertTrue(receivedMessage.getJMSReplyTo() instanceof Topic);
            assertEquals(1, receivedMessage.getJMSDeliveryMode());
            assertEquals(1, receivedMessage.getJMSPriority());
            assertEquals("myTopic", ((Topic) receivedMessage.getJMSReplyTo()).getTopicName());
        } finally {
            ((CachingConnectionFactory) jmsTemplate.getConnectionFactory()).destroy();
        }
    }

    /**
     * At the moment the only two supported message types are TextMessage and
     * BytesMessage, which is sufficient for the JMS use cases NiFi is used
     * for. This may change to the point where all message types are supported,
     * at which point this test will no longer be required.
     */
    @Test
    public void validateFailOnUnsupportedMessageType() throws Exception {
        final String destinationName = "validateFailOnUnsupportedMessageType";
        JmsTemplate jmsTemplate = CommonTest.buildJmsTemplateForDestination(false);
        try {
            jmsTemplate.send(destinationName, new MessageCreator() {
                @Override
                public Message createMessage(Session session) throws JMSException {
                    return session.createObjectMessage();
                }
            });

            JMSConsumer consumer = new JMSConsumer((CachingConnectionFactory) jmsTemplate.getConnectionFactory(), jmsTemplate, mock(ComponentLog.class));
            consumer.consume(destinationName, null, false, false, null, null, "UTF-8", new ConsumerCallback() {
                @Override
                public void accept(JMSResponse response) {
                    // noop
                }
            });
        } finally {
            ((CachingConnectionFactory) jmsTemplate.getConnectionFactory()).destroy();
        }
    }

    /** Consumed messages must expose custom headers and string/boolean properties. */
    @Test
    public void validateConsumeWithCustomHeadersAndProperties() throws Exception {
        final String destinationName = "validateConsumeWithCustomHeadersAndProperties";
        JmsTemplate jmsTemplate = CommonTest.buildJmsTemplateForDestination(false);
        try {
            jmsTemplate.send(destinationName, new MessageCreator() {
                @Override
                public Message createMessage(Session session) throws JMSException {
                    TextMessage message = session.createTextMessage("hello from the other side");
                    message.setStringProperty("foo", "foo");
                    message.setBooleanProperty("bar", false);
                    message.setJMSReplyTo(session.createQueue("fooQueue"));
                    return message;
                }
            });

            JMSConsumer consumer = new JMSConsumer((CachingConnectionFactory) jmsTemplate.getConnectionFactory(), jmsTemplate, mock(ComponentLog.class));
            final AtomicBoolean callbackInvoked = new AtomicBoolean();
            consumer.consume(destinationName, null, false, false, null, null, "UTF-8", new ConsumerCallback() {
                @Override
                public void accept(JMSResponse response) {
                    callbackInvoked.set(true);
                    assertEquals("hello from the other side", new String(response.getMessageBody()));
                    assertEquals("fooQueue", response.getMessageHeaders().get(JmsHeaders.REPLY_TO));
                    assertEquals("foo", response.getMessageProperties().get("foo"));
                    assertEquals("false", response.getMessageProperties().get("bar"));
                }
            });
            assertTrue(callbackInvoked.get());
        } finally {
            ((CachingConnectionFactory) jmsTemplate.getConnectionFactory()).destroy();
        }
    }

    /** Four concurrent consumers must together drain 4000 published messages. */
    @Test(timeout = 20000)
    public void testMultipleThreads() throws Exception {
        String destinationName = "testMultipleThreads";
        JmsTemplate publishTemplate = CommonTest.buildJmsTemplateForDestination(false);
        final CountDownLatch consumerTemplateCloseCount = new CountDownLatch(4);

        try {
            JMSPublisher publisher = new JMSPublisher((CachingConnectionFactory) publishTemplate.getConnectionFactory(), publishTemplate, mock(ComponentLog.class));
            for (int i = 0; i < 4000; i++) {
                publisher.publish(destinationName, String.valueOf(i).getBytes(StandardCharsets.UTF_8));
            }

            final AtomicInteger msgCount = new AtomicInteger(0);

            final ConsumerCallback callback = new ConsumerCallback() {
                @Override
                public void accept(JMSResponse response) {
                    msgCount.incrementAndGet();
                }
            };

            final Thread[] threads = new Thread[4];
            for (int i = 0; i < 4; i++) {
                final Thread t = new Thread(() -> {
                    // Each thread gets its own template/connection; the latch
                    // lets the main thread wait for all of them to close.
                    JmsTemplate consumeTemplate = CommonTest.buildJmsTemplateForDestination(false);
                    try {
                        JMSConsumer consumer = new JMSConsumer((CachingConnectionFactory) consumeTemplate.getConnectionFactory(), consumeTemplate, mock(ComponentLog.class));
                        for (int j = 0; j < 1000 && msgCount.get() < 4000; j++) {
                            consumer.consume(destinationName, null, false, false, null, null, "UTF-8", callback);
                        }
                    } finally {
                        ((CachingConnectionFactory) consumeTemplate.getConnectionFactory()).destroy();
                        consumerTemplateCloseCount.countDown();
                    }
                });
                threads[i] = t;
                t.start();
            }

            int iterations = 0;
            while (msgCount.get() < 4000) {
                Thread.sleep(10L);
                if (++iterations % 100 == 0) {
                    System.out.println(msgCount.get() + " messages received so far");
                }
            }
        } finally {
            ((CachingConnectionFactory) publishTemplate.getConnectionFactory()).destroy();
            consumerTemplateCloseCount.await();
        }
    }

    /**
     * A message whose callback throws (no ack) must be redelivered; once the
     * callback succeeds the next message becomes available. Exercised twice
     * to cover both messages.
     */
    @Test(timeout = 10000)
    public void validateMessageRedeliveryWhenNotAcked() throws Exception {
        String destinationName = "validateMessageRedeliveryWhenNotAcked";
        JmsTemplate jmsTemplate = CommonTest.buildJmsTemplateForDestination(false);
        try {
            JMSPublisher publisher = new JMSPublisher((CachingConnectionFactory) jmsTemplate.getConnectionFactory(), jmsTemplate, mock(ComponentLog.class));
            publisher.publish(destinationName, "1".getBytes(StandardCharsets.UTF_8));
            publisher.publish(destinationName, "2".getBytes(StandardCharsets.UTF_8));

            JMSConsumer consumer = new JMSConsumer((CachingConnectionFactory) jmsTemplate.getConnectionFactory(), jmsTemplate, mock(ComponentLog.class));
            final AtomicBoolean callbackInvoked = new AtomicBoolean();
            try {
                consumer.consume(destinationName, null, false, false, null, null, "UTF-8", new ConsumerCallback() {
                    @Override
                    public void accept(JMSResponse response) {
                        callbackInvoked.set(true);
                        assertEquals("1", new String(response.getMessageBody()));
                        throw new RuntimeException("intentional to avoid explicit ack");
                    }
                });
            } catch (Exception e) {
                // expected
            }
            assertTrue(callbackInvoked.get());
            callbackInvoked.set(false);

            // Should receive the same message, but will process it successfully.
            while (!callbackInvoked.get()) {
                consumer.consume(destinationName, null, false, false, null, null, "UTF-8", new ConsumerCallback() {
                    @Override
                    public void accept(JMSResponse response) {
                        if (response == null) {
                            return;
                        }
                        callbackInvoked.set(true);
                        assertEquals("1", new String(response.getMessageBody()));
                    }
                });
            }
            assertTrue(callbackInvoked.get());
            callbackInvoked.set(false);

            // Receiving next message and fail again.
            try {
                while (!callbackInvoked.get()) {
                    consumer.consume(destinationName, null, false, false, null, null, "UTF-8", new ConsumerCallback() {
                        @Override
                        public void accept(JMSResponse response) {
                            if (response == null) {
                                return;
                            }
                            callbackInvoked.set(true);
                            assertEquals("2", new String(response.getMessageBody()));
                            throw new RuntimeException("intentional to avoid explicit ack");
                        }
                    });
                }
            } catch (Exception e) {
                // ignore
            }
            assertTrue(callbackInvoked.get());
            callbackInvoked.set(false);

            // Should receive the same message, but will process it successfully.
            try {
                while (!callbackInvoked.get()) {
                    consumer.consume(destinationName, null, false, false, null, null, "UTF-8", new ConsumerCallback() {
                        @Override
                        public void accept(JMSResponse response) {
                            if (response == null) {
                                return;
                            }
                            callbackInvoked.set(true);
                            assertEquals("2", new String(response.getMessageBody()));
                        }
                    });
                }
            } catch (Exception e) {
                // ignore
            }
        } finally {
            ((CachingConnectionFactory) jmsTemplate.getConnectionFactory()).destroy();
        }
    }

    /** A JMS message selector must deliver only the matching message. */
    @Test
    public void testMessageSelector() {
        String destinationName = "testMessageSelector";
        JmsTemplate jmsTemplate = CommonTest.buildJmsTemplateForDestination(false);

        String messageSelector = "prop = '1'";

        try {
            jmsTemplate.send(destinationName, session -> session.createTextMessage("msg0"));
            jmsTemplate.send(destinationName, session -> {
                TextMessage message = session.createTextMessage("msg1");
                message.setStringProperty("prop", "1");
                return message;
            });
            jmsTemplate.send(destinationName, session -> {
                TextMessage message = session.createTextMessage("msg2");
                message.setStringProperty("prop", "2");
                return message;
            });

            JMSConsumer consumer = new JMSConsumer((CachingConnectionFactory) jmsTemplate.getConnectionFactory(), jmsTemplate, mock(ComponentLog.class));
            AtomicBoolean callbackInvoked = new AtomicBoolean();
            consumer.consume(destinationName, null, false, false, null, messageSelector, "UTF-8", new ConsumerCallback() {
                @Override
                public void accept(JMSResponse response) {
                    callbackInvoked.set(true);
                    assertEquals("msg1", new String(response.getMessageBody()));
                }
            });
            assertTrue(callbackInvoked.get());
        } finally {
            ((CachingConnectionFactory) jmsTemplate.getConnectionFactory()).destroy();
        }
    }
}
/**
 * Copyright 2011 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.bitcoin.core;

import javax.annotation.Nullable;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;

/**
 * A VersionMessage holds information exchanged during connection setup with another peer. Most of the fields are not
 * particularly interesting. The subVer field, since BIP 14, acts as a User-Agent string would. You can and should
 * append to or change the subVer for your own software so other implementations can identify it, and you can look at
 * the subVer field received from other nodes to see what they are running. If blank, it means the Satoshi client.<p>
 *
 * After creating yourself a VersionMessage, you can pass it to {@link PeerGroup#setVersionMessage(VersionMessage)}
 * to ensure it will be used for each new connection.
 */
public class VersionMessage extends Message {
    private static final long serialVersionUID = 7313594258967483180L;

    /**
     * A services flag that denotes whether the peer has a copy of the block chain or not.
     */
    public static final int NODE_NETWORK = 1;

    /**
     * The version number of the protocol spoken.
     */
    public int clientVersion;
    /**
     * Flags defining what is supported. Right now {@link #NODE_NETWORK} is the only flag defined.
     */
    public long localServices;
    /**
     * What the other side believes the current time to be, in seconds.
     */
    public long time;
    /**
     * What the other side believes the address of this program is. Not used.
     */
    public PeerAddress myAddr;
    /**
     * What the other side believes their own address is. Not used.
     */
    public PeerAddress theirAddr;
    /**
     * An additional string that today the official client sets to the empty string. We treat it as something like an
     * HTTP User-Agent header.
     */
    public String subVer;
    /**
     * How many blocks are in the chain, according to the other side.
     */
    public long bestHeight;
    /**
     * Whether or not to relay tx invs before a filter is received.
     */
    public boolean relayTxesBeforeFilter;

    /** The version of this library release, as a string. */
    public static final String BITCOINJ_VERSION = "0.11";
    /** The value that is prepended to the subVer field of this application. */
    public static final String LIBRARY_SUBVER = "/BitCoinJ:" + BITCOINJ_VERSION + "/";

    public VersionMessage(NetworkParameters params, byte[] msg) throws ProtocolException {
        super(params, msg, 0);
    }

    // It doesn't really make sense to ever lazily parse a version message or to retain the backing bytes.
    // If you're receiving this on the wire you need to check the protocol version and it will never need to be sent
    // back down the wire.

    /** Equivalent to VersionMessage(params, newBestHeight, true) */
    public VersionMessage(NetworkParameters params, int newBestHeight) {
        this(params, newBestHeight, true);
    }

    public VersionMessage(NetworkParameters params, int newBestHeight, boolean relayTxesBeforeFilter) {
        super(params);
        clientVersion = NetworkParameters.PROTOCOL_VERSION;
        localServices = 0;
        time = System.currentTimeMillis() / 1000;
        // Note that the official client doesn't do anything with these, and finding out your own external IP address
        // is kind of tricky anyway, so we just put nonsense here for now.
        try {
            // We hard-code the IPv4 localhost address here rather than use InetAddress.getLocalHost() because some
            // mobile phones have broken localhost DNS entries, also, this is faster.
            final byte[] localhost = { 127, 0, 0, 1 };
            myAddr = new PeerAddress(InetAddress.getByAddress(localhost), params.getPort(), 0);
            theirAddr = new PeerAddress(InetAddress.getByAddress(localhost), params.getPort(), 0);
        } catch (UnknownHostException e) {
            throw new RuntimeException(e);  // Cannot happen (illegal IP length).
        }
        subVer = LIBRARY_SUBVER;
        bestHeight = newBestHeight;
        this.relayTxesBeforeFilter = relayTxesBeforeFilter;

        // Fixed-size portion of the message, plus the var-length subVer.
        length = 85;
        if (protocolVersion > 31402)
            length += 8;
        length += VarInt.sizeOf(subVer.length()) + subVer.length();
    }

    @Override
    protected void parseLite() throws ProtocolException {
        // NOP. VersionMessage is never lazy parsed.
    }

    @Override
    public void parse() throws ProtocolException {
        if (parsed)
            return;
        parsed = true;

        clientVersion = (int) readUint32();
        localServices = readUint64().longValue();
        time = readUint64().longValue();
        myAddr = new PeerAddress(params, bytes, cursor, 0);
        cursor += myAddr.getMessageSize();
        theirAddr = new PeerAddress(params, bytes, cursor, 0);
        cursor += theirAddr.getMessageSize();
        // uint64 localHostNonce (random data)
        // We don't care about the localhost nonce. It's used to detect connecting back to yourself in cases where
        // there are NATs and proxies in the way. However we don't listen for inbound connections so it's irrelevant.
        readUint64();
        try {
            // Initialize default values for flags which may not be sent by old nodes.
            subVer = "";
            bestHeight = 0;
            relayTxesBeforeFilter = true;
            if (!hasMoreBytes())
                return;
            // string subVer (currently "")
            subVer = readStr();
            if (!hasMoreBytes())
                return;
            // int bestHeight (size of known block chain).
            bestHeight = readUint32();
            if (!hasMoreBytes())
                return;
            relayTxesBeforeFilter = readBytes(1)[0] != 0;
        } finally {
            // Record the consumed size even on early return above.
            length = cursor - offset;
        }
    }

    @Override
    public void bitcoinSerializeToStream(OutputStream buf) throws IOException {
        // 64-bit fields are written as two little-endian 32-bit halves.
        Utils.uint32ToByteStreamLE(clientVersion, buf);
        Utils.uint32ToByteStreamLE(localServices, buf);
        Utils.uint32ToByteStreamLE(localServices >> 32, buf);
        Utils.uint32ToByteStreamLE(time, buf);
        Utils.uint32ToByteStreamLE(time >> 32, buf);
        try {
            // My address.
            myAddr.bitcoinSerialize(buf);
            // Their address.
            theirAddr.bitcoinSerialize(buf);
        } catch (UnknownHostException e) {
            throw new RuntimeException(e);  // Can't happen.
        } catch (IOException e) {
            throw new RuntimeException(e);  // Can't happen.
        }
        // Next up is the "local host nonce", this is to detect the case of connecting
        // back to yourself. We don't care about this as we won't be accepting inbound
        // connections.
        Utils.uint32ToByteStreamLE(0, buf);
        Utils.uint32ToByteStreamLE(0, buf);
        // Now comes subVer.
        byte[] subVerBytes = subVer.getBytes("UTF-8");
        buf.write(new VarInt(subVerBytes.length).encode());
        buf.write(subVerBytes);
        // Size of known block chain.
        Utils.uint32ToByteStreamLE(bestHeight, buf);
        buf.write(relayTxesBeforeFilter ? 1 : 0);
    }

    /**
     * Returns true if the version message indicates the sender has a full copy of the block chain
     * (i.e. the NODE_NETWORK service bit is set).
     */
    public boolean hasBlockChain() {
        return (localServices & NODE_NETWORK) == NODE_NETWORK;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof VersionMessage)) return false;
        VersionMessage other = (VersionMessage) o;
        return other.bestHeight == bestHeight &&
                other.clientVersion == clientVersion &&
                other.localServices == localServices &&
                other.time == time &&
                other.subVer.equals(subVer) &&
                other.myAddr.equals(myAddr) &&
                other.theirAddr.equals(theirAddr) &&
                other.relayTxesBeforeFilter == relayTxesBeforeFilter;
    }

    /**
     * VersionMessage does not handle cached byte array so should not have a cached checksum.
     */
    @Override
    byte[] getChecksum() {
        throw new UnsupportedOperationException();
    }

    /**
     * VersionMessage does not handle cached byte array so should not have a cached checksum.
     */
    @Override
    public void setChecksum(byte[] checksum) {
        throw new UnsupportedOperationException();
    }

    @Override
    public int hashCode() {
        // NOTE(review): '*' binds tighter than '^', so only theirAddr.hashCode()
        // is scaled by the relay flag — presumably intentional enough for a hash,
        // but worth confirming against equals().
        return (int) bestHeight ^ clientVersion ^ (int) localServices ^ (int) time ^ subVer.hashCode() ^ myAddr.hashCode()
                ^ theirAddr.hashCode() * (relayTxesBeforeFilter ? 1 : 2);
    }

    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("\n");
        sb.append("client version: ").append(clientVersion).append("\n");
        sb.append("local services: ").append(localServices).append("\n");
        sb.append("time:           ").append(time).append("\n");
        sb.append("my addr:        ").append(myAddr).append("\n");
        sb.append("their addr:     ").append(theirAddr).append("\n");
        sb.append("sub version:    ").append(subVer).append("\n");
        sb.append("best height:    ").append(bestHeight).append("\n");
        sb.append("delay tx relay: ").append(relayTxesBeforeFilter).append("\n");
        return sb.toString();
    }

    /** Returns a shallow field-by-field copy of this message. */
    public VersionMessage duplicate() {
        VersionMessage v = new VersionMessage(params, (int) bestHeight, relayTxesBeforeFilter);
        v.clientVersion = clientVersion;
        v.localServices = localServices;
        v.time = time;
        v.myAddr = myAddr;
        v.theirAddr = theirAddr;
        v.subVer = subVer;
        return v;
    }

    /**
     * Appends the given user-agent information to the subVer field. The subVer is composed of a series of
     * name:version pairs separated by slashes in the form of a path. For example a typical subVer field for BitCoinJ
     * users might look like "/BitCoinJ:0.4-SNAPSHOT/MultiBit:1.2/" where libraries come further to the left.<p>
     *
     * There can be as many components as you feel a need for, and the version string can be anything, but it is
     * recommended to use A.B.C where A = major, B = minor and C = revision for software releases, and dates for
     * auto-generated source repository snapshots. A valid subVer begins and ends with a slash, therefore name
     * and version are not allowed to contain such characters. <p>
     *
     * Anything put in the "comments" field will appear in brackets and may be used for platform info, or anything
     * else. For example, calling <tt>appendToSubVer("MultiBit", "1.0", "Windows")</tt> will append
     * "MultiBit:1.0(Windows)/" to the existing subVer. Therefore the / ( and ) characters are reserved in all these
     * components. If you don't want to add a comment (recommended), pass null.<p>
     *
     * See <a href="https://en.bitcoin.it/wiki/BIP_0014">BIP 14</a> for more information.
     *
     * @param comments Optional (can be null) platform or other node specific information.
     * @throws IllegalArgumentException if name, version or comments contains invalid characters.
     */
    public void appendToSubVer(String name, String version, @Nullable String comments) {
        checkSubVerComponent(name);
        checkSubVerComponent(version);
        if (comments != null) {
            checkSubVerComponent(comments);
            subVer = subVer.concat(String.format("%s:%s(%s)/", name, version, comments));
        } else {
            subVer = subVer.concat(String.format("%s:%s/", name, version));
        }
    }

    // Rejects components containing the reserved BIP 14 delimiter characters.
    private static void checkSubVerComponent(String component) {
        if (component.contains("/") || component.contains("(") || component.contains(")"))
            throw new IllegalArgumentException("name contains invalid characters");
    }

    /**
     * Returns true if the clientVersion field is >= Pong.MIN_PROTOCOL_VERSION. If it is then ping() is usable.
     */
    public boolean isPingPongSupported() {
        return clientVersion >= Pong.MIN_PROTOCOL_VERSION;
    }

    /**
     * Returns true if the clientVersion field is >= FilteredBlock.MIN_PROTOCOL_VERSION. If it is then Bloom filtering
     * is available and the memory pool of the remote peer will be queried when the downloadData property is true.
     */
    public boolean isBloomFilteringSupported() {
        return clientVersion >= FilteredBlock.MIN_PROTOCOL_VERSION;
    }
}
package net.techreadiness.persistence.domain; import java.io.Serializable; import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.OrderBy; import javax.persistence.Table; import net.techreadiness.persistence.AuditedBaseEntity; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; /** * The persistent class for the view_def database table. * */ @Entity @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) @Table(name = "view_def") public class ViewDefDO extends AuditedBaseEntity implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy = GenerationType.AUTO) @Column(name = "view_def_id", unique = true, nullable = false) private Long viewDefId; @Column(nullable = true, length = 200) private String name; @Column(name = "collapsible") private boolean collapsible; @Column(name = "collapsed_by_default") private boolean collapsedByDefault; @Column(name = "column1_width", nullable = true) private String column1Width; @Column(name = "column1_label_width", nullable = true) private String column1LabelWidth; @Column(name = "column2_width", nullable = true) private String column2Width; @Column(name = "column2_label_width", nullable = true) private String column2LabelWidth; @Column(name = "column3_width", nullable = true) private String column3Width; @Column(name = "column3_label_width", nullable = true) private String column3LabelWidth; // bi-directional many-to-one association to ScopeDO @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "scope_id", nullable = false) @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) private ScopeDO scope; // bi-directional 
many-to-one association to ViewDefType @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "view_def_type_id", nullable = false) @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) private ViewDefTypeDO viewDefType; // bi-directional many-to-one association to ViewDefFieldDO @OneToMany(mappedBy = "viewDef", fetch = FetchType.LAZY) @OrderBy("displayOrder, overrideName") @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) private List<ViewDefFieldDO> viewDefFields; @OneToMany(mappedBy = "viewDef") @OrderBy("displayOrder") @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) private List<ViewDefTextDO> viewDefTexts; public ViewDefDO() { } public Long getViewDefId() { return viewDefId; } public void setViewDefId(Long viewDefId) { this.viewDefId = viewDefId; } public ScopeDO getScope() { return scope; } public void setScope(ScopeDO scope) { this.scope = scope; } public ViewDefTypeDO getViewDefType() { return viewDefType; } public void setViewDefType(ViewDefTypeDO viewDefType) { this.viewDefType = viewDefType; } public List<ViewDefFieldDO> getViewDefFields() { return viewDefFields; } public void setViewDefFields(List<ViewDefFieldDO> viewDefFields) { this.viewDefFields = viewDefFields; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getColumn1Width() { return column1Width; } public void setColumn1Width(String column1Width) { this.column1Width = column1Width; } public String getColumn2Width() { return column2Width; } public void setColumn2Width(String column2Width) { this.column2Width = column2Width; } public String getColumn3Width() { return column3Width; } public void setColumn3Width(String column3Width) { this.column3Width = column3Width; } public boolean isCollapsible() { return collapsible; } public void setCollapsible(boolean collapsible) { this.collapsible = collapsible; } public boolean isCollapsedByDefault() { return collapsedByDefault; } public void 
setCollapsedByDefault(boolean collapsedByDefault) { this.collapsedByDefault = collapsedByDefault; } public String getColumn1LabelWidth() { return column1LabelWidth; } public void setColumn1LabelWidth(String column1LabelWidth) { this.column1LabelWidth = column1LabelWidth; } public String getColumn2LabelWidth() { return column2LabelWidth; } public void setColumn2LabelWidth(String column2LabelWidth) { this.column2LabelWidth = column2LabelWidth; } public String getColumn3LabelWidth() { return column3LabelWidth; } public void setColumn3LabelWidth(String column3LabelWidth) { this.column3LabelWidth = column3LabelWidth; } public List<ViewDefTextDO> getViewDefTexts() { return viewDefTexts; } public void setViewDefTexts(List<ViewDefTextDO> viewDefTexts) { this.viewDefTexts = viewDefTexts; } }
/*
 * Copyright 2006-2021 Prowide
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.prowidesoftware.swift.model.field;

import com.prowidesoftware.swift.model.Tag;
import com.prowidesoftware.Generated;
import com.prowidesoftware.deprecation.ProwideDeprecated;
import com.prowidesoftware.deprecation.TargetYear;

import java.io.Serializable;
import java.util.Locale;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;

import com.prowidesoftware.swift.model.field.MultiLineField;
import org.apache.commons.lang3.StringUtils;
import com.prowidesoftware.swift.model.field.SwiftParseUtils;
import com.prowidesoftware.swift.model.field.Field;
import com.prowidesoftware.swift.model.*;
import com.prowidesoftware.swift.utils.SwiftFormatUtils;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

/**
 * SWIFT MT Field 45B.
 * <p>
 * Model and parser for field 45B of a SWIFT MT message.
 *
 * <p>Subfields (components) Data types
 * <ol>
 * <li><code>String</code></li>
 * </ol>
 *
 * <p>Structure definition
 * <ul>
 * <li>validation pattern: <code>CUSTOM</code></li>
 * <li>parser pattern: <code>S</code></li>
 * <li>components pattern: <code>S</code></li>
 * </ul>
 *
 * <p>
 * This class complies with standard release <strong>SRU2021</strong>
 */
// NOTE(review): generated code (@Generated) — keep structural changes out of
// this file; the narrative behaviour lives in StructuredNarrativeField.
@SuppressWarnings("unused")
@Generated
public class Field45B extends StructuredNarrativeField implements Serializable, NarrativeContainer, MultiLineField {
    /**
     * Constant identifying the SRU to which this class belongs to.
     */
    public static final int SRU = 2021;

    private static final long serialVersionUID = 1L;

    /**
     * Constant with the field name 45B.
     */
    public static final String NAME = "45B";

    /**
     * Same as NAME, intended to be clear when using static imports.
     */
    public static final String F_45B = "45B";

    /**
     * Default constructor. Creates a new field setting all components to null.
     */
    public Field45B() {
        super();
    }

    /**
     * Creates a new field and initializes its components with content from the parameter value.
     * @param value complete field value including separators and CRLF
     */
    public Field45B(final String value) {
        super(value);
    }

    /**
     * Creates a new field and initializes its components with content from the parameter tag.
     * The value is parsed with {@link #parse(String)}
     * @throws IllegalArgumentException if the parameter tag is null or its tagname does not match the field name
     * @since 7.8
     */
    public Field45B(final Tag tag) {
        this();
        if (tag == null) {
            throw new IllegalArgumentException("tag cannot be null.");
        }
        if (!StringUtils.equals(tag.getName(), "45B")) {
            throw new IllegalArgumentException("cannot create field 45B from tag "+tag.getName()+", tagname must match the name of the field.");
        }
        parse(tag.getValue());
    }

    /**
     * Creates a new field from a Narrative instance.
     * @see Narrative#builder
     * @param narrative a not-null narrative to use as field value
     * @since 8.1.0
     */
    public Field45B(final Narrative narrative) {
        this(narrative.getValue());
    }

    /**
     * Static copy factory (despite the name, this is not a constructor).
     * Initializes the new field's components list with a fresh list holding the
     * source field's components; the list itself is copied, the component
     * values are shared.
     * @param source a field instance to copy
     * @since 7.7
     */
    public static Field45B newInstance(Field45B source) {
        Field45B cp = new Field45B();
        cp.setComponents(new ArrayList<>(source.getComponents()));
        return cp;
    }

    /**
     * Create a Tag with this field name and the given value.
     * Shorthand for <code>new Tag(NAME, value)</code>
     * @see #NAME
     * @since 7.5
     */
    public static Tag tag(final String value) {
        return new Tag(NAME, value);
    }

    /**
     * Create a Tag with this field name and an empty string as value.
     * Shorthand for <code>new Tag(NAME, "")</code>
     * @see #NAME
     * @since 7.5
     */
    public static Tag emptyTag() {
        return new Tag(NAME, "");
    }

    /**
     * Returns the field validator pattern.
     */
    @Override
    public final String validatorPattern() {
        return "CUSTOM";
    }

    /**
     * Set the component 1 (Narrative).
     *
     * @param component1 the Narrative to set
     * @return the field object to enable build pattern
     */
    public Field45B setComponent1(String component1) {
        setComponent(1, component1);
        return this;
    }

    /**
     * Set the Narrative (component 1).
     *
     * @param component1 the Narrative to set
     * @return the field object to enable build pattern
     */
    public Field45B setNarrative(String component1) {
        return setComponent1(component1);
    }

    /**
     * Returns the field's name composed by the field number and the letter option (if any).
     * @return the static value of Field45B.NAME
     */
    @Override
    public String getName() {
        return NAME;
    }

    /**
     * Gets the first occurrence form the tag list or null if not found.
     * @return null if not found o block is null or empty
     * @param block may be null or empty
     */
    public static Field45B get(final SwiftTagListBlock block) {
        if (block == null || block.isEmpty()) {
            return null;
        }
        final Tag t = block.getTagByName(NAME);
        if (t == null) {
            return null;
        }
        return new Field45B(t);
    }

    /**
     * Gets the first instance of Field45B in the given message.
     * @param msg may be empty or null
     * @return null if not found or msg is empty or null
     * @see #get(SwiftTagListBlock)
     */
    public static Field45B get(final SwiftMessage msg) {
        if (msg == null || msg.getBlock4() == null || msg.getBlock4().isEmpty()) {
            return null;
        }
        return get(msg.getBlock4());
    }

    /**
     * Gets a list of all occurrences of the field Field45B in the given message
     * an empty list is returned if none found.
     * @param msg may be empty or null in which case an empty list is returned
     * @see #getAll(SwiftTagListBlock)
     */
    public static List<Field45B> getAll(final SwiftMessage msg) {
        if (msg == null || msg.getBlock4() == null || msg.getBlock4().isEmpty()) {
            return java.util.Collections.emptyList();
        }
        return getAll(msg.getBlock4());
    }

    /**
     * Gets a list of all occurrences of the field Field45B from the given block
     * an empty list is returned if none found.
     *
     * @param block may be empty or null in which case an empty list is returned
     */
    public static List<Field45B> getAll(final SwiftTagListBlock block) {
        final List<Field45B> result = new ArrayList<>();
        if (block == null || block.isEmpty()) {
            return result;
        }
        final Tag[] arr = block.getTagsByName(NAME);
        if (arr != null && arr.length > 0) {
            for (final Tag f : arr) {
                result.add(new Field45B(f));
            }
        }
        return result;
    }

    /**
     * Returns a specific line from the field's value.
     *
     * @see MultiLineField#getLine(int)
     * @param line a reference to a specific line in the field, first line being 1
     * @return line content or null if not present or if line number is above the expected
     * @since 7.7
     */
    public String getLine(int line) {
        return getLine(line, 0);
    }

    /**
     * Returns a specific line from the field's value.
     *
     * @see MultiLineField#getLine(int, int)
     * @param line a reference to a specific line in the field, first line being 1
     * @param offset an optional component number used as offset when counting lines
     * @return line content or null if not present or if line number is above the expected
     * @since 7.7
     */
    public String getLine(int line, int offset) {
        // works on a copy so that internal component state is not disturbed
        Field45B cp = newInstance(this);
        return getLine(cp, line, null, offset);
    }

    /**
     * Returns the field value split into lines.
     *
     * @see MultiLineField#getLines()
     * @return lines content or empty list if field's value is empty
     * @since 7.7
     */
    public List<String> getLines() {
        return SwiftParseUtils.getLines(getValue());
    }

    /**
     * Returns the field value starting at the offset component, split into lines.
     *
     * @see MultiLineField#getLines(int)
     * @param offset an optional component number used as offset when counting lines
     * @return found lines or empty list if lines are not present or the offset is invalid
     * @since 7.7
     */
    public List<String> getLines(int offset) {
        Field45B cp = newInstance(this);
        return SwiftParseUtils.getLines(getLine(cp, null, null, offset));
    }

    /**
     * Returns a specific subset of lines from the field's value, given a range.
     *
     * @see MultiLineField#getLinesBetween(int, int )
     * @param start a reference to a specific line in the field, first line being 1
     * @param end a reference to a specific line in the field, must be greater than start
     * @return found lines or empty list if value is empty
     * @since 7.7
     */
    public List<String> getLinesBetween(int start, int end) {
        return getLinesBetween(start, end, 0);
    }

    /**
     * Returns a specific subset of lines from the field's value, starting at the offset component.
     *
     * @see MultiLineField#getLinesBetween(int start, int end, int offset)
     * @param start a reference to a specific line in the field, first line being 1
     * @param end a reference to a specific line in the field, must be greater than start
     * @param offset an optional component number used as offset when counting lines
     * @return found lines or empty list if lines are not present or the offset is invalid
     * @since 7.7
     */
    public List<String> getLinesBetween(int start, int end, int offset) {
        Field45B cp = newInstance(this);
        return SwiftParseUtils.getLines(getLine(cp, start, end, offset));
    }

    /**
     * This method deserializes the JSON data into a Field45B object.
     * @param json JSON structure including tuples with label and value for all field components
     * @return a new field instance with the JSON data parsed into field components or an empty field id the JSON is invalid
     * @since 7.10.3
     * @see Field#fromJson(String)
     */
    public static Field45B fromJson(final String json) {
        final Field45B field = new Field45B();
        final JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();
        NarrativeContainerJsonUtils.fromJson(jsonObject, json, field);
        return field;
    }

    // Kept only for backward compatibility; scheduled for removal in SRU2022.
    @Deprecated
    @com.prowidesoftware.deprecation.ProwideDeprecated(phase3=com.prowidesoftware.deprecation.TargetYear.SRU2022)
    public static final Integer NARRATIVE = 1;

    /**
     * @deprecated use getValue() instead
     */
    @Deprecated
    @com.prowidesoftware.deprecation.ProwideDeprecated(phase3=com.prowidesoftware.deprecation.TargetYear.SRU2022)
    public String getNarrative() {
        com.prowidesoftware.deprecation.DeprecationUtils.phase2(this.getClass(), "getNarrative()", "Use getValue() instead");
        return getValue();
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.etcd.springboot;

import javax.annotation.Generated;
import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon;
import org.apache.camel.support.jsse.SSLContextParameters;
import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * The camel etcd component allows you to work with Etcd, a distributed reliable
 * key-value store.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.etcd")
public class EtcdComponentConfiguration
        extends
            ComponentConfigurationPropertiesCommon {

    /**
     * Whether to enable auto configuration of the etcd component. This is
     * enabled by default.
     */
    private Boolean enabled;
    /**
     * To set the URIs the client connects.
     */
    private String uris;
    /**
     * To configure security using SSLContextParameters. The option is a
     * org.apache.camel.support.jsse.SSLContextParameters type.
     */
    private String sslContextParameters;
    /**
     * The user name to use for basic authentication.
     */
    private String userName;
    /**
     * The password to use for basic authentication.
     */
    private String password;
    /**
     * Sets the common configuration shared among endpoints
     */
    private EtcdConfigurationNestedConfiguration configuration;
    /**
     * Enable usage of global SSL context parameters.
     */
    private Boolean useGlobalSslContextParameters = false;
    /**
     * Whether the component should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities
     */
    private Boolean basicPropertyBinding = false;
    /**
     * Whether the producer should be started lazy (on the first message). By
     * starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during starting
     * and cause the route to fail being started. By deferring this startup to
     * be lazy then the startup failure can be handled during routing messages
     * via Camel's routing error handlers. Beware that when the first message is
     * processed then creating and starting the producer may take a little time
     * and prolong the total processing time of the processing.
     */
    private Boolean lazyStartProducer = false;
    /**
     * Allows for bridging the consumer to the Camel routing Error Handler,
     * which mean any exceptions occurred while the consumer is trying to pickup
     * incoming messages, or the likes, will now be processed as a message and
     * handled by the routing Error Handler. By default the consumer will use
     * the org.apache.camel.spi.ExceptionHandler to deal with exceptions, that
     * will be logged at WARN or ERROR level and ignored.
     */
    private Boolean bridgeErrorHandler = false;

    public String getUris() {
        return uris;
    }

    public void setUris(String uris) {
        this.uris = uris;
    }

    public String getSslContextParameters() {
        return sslContextParameters;
    }

    public void setSslContextParameters(String sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public EtcdConfigurationNestedConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(
            EtcdConfigurationNestedConfiguration configuration) {
        this.configuration = configuration;
    }

    public Boolean getUseGlobalSslContextParameters() {
        return useGlobalSslContextParameters;
    }

    public void setUseGlobalSslContextParameters(
            Boolean useGlobalSslContextParameters) {
        this.useGlobalSslContextParameters = useGlobalSslContextParameters;
    }

    public Boolean getBasicPropertyBinding() {
        return basicPropertyBinding;
    }

    public void setBasicPropertyBinding(Boolean basicPropertyBinding) {
        this.basicPropertyBinding = basicPropertyBinding;
    }

    public Boolean getLazyStartProducer() {
        return lazyStartProducer;
    }

    public void setLazyStartProducer(Boolean lazyStartProducer) {
        this.lazyStartProducer = lazyStartProducer;
    }

    public Boolean getBridgeErrorHandler() {
        return bridgeErrorHandler;
    }

    public void setBridgeErrorHandler(Boolean bridgeErrorHandler) {
        this.bridgeErrorHandler = bridgeErrorHandler;
    }

    /**
     * Mirror of org.apache.camel.component.etcd.EtcdConfiguration so that its
     * options can be bound from Spring Boot properties.
     */
    public static class EtcdConfigurationNestedConfiguration {
        // Raw Class literal is emitted by the code generator as-is.
        public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.etcd.EtcdConfiguration.class;
        /**
         * To set the URIs the client connects.
         */
        private String uris = "http://localhost:2379,http://localhost:4001";
        /**
         * To configure security using SSLContextParameters.
         */
        private SSLContextParameters sslContextParameters;
        /**
         * The user name to use for basic authentication.
         */
        private String userName;
        /**
         * The password to use for basic authentication.
         */
        private String password;
        /**
         * To send an empty message in case of timeout watching for a key.
         */
        private Boolean sendEmptyExchangeOnTimeout = false;
        /**
         * To apply an action recursively.
         */
        private Boolean recursive = false;
        /**
         * To set the lifespan of a key in milliseconds.
         */
        private Integer timeToLive;
        /**
         * To set the maximum time an action could take to complete.
         */
        private Long timeout;
        /**
         * The index to watch from
         */
        private Long fromIndex = 0L;
        /**
         * The path to look for for service discovery
         */
        private String servicePath = "/services/";

        public String getUris() {
            return uris;
        }

        public void setUris(String uris) {
            this.uris = uris;
        }

        public SSLContextParameters getSslContextParameters() {
            return sslContextParameters;
        }

        public void setSslContextParameters(
                SSLContextParameters sslContextParameters) {
            this.sslContextParameters = sslContextParameters;
        }

        public String getUserName() {
            return userName;
        }

        public void setUserName(String userName) {
            this.userName = userName;
        }

        public String getPassword() {
            return password;
        }

        public void setPassword(String password) {
            this.password = password;
        }

        public Boolean getSendEmptyExchangeOnTimeout() {
            return sendEmptyExchangeOnTimeout;
        }

        public void setSendEmptyExchangeOnTimeout(
                Boolean sendEmptyExchangeOnTimeout) {
            this.sendEmptyExchangeOnTimeout = sendEmptyExchangeOnTimeout;
        }

        public Boolean getRecursive() {
            return recursive;
        }

        public void setRecursive(Boolean recursive) {
            this.recursive = recursive;
        }

        public Integer getTimeToLive() {
            return timeToLive;
        }

        public void setTimeToLive(Integer timeToLive) {
            this.timeToLive = timeToLive;
        }

        public Long getTimeout() {
            return timeout;
        }

        public void setTimeout(Long timeout) {
            this.timeout = timeout;
        }

        public Long getFromIndex() {
            return fromIndex;
        }

        public void setFromIndex(Long fromIndex) {
            this.fromIndex = fromIndex;
        }

        public String getServicePath() {
            return servicePath;
        }

        public void setServicePath(String servicePath) {
            this.servicePath = servicePath;
        }
    }
}
/** * Copyright 2014-2017 Hippo B.V. (http://www.onehippo.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onehippo.forge.exdocpicker.impl.field; import java.io.Serializable; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.wicket.model.IModel; import org.apache.wicket.util.value.IValueMap; import org.apache.wicket.util.value.ValueMap; import org.hippoecm.frontend.dialog.AbstractDialog; import org.hippoecm.frontend.plugin.IPluginContext; import org.hippoecm.frontend.plugin.config.IPluginConfig; import org.onehippo.forge.exdocpicker.api.ExternalDocumentCollection; import org.onehippo.forge.exdocpicker.api.ExternalDocumentServiceContext; import org.onehippo.forge.exdocpicker.api.ExternalDocumentServiceFacade; import org.onehippo.forge.exdocpicker.api.PluginConstants; import org.onehippo.forge.exdocpicker.impl.SimpleExternalDocumentCollection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Abstract external document(s) picker dialog class. */ public abstract class AbstractExternalDocumentFieldBrowserDialog extends AbstractDialog<ExternalDocumentCollection<Serializable>> { private static final long serialVersionUID = 1L; private static Logger log = LoggerFactory.getLogger(AbstractExternalDocumentFieldBrowserDialog.class); /** * Dialog title model. */ private final IModel<String> titleModel; /** * {@link ExternalDocumentServiceFacade} instance. 
*/ private final ExternalDocumentServiceFacade<Serializable> exdocService; /** * Currently picked external documents in the list view UI by end user. */ private final Set<Serializable> pickedExtDocsInUI = new LinkedHashSet<Serializable>(); /** * Currently selected external documents in the document variant node data. */ private ExternalDocumentCollection<Serializable> selectedExtDocsInVariantNode; /** * Searched external documents to show in the list view UI. */ private ExternalDocumentCollection<Serializable> searchedExtDocs = new SimpleExternalDocumentCollection<Serializable>(); /** * Page size. */ private int pageSize; /** * Dialog size. */ private final IValueMap dialogSize; /** * {@link ExternalDocumentServiceContext} instance. */ private final ExternalDocumentServiceContext extDocServiceContext; /** * Constructs external document(s) picker dialog. * @param titleModel title model * @param extDocServiceContext {@link ExternalDocumentServiceContext} instance * @param exdocService {@link ExternalDocumentServiceFacade} instance * @param model the model containing the currently selected external documents in the document node data */ public AbstractExternalDocumentFieldBrowserDialog(IModel<String> titleModel, final ExternalDocumentServiceContext extDocServiceContext, final ExternalDocumentServiceFacade<Serializable> exdocService, IModel<ExternalDocumentCollection<Serializable>> model) { super(model); setOutputMarkupId(true); this.titleModel = titleModel; this.extDocServiceContext = extDocServiceContext; this.exdocService = exdocService; final String dialogSizeParam = getPluginConfig().getString(PluginConstants.PARAM_DIALOG_SIZE, PluginConstants.DEFAULT_DIALOG_SIZE); dialogSize = new ValueMap(dialogSizeParam).makeImmutable(); pageSize = getPluginConfig().getInt(PluginConstants.PARAM_PAGE_SIZE, PluginConstants.DEFAULT_PAGE_SIZE); selectedExtDocsInVariantNode = getModelObject(); pickedExtDocsInUI.clear(); for (Iterator<? 
extends Serializable> it = selectedExtDocsInVariantNode.iterator(); it.hasNext();) { pickedExtDocsInUI.add(it.next()); } if (getModel().getObject() == null) { setOkVisible(false); setOkEnabled(false); } initializeSearchedExternalDocuments(); initializeDataListView(); } /** * {@inheritDoc} */ @Override protected void onOk() { if (pickedExtDocsInUI != null) { if (isSingleSelectionMode()) { selectedExtDocsInVariantNode.clear(); Serializable curDoc = null; // when single selection mode, let's add the last added item only. for (Iterator<Serializable> it = pickedExtDocsInUI.iterator(); it.hasNext();) { curDoc = it.next(); } if (curDoc != null) { selectedExtDocsInVariantNode.add(curDoc); } exdocService.setFieldExternalDocuments(extDocServiceContext, selectedExtDocsInVariantNode); } else { boolean added = false; for (Serializable doc : pickedExtDocsInUI) { if (!selectedExtDocsInVariantNode.contains(doc)) { selectedExtDocsInVariantNode.add(doc); added = true; } } if (added) { exdocService.setFieldExternalDocuments(extDocServiceContext, selectedExtDocsInVariantNode); } } } } /** * {@inheritDoc} */ @Override public IModel<String> getTitle() { return titleModel; } /** * {@inheritDoc} */ @Override public IValueMap getProperties() { return dialogSize; } /** * Returns the plugin config. * @return the plugin config */ protected IPluginConfig getPluginConfig() { return extDocServiceContext.getPluginConfig(); } /** * Returns the plugin context. * @return the plugin context */ protected IPluginContext getPluginContext() { return extDocServiceContext.getPluginContext(); } /** * Returns {@link ExternalDocumentServiceFacade} instance. * @return {@link ExternalDocumentServiceFacade} instance */ protected ExternalDocumentServiceFacade<Serializable> getExternalDocumentServiceFacade() { return exdocService; } /** * Returns {@link ExternalDocumentServiceContext} instance. 
* @return {@link ExternalDocumentServiceContext} instance */ protected ExternalDocumentServiceContext getExternalDocumentServiceContext() { return extDocServiceContext; } /** * Returns currently picked external documents in the list view UI. * @return currently picked external documents in the list view UI */ protected Set<Serializable> getPickedExternalDocuments() { return pickedExtDocsInUI; } /** * Returns currently selected external documents in the document variant data node. * @return currently selected external documents in the document variant data node */ protected ExternalDocumentCollection<Serializable> getSelectedExternalDocuments() { return selectedExtDocsInVariantNode; } /** * Returns searched external documents to show in the list view UI. * @return searched external documents to show in the list view UI */ protected ExternalDocumentCollection<Serializable> getSearchedExternalDocuments() { return searchedExtDocs; } /** * Returns true if the selection mode in UI is 'single'. * @return true if the selection mode in UI is 'single' */ protected boolean isSingleSelectionMode() { return StringUtils.equalsIgnoreCase(PluginConstants.SELECTION_MODE_SINGLE, getPluginConfig() .getString(PluginConstants.PARAM_SELECTION_MODE, PluginConstants.SELECTION_MODE_MULTIPLE)); } /** * Returns the page size. * @return the page size */ protected int getPageSize() { return pageSize; } /** * Returns the dialog size. * @return the dialog size */ protected IValueMap getDialogSize() { return dialogSize; } /** * Initializes the {@link #searchedExtDocs} on construction. * Invoked during the construction of this dialog instance. */ abstract protected void initializeSearchedExternalDocuments(); /** * Initializes the data list view UI. * Invoked during the construction of this dialog instance. */ abstract protected void initializeDataListView(); }
/*L
 * Copyright SAIC
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/stats-analysis/LICENSE.txt for details.
 */
package gov.nih.nci.caintegrator.studyQueryService.test.germline;

import gov.nih.nci.caintegrator.domain.analysis.snp.bean.SNPAssociationFinding;
import gov.nih.nci.caintegrator.domain.finding.bean.Finding;
import gov.nih.nci.caintegrator.domain.annotation.gene.bean.GeneBiomarker;
import gov.nih.nci.caintegrator.studyQueryService.dto.annotation.AnnotationCriteria;
import gov.nih.nci.caintegrator.studyQueryService.dto.germline.*;
import gov.nih.nci.caintegrator.studyQueryService.germline.FindingsManager;
import gov.nih.nci.caintegrator.studyQueryService.germline.FindingsHandler;
import gov.nih.nci.caintegrator.util.ArithematicOperator;

import java.util.*;
import java.sql.Connection;

/**
 * Integration test for SNP association finding queries against the CGEMS
 * prostate-cancer study data.  Exercises both the paged search path
 * ({@code FindingsManager.getFindings}) and the streaming/batched path
 * ({@code FindingsManager.populateFindings}).
 * <p>
 * Author: Ram Bhattaru
 * Date: Jul 21, 2006
 * Time: 4:38:44 PM
 */
public class SNPAssociationFindingsTest extends CGEMSTest {
    // Criteria DTO under test; obtained from the Spring context in setUp()
    // and re-populated by each test before a search is executed.
    private SNPAssociationFindingCriteriaDTO safDTO;

    /**
     * Wires the criteria DTO from the Spring bean context and attaches the
     * study/annotation criteria that CGEMSTest's setUp() prepared.
     */
    public void setUp() throws Exception {
        super.setUp();
        safDTO = (SNPAssociationFindingCriteriaDTO) ctx.getBean("snpAssociationFindingsCriteria");
        safDTO.setStudyCriteria(studyCrit);
        safDTO.setAnnotationCriteria(annotCrit);
    }

    /**
     * Runs a single paged search (rows 0..101) for the CGEMS Prostate Cancer
     * WGAS Phase 1 study using panel + finding criteria.  The commented-out
     * calls are alternative criteria setups kept for manual experimentation.
     */
    public void testSNPAssocAnalysisFindingCriteriaDTO() {
        // 1. setup Annotation Criteria
        //setUpSNPPhysicalPositionCrit();
        //setUpDBSnpCrit();
        setUpPanelCrit();
        //setUpGeneBiomarkerCrit();
        //setSNPAssociationAnalysisCriteria();
        //setSNPAssociationGroupCriteria();
        setSNPFindingCriteria();
        studyCrit.setName("CGEMS Prostate Cancer WGAS Phase 1");
        safDTO.setStudyCriteria(studyCrit);
        executeSearch(0, 101);
    }

    /**
     * Executes the search for the current {@link #safDTO} criteria, dumping
     * each finding (and its associated gene biomarkers) to stdout, and
     * reports wall-clock time taken.
     *
     * @param startIndex first row index (inclusive) of the result page
     * @param endIndex   last row index of the result page
     * @return the findings returned by the manager, or null on any error
     */
    public Collection executeSearch(int startIndex, int endIndex) {
        try {
            Long t1 = System.currentTimeMillis();
            Collection<? extends Finding> findings = manager.getFindings(safDTO, startIndex, endIndex);
            /*
            Connection connection = manager.getSnpAssociationFindingsHandler().getSessionFactory().getCurrentSession().connection();
            connection.close();
            */
            System.out.println("RESULTS COUNT: " + findings.size());
            for (Iterator<? extends Finding> iterator = findings.iterator(); iterator.hasNext();) {
                SNPAssociationFinding finding = (SNPAssociationFinding) iterator.next();
                System.out.println("ID: " + finding.getId());
                System.out.println("pValue" + finding.getPvalue());
                System.out.println("Rank" + finding.getRank());
                System.out.println("DBSNP ID: " + finding.getSnpAnnotation().getDbsnpId());
                System.out.println("Analysis Name: " + finding.getSnpAssociationAnalysis().getName());
                System.out.println("Physical Position: " + finding.getSnpAnnotation().getChromosomeLocation());
                System.out.println("Chromosome: " + finding.getSnpAnnotation().getChromosomeName());
                System.out.print("Associated Genes: " );
                Collection<GeneBiomarker> bioMarkers = finding.getSnpAnnotation().getGeneBiomarkerCollection();
                for (Iterator<GeneBiomarker> iterator1 = bioMarkers.iterator(); iterator1.hasNext();) {
                    GeneBiomarker geneBiomarker = iterator1.next();
                    System.out.println(geneBiomarker.getHugoGeneSymbol() + " ");
                    System.out.println("START PhyscialLocation of the bioMarker:" + geneBiomarker.getStartPhyscialLocation());
                    System.out.println("END PhyscialLocation of the bioMarker:" + geneBiomarker.getEndPhysicalLocation());
                    System.out.println("END Chromosome of the bioMarker:" + geneBiomarker.getChromosome());
                }
            }
            Long t2 = System.currentTimeMillis();
            System.out.println("Time Taken: " + (t2 - t1) + " ms" );
            return findings;
        } catch (Throwable t) {
            // NOTE(review): catching Throwable and returning null silently
            // masks failures — a test should let the exception fail it.
            System.out.println("CGEMS Exception: ");
            t.printStackTrace();
        }
        return null;
    }

    // Sets an analysis-group criteria with several candidate group names.
    // Currently unused (call sites are commented out above).
    private void setSNPAssociationGroupCriteria() {
        AnalysisGroupCriteria groupCrit = new AnalysisGroupCriteria(new Long(1));
        String[] names = new String[] {"Test Name for 9999", "Both Name And Method", "Only Name"};
        groupCrit.setNames(names);
        safDTO.setAnalysisGroupCriteria(groupCrit);
    }

    // Restricts the search to analysis code "S1C1" and rank <= 1000.
    private void setSNPFindingCriteria() {
        //safDTO.setpValue(new Float(0.4), ArithematicOperator.LE);
        SNPAssociationAnalysisCriteria assocCrit = new SNPAssociationAnalysisCriteria(new Long(1));
        //assocCrit.setName("Incidence density sampling, Unadjusted score test");
        assocCrit.setAnalysisCode("S1C1");
        Collection<SNPAssociationAnalysisCriteria> list = new ArrayList<SNPAssociationAnalysisCriteria>();
        list.add(assocCrit);
        safDTO.setSnpAssociationAnalysisCriteriaCollection(list);
        safDTO.setRank(new Integer(1000), ArithematicOperator.LE);
    }

    // Alternative analysis-criteria setup (analysis code only); the
    // commented block shows method-only / name-only variants once tried.
    private void setSNPAssociationAnalysisCriteria() {
        Collection analysisCrits = new ArrayList<SNPAssociationAnalysisCriteria>();
        SNPAssociationAnalysisCriteria methodAndNameCrit = new SNPAssociationAnalysisCriteria(new Long(1));
        //methodAndNameCrit.setMethods("P-Test");
        // methodAndNameCrit.setName("Incidence density sampling, Unadjusted score test");
        methodAndNameCrit.setAnalysisCode("S1C1");
        analysisCrits.add(methodAndNameCrit);
        /*
        SNPAssociationAnalysisCriteria methodOnlyCrit = new SNPAssociationAnalysisCriteria();
        methodOnlyCrit.setMethods("Q-Test");
        analysisCrits.add(methodOnlyCrit);
        SNPAssociationAnalysisCriteria nameOnlyCrit = new SNPAssociationAnalysisCriteria();
        nameOnlyCrit.setName("Cluster");
        analysisCrits.add(nameOnlyCrit);
        */
        safDTO.setSnpAssociationAnalysisCriteriaCollection(analysisCrits);
    }

    /**
     * Exercises the producer/consumer streaming path: a background thread
     * fills {@code findingsToBePopulated} with batches (HashSets) while this
     * thread drains them, printing each batch.  An empty batch signals that
     * the producer is finished.
     */
    public void testPopulateFindings() {
        setUpSNPPhysicalPositionCrit();
        // setUpGeneBiomarkerCrit();
        //setSNPAssociationAnalysisCriteria();
        //setSNPAssociationGroupCriteria();
        setUpPanelCrit();
        setSNPFindingCriteria();
        studyCrit.setName("CGEMS Prostate Cancer WGAS Phase 1");
        safDTO.setStudyCriteria(studyCrit);
        try {
            HashSet actualBatchFindings = new HashSet();
            // synchronizedList: shared between this thread and the producer below.
            final List findingsToBePopulated = Collections.synchronizedList(new ArrayList());
            new Thread(new Runnable() {
                public void run() {
                    try {
                        manager.populateFindings(safDTO, findingsToBePopulated);
                    } catch(Throwable t) {
                        t.printStackTrace();
                        System.out.println("Error from FindingsManager.populateFindings call: ");
                    }
                }
            } ).start();
            boolean sleep = true;
            int count = 1;
            int noOfResults = 0;
            boolean run = true;
            do {
                // Lock the shared list while draining one batch so the
                // producer cannot mutate it mid-iteration.
                synchronized(findingsToBePopulated) {
                    if (findingsToBePopulated.size() > 0) {
                        run = true;
                        actualBatchFindings = (HashSet) findingsToBePopulated.remove(0);
                        for (Iterator iterator = actualBatchFindings.iterator(); iterator.hasNext();) {
                            SNPAssociationFinding finding = (SNPAssociationFinding) iterator.next();
                            System.out.println("ID: " + finding.getId());
                            System.out.println("pValue" + finding.getPvalue());
                            System.out.println("Rank" + finding.getRank());
                            System.out.println("DBSNP ID: " + finding.getSnpAnnotation().getDbsnpId());
                            System.out.println("Analysis Name: " + finding.getSnpAssociationAnalysis().getName());
                            System.out.println("Physical Position: " + finding.getSnpAnnotation().getChromosomeLocation());
                            System.out.println("Chromosome: " + finding.getSnpAnnotation().getChromosomeName());
                            System.out.print("Associated Genes: " );
                        }
                        noOfResults += actualBatchFindings.size();
                        System.out.println("WRITTEN BATCH: " + count++ + " SIZE: " + actualBatchFindings.size() + "\n\n");
                        if (actualBatchFindings.size() == 0) {
                            /* means no more to results are coming. Finished */
                            break;
                        }
                    }
                }
                // NOTE(review): this is Thread.sleep(10) — a static method
                // misleadingly invoked via an instance reference.
                Thread.currentThread().sleep(10);
                // NOTE(review): nulling loop-local refs does not free the
                // elements still held by the list; this loop has no effect.
                for (Iterator iterator = findingsToBePopulated.iterator(); iterator.hasNext();) {
                    Object toBeGCed = iterator.next();
                    toBeGCed = null;
                }
                actualBatchFindings = null;
            } while(true);
            System.out.println("ALL RESULTS WERE RECEIVED TOTAL: " + noOfResults);
        } catch (Exception e) {
            e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
        }
    }
}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HasThread;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Runs periodically to determine if the HLog should be rolled.
 *
 * NOTE: This class extends Thread rather than Chore because the sleep time
 * can be interrupted when there is something to do, rather than the Chore
 * sleep time which is invariant.
 */
@InterfaceAudience.Private
class LogRoller extends HasThread implements WALActionsListener {
  static final Log LOG = LogFactory.getLog(LogRoller.class);
  // Guards the actual roll; also used by interruptIfNecessary() to decide
  // whether the thread is mid-roll (lock held) or sleeping (lock free).
  private final ReentrantLock rollLock = new ReentrantLock();
  // Doubles as (a) the "roll was requested" flag and (b) the monitor this
  // thread waits on between checks; logRollRequested() notifies it.
  private final AtomicBoolean rollLog = new AtomicBoolean(false);
  private final Server server;
  protected final RegionServerServices services;
  // Timestamp of the last roll; volatile because it is read on each loop
  // iteration and written inside the locked section.
  private volatile long lastrolltime = System.currentTimeMillis();
  // Period to roll log.
  private final long rollperiod;
  private final int threadWakeFrequency;

  /** @param server */
  public LogRoller(final Server server, final RegionServerServices services) {
    super();
    this.server = server;
    this.services = services;
    // Default roll period: 1 hour (3600000 ms).
    this.rollperiod = this.server.getConfiguration().
      getLong("hbase.regionserver.logroll.period", 3600000);
    this.threadWakeFrequency = this.server.getConfiguration().
      getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
  }

  /**
   * Main loop: sleep until either a roll is requested (rollLog set + notify)
   * or the periodic roll interval elapses, then roll the WAL writer and
   * schedule flushes for any regions the WAL says are holding up old logs.
   * Aborts the server on unrecoverable close/IO errors.
   */
  @Override
  public void run() {
    while (!server.isStopped()) {
      long now = System.currentTimeMillis();
      boolean periodic = false;
      if (!rollLog.get()) {
        periodic = (now - this.lastrolltime) > this.rollperiod;
        if (!periodic) {
          // Nothing to do yet: wait (bounded) for a roll request.
          synchronized (rollLog) {
            try {
              rollLog.wait(this.threadWakeFrequency);
            } catch (InterruptedException e) {
              // Fall through
            }
          }
          continue;
        }
        // Time for periodic roll
        if (LOG.isDebugEnabled()) {
          LOG.debug("Hlog roll period " + this.rollperiod + "ms elapsed");
        }
      } else if (LOG.isDebugEnabled()) {
        LOG.debug("HLog roll requested");
      }
      rollLock.lock(); // FindBugs UL_UNRELEASED_LOCK_EXCEPTION_PATH
      try {
        this.lastrolltime = now;
        // This is array of actual region names.
        byte [][] regionsToFlush = this.services.getWAL().rollWriter(rollLog.get());
        if (regionsToFlush != null) {
          for (byte [] r: regionsToFlush) scheduleFlush(r);
        }
      } catch (FailedLogCloseException e) {
        server.abort("Failed log close in log roller", e);
      } catch (java.net.ConnectException e) {
        server.abort("Failed log close in log roller", e);
      } catch (IOException ex) {
        // Abort if we get here. We probably won't recover an IOE. HBASE-1132
        server.abort("IOE in log roller", RemoteExceptionHandler.checkIOException(ex));
      } catch (Exception ex) {
        LOG.error("Log rolling failed", ex);
        server.abort("Log rolling failed", ex);
      } finally {
        // Clear the request flag before releasing the lock, even if the
        // roll failed; inner try ensures unlock always happens.
        try {
          rollLog.set(false);
        } finally {
          rollLock.unlock();
        }
      }
    }
    LOG.info("LogRoller exiting.");
  }

  /**
   * @param encodedRegionName Encoded name of region to flush.
   */
  private void scheduleFlush(final byte [] encodedRegionName) {
    boolean scheduled = false;
    HRegion r = this.services.getFromOnlineRegions(Bytes.toString(encodedRegionName));
    FlushRequester requester = null;
    if (r != null) {
      requester = this.services.getFlushRequester();
      if (requester != null) {
        requester.requestFlush(r);
        scheduled = true;
      }
    }
    if (!scheduled) {
      // Region offline or no flush requester available; log and move on.
      LOG.warn("Failed to schedule flush of " +
        Bytes.toString(encodedRegionName) + ", region=" + r + ", requester=" +
        requester);
    }
  }

  // WALActionsListener callback: request an immediate roll and wake the
  // loop if it is waiting on the rollLog monitor.
  public void logRollRequested() {
    synchronized (rollLog) {
      rollLog.set(true);
      rollLog.notifyAll();
    }
  }

  /**
   * Called by region server to wake up this thread if it sleeping.
   * It is sleeping if rollLock is not held.
   */
  public void interruptIfNecessary() {
    try {
      rollLock.lock();
      this.interrupt();
    } finally {
      rollLock.unlock();
    }
  }

  // Overridden by MetaLogRoller to return the META WAL instead.
  protected HLog getWAL() {
    return this.services.getWAL();
  }

  @Override
  public void preLogRoll(Path oldPath, Path newPath) throws IOException {
    // Not interested
  }

  @Override
  public void postLogRoll(Path oldPath, Path newPath) throws IOException {
    // Not interested
  }

  @Override
  public void preLogArchive(Path oldPath, Path newPath) throws IOException {
    // Not interested
  }

  @Override
  public void postLogArchive(Path oldPath, Path newPath) throws IOException {
    // Not interested
  }

  @Override
  public void visitLogEntryBeforeWrite(HRegionInfo info, HLogKey logKey,
      WALEdit logEdit) {
    // Not interested.
  }

  @Override
  public void visitLogEntryBeforeWrite(HTableDescriptor htd, HLogKey logKey,
      WALEdit logEdit) {
    //Not interested
  }

  @Override
  public void logCloseRequested() {
    // not interested
  }
}

/** LogRoller variant that rolls the META region's WAL. */
class MetaLogRoller extends LogRoller {
  public MetaLogRoller(Server server, RegionServerServices services) {
    super(server, services);
  }
  @Override
  protected HLog getWAL() {
    return services.getMetaWAL();
  }
}
/*
 *******************************************************************************
 * Copyright (C) 2005-2010, International Business Machines Corporation and    *
 * others. All Rights Reserved.                                                *
 *******************************************************************************
 */
package com.ibm.icu.dev.test.calendar;

import java.util.Date;
import java.util.Locale;

import com.ibm.icu.impl.LocaleUtility;
import com.ibm.icu.text.DateFormat;
import com.ibm.icu.text.SimpleDateFormat;
import com.ibm.icu.util.Calendar;
import com.ibm.icu.util.CopticCalendar;
import com.ibm.icu.util.EthiopicCalendar;
import com.ibm.icu.util.GregorianCalendar;
import com.ibm.icu.util.TimeZone;
import com.ibm.icu.util.ULocale;

/**
 * Tests for the <code>CopticCalendar</code> class.
 */
public class CopticTest extends CalendarTest {
    public static void main(String args[]) throws Exception {
        new CopticTest().run(args);
    }

    /** Constants to save typing. */
    public static final int TOUT = CopticCalendar.TOUT;
    public static final int BABA = CopticCalendar.BABA;
    public static final int HATOR = CopticCalendar.HATOR;
    public static final int KIAHK = CopticCalendar.KIAHK;
    public static final int TOBA = CopticCalendar.TOBA;
    public static final int AMSHIR = CopticCalendar.AMSHIR;
    public static final int BARAMHAT = CopticCalendar.BARAMHAT;
    public static final int BARAMOUDA = CopticCalendar.BARAMOUDA;
    public static final int BASHANS = CopticCalendar.BASHANS;
    public static final int PAONA = CopticCalendar.PAONA;
    public static final int EPEP = CopticCalendar.EPEP;
    public static final int MESRA = CopticCalendar.MESRA;
    public static final int NASIE = CopticCalendar.NASIE;

    /* Test dates from:
     * "The Amharic Letters of Emperor Theodore of Ethiopia to Queen Victoria and
     * Her Special Envoy", David Appleyard, Girma Selasse Asfaw, Oxford University Press,
     * June 1 1979, ISBN: 0856726605, Longwood Pr Ltd
     *
     *  Coptic       Gregorian    JD
     * 20/02/1579   29/10/1862   2401443
     * 29/10/1581   05/07/1865   2402423
     * 22/05/1582   29/01/1866   2402631
     * 10/08/1582   17/04/1866   2402709
     * 28/04/1583   05/01/1867   2402972
     * 05/05/1584   13/01/1868   2403345
     *
     * --------------------------------------------------
     *
     * From the Calendrica applet:
     *   http://emr.cs.iit.edu/home/reingold/calendar-book/Calendrica.html
     *
     *  Coptic       Gregorian    JD
     * 07/05/-284   01/01/0000   1721060
     * 08/05/-283   01/01/0001   1721426
     * 06/13/-1     29/08/0283   1824664
     *
     * 01/01/0000   30/08/0283   1824665
     * 01/01/0001   29/08/0284   1825030
     * 01/01/0002   29/08/0285   1825395
     * 01/01/0003   29/08/0286   1825760
     * 01/01/0004   30/08/0287   1826126
     * 05/13/0000   28/08/0284   1825029
     * 05/13/0001   28/08/0285   1825394
     * 05/13/0002   28/08/0286   1825759
     * 05/13/0003   28/08/0287   1826124
     * 06/13/0003   29/08/0287   1826125  first coptic leap year
     * 05/13/0004   28/08/0288   1826490
     *
     * 06/02/1299   13/10/1582   2299159
     * 07/02/1299   14/10/1582   2299160  Julian 04/10/1582
     * 08/02/1299   15/10/1582   2299161
     * 09/02/1299   16/10/1582   2299162
     *
     * 23/04/1616   01/01/1900   2415021
     * 23/04/1721   01/01/2005   2453372
     * 05/13/2000   12/09/2284   2555529
     */

    /** A huge list of test cases to make sure that computeTime and computeFields
     * work properly for a wide range of data in the civil calendar.
     */
    public void TestCases() {
        final TestCase[] tests = {
            //
            // The months in this table are 1-based rather than 0-based,
            // because it's easier to edit that way.
            //                      Coptic
            //          Julian Day  Era  Year  Month Day  WkDay Hour Min Sec
            //
            // Dates from "Emporer Theodore..."
            new TestCase(2401442.5,  1,  1579,    2,  20,  WED,   0,  0,  0), // Gregorian: 20/10/1862
            new TestCase(2402422.5,  1,  1581,   10,  29,  WED,   0,  0,  0), // Gregorian: 05/07/1865
            new TestCase(2402630.5,  1,  1582,    5,  22,  MON,   0,  0,  0), // Gregorian: 29/01/1866
            new TestCase(2402708.5,  1,  1582,    8,  10,  TUE,   0,  0,  0), // Gregorian: 17/04/1866
            new TestCase(2402971.5,  1,  1583,    4,  28,  SAT,   0,  0,  0), // Gregorian: 05/01/1867
            new TestCase(2403344.5,  1,  1584,    5,   5,  MON,   0,  0,  0), // Gregorian: 13/01/1868
            new TestCase(1721059.5,  0,   285,    5,   7,  SAT,   0,  0,  0), // Gregorian: 01/01/0000
            new TestCase(1721425.5,  0,   284,    5,   8,  MON,   0,  0,  0), // Gregorian: 01/01/0001
            new TestCase(1824663.5,  0,     2,   13,   6,  WED,   0,  0,  0), // Gregorian: 29/08/0283
            new TestCase(1824664.5,  0,     1,    1,   1,  THU,   0,  0,  0), // Gregorian: 30/08/0283
            new TestCase(1825029.5,  1,     1,    1,   1,  FRI,   0,  0,  0), // Gregorian: 29/08/0284
            new TestCase(1825394.5,  1,     2,    1,   1,  SAT,   0,  0,  0), // Gregorian: 29/08/0285
            new TestCase(1825759.5,  1,     3,    1,   1,  SUN,   0,  0,  0), // Gregorian: 29/08/0286
            new TestCase(1826125.5,  1,     4,    1,   1,  TUE,   0,  0,  0), // Gregorian: 30/08/0287
            new TestCase(1825028.5,  0,     1,   13,   5,  THU,   0,  0,  0), // Gregorian: 28/08/0284
            new TestCase(1825393.5,  1,     1,   13,   5,  FRI,   0,  0,  0), // Gregorian: 28/08/0285
            new TestCase(1825758.5,  1,     2,   13,   5,  SAT,   0,  0,  0), // Gregorian: 28/08/0286
            new TestCase(1826123.5,  1,     3,   13,   5,  SUN,   0,  0,  0), // Gregorian: 28/08/0287
            new TestCase(1826124.5,  1,     3,   13,   6,  MON,   0,  0,  0), // Gregorian: 29/08/0287
            // above is first coptic leap year
            new TestCase(1826489.5,  1,     4,   13,   5,  TUE,   0,  0,  0), // Gregorian: 28/08/0288
            new TestCase(2299158.5,  1,  1299,    2,   6,  WED,   0,  0,  0), // Gregorian: 13/10/1582
            new TestCase(2299159.5,  1,  1299,    2,   7,  THU,   0,  0,  0), // Gregorian: 14/10/1582
            new TestCase(2299160.5,  1,  1299,    2,   8,  FRI,   0,  0,  0), // Gregorian: 15/10/1582
            new TestCase(2299161.5,  1,  1299,    2,   9,  SAT,   0,  0,  0), // Gregorian: 16/10/1582
            new TestCase(2415020.5,  1,  1616,    4,  23,  MON,   0,  0,  0), // Gregorian: 01/01/1900
            new TestCase(2453371.5,  1,  1721,    4,  23,  SAT,   0,  0,  0), // Gregorian: 01/01/2005
            new TestCase(2555528.5,  1,  2000,   13,   5,  FRI,   0,  0,  0), // Gregorian: 12/09/2284
        };
        CopticCalendar testCalendar = new CopticCalendar();
        testCalendar.setLenient(true);
        doTestCases(tests, testCalendar);
    }

    // basic sanity check that the conversion algorithm round-trips
    public void TestCopticToJD() {
        CopticCalendar cal = new CopticCalendar();
        cal.clear();
        for (int y = -2; y < 3; ++y) {
            for (int m = 0; m < 12; ++m) { // don't understand rules for 13th month
                for (int d = 1; d < 25; d += 3) { // play it safe on days per month
                    int jd = CopticCalendar.copticToJD(y, m, d);
                    cal.set(Calendar.JULIAN_DAY, jd);
                    int eyear = cal.get(Calendar.EXTENDED_YEAR);
                    int month = cal.get(Calendar.MONTH);
                    int day = cal.get(Calendar.DAY_OF_MONTH);
                    if (!(y == eyear && m == month && d == day)) {
                        errln("y: " + y +
                              " m: " + m +
                              " d: " + d +
                              " --> jd: " + jd +
                              " --> y: " + eyear +
                              " m: " + month +
                              " d: " + day);
                    }
                }
            }
        }
    }

    // basic check to see that we print out eras ok
    // eventually should modify to use locale strings and formatter appropriate to coptic calendar
    public void TestEraStart() {
        SimpleDateFormat fmt = new SimpleDateFormat("EEE MMM dd, yyyy GG");
        SimpleDateFormat copticFmt = new SimpleDateFormat("EEE MMM dd, yyyy GG");
        copticFmt.setCalendar(new CopticCalendar());

        CopticCalendar cal = new CopticCalendar(1, 0, 1);
        assertEquals("Coptic Date", "Fri Jan 01, 0001 AD", copticFmt.format(cal));
        assertEquals("Gregorian Date", "Fri Aug 29, 0284 AD", fmt.format(cal.getTime()));

        cal.set(Calendar.ERA, 0);
        cal.set(Calendar.YEAR, 1);
        assertEquals("Coptic Date", "Thu Jan 01, 0001 BC", copticFmt.format(cal));
        assertEquals("Gregorian Date", "Thu Aug 30, 0283 AD", fmt.format(cal.getTime()));
    }

    // Smoke test: set a date and echo the resolved Y/M/D fields.
    public void TestBasic() {
        CopticCalendar cal = new CopticCalendar();
        cal.clear();
        cal.set(1000, 0, 30);
        logln("1000/0/30-> " +
              cal.get(YEAR) + "/" +
              cal.get(MONTH) + "/" +
              cal.get(DATE));
        cal.clear();
        cal.set(1, 0, 30);
        logln("1/0/30 -> " +
              cal.get(YEAR) + "/" +
              cal.get(MONTH) + "/" +
              cal.get(DATE));
    }

    /**
     * Test limits of the Coptic calendar
     */
    public void TestLimits() {
        Calendar cal = Calendar.getInstance();
        cal.set(2007, Calendar.JANUARY, 1);
        CopticCalendar coptic = new CopticCalendar();
        doLimitsTest(coptic, null, cal.getTime());
        doTheoreticalLimitsTest(coptic, true);
    }

    /**
     * Test for track ticket 6379 - proper reporting of
     * maximum month lengths
     */
    public void Test6379() {
        CopticCalendar copticCal = new CopticCalendar();
        copticCal.clear();
        for (int year = 1725; year < 1735; year++) { // Coptic 1725-01-01 = Gregorian 2008-09-11
            boolean isLeap = ((year % 4) == 3);
            copticCal.set(Calendar.YEAR, year);
            int maxMonth = copticCal.getActualMaximum(Calendar.MONTH);
            for (int month = 0; month <= maxMonth; month++) {
                copticCal.set(Calendar.MONTH, month);
                int maxDayOfMonth = copticCal.getActualMaximum(Calendar.DAY_OF_MONTH);
                // Months 1..12 have 30 days; the 13th (epagomenal) month has
                // 6 days in a leap year, 5 otherwise.
                int expected = (month != maxMonth) ? 30 : (isLeap ? 6 : 5);
                if (maxDayOfMonth != expected) {
                    errln("FAIL: Expected maximum " + expected + " days for month #" +
                          (month + 1) + " - returned:" + maxDayOfMonth);
                }
            }
        }
    }

    // Exercises every public constructor and a matrix of calendar/format
    // locales; each sub-scope tests one constructor overload.
    public void TestCoverage() {
        {
            // new CopticCalendar(TimeZone)
            CopticCalendar cal = new CopticCalendar(TimeZone.getDefault());
            if(cal == null){
                errln("could not create CopticCalendar with TimeZone");
            }
        }
        {
            // new CopticCalendar(ULocale)
            CopticCalendar cal = new CopticCalendar(ULocale.getDefault());
            if(cal == null){
                errln("could not create CopticCalendar with ULocale");
            }
        }
        {
            // new CopticCalendar(Locale)
            CopticCalendar cal = new CopticCalendar(Locale.getDefault());
            if(cal == null){
                errln("could not create CopticCalendar with Locale");
            }
        }
        {
            // new CopticCalendar(TimeZone, Locale)
            CopticCalendar cal = new CopticCalendar(TimeZone.getDefault(),Locale.getDefault());
            if(cal == null){
                errln("could not create CopticCalendar with TimeZone, Locale");
            }
        }
        {
            // new CopticCalendar(TimeZone, ULocale)
            CopticCalendar cal = new CopticCalendar(TimeZone.getDefault(),ULocale.getDefault());
            if(cal == null){
                errln("could not create CopticCalendar with TimeZone, ULocale");
            }
        }
        {
            // new CopticCalendar(Date)
            CopticCalendar cal = new CopticCalendar(new Date());
            if(cal == null){
                errln("could not create CopticCalendar with Date");
            }
        }
        {
            // new CopticCalendar(int year, int month, int date)
            CopticCalendar cal = new CopticCalendar(1997, CopticCalendar.TOUT, 1);
            if(cal == null){
                errln("could not create CopticCalendar with year,month,date");
            }
        }
        {
            // new CopticCalendar(int year, int month, int date, int hour, int minute, int second)
            CopticCalendar cal = new CopticCalendar(1997, CopticCalendar.TOUT, 1, 1, 1, 1);
            if(cal == null){
                errln("could not create CopticCalendar with year,month,date,hour,minute,second");
            }
        }
        {
            // data
            CopticCalendar cal = new CopticCalendar(1997, CopticCalendar.TOUT, 1);
            Date time = cal.getTime();

            String[] calendarLocales = {
                "am_ET", "gez_ET", "ti_ET"
            };

            String[] formatLocales = {
                "en", "am", "am_ET", "gez", "ti"
            };

            for (int i = 0; i < calendarLocales.length; ++i) {
                String calLocName = calendarLocales[i];
                Locale calLocale = LocaleUtility.getLocaleFromName(calLocName);
                cal = new CopticCalendar(calLocale);

                for (int j = 0; j < formatLocales.length; ++j) {
                    String locName = formatLocales[j];
                    Locale formatLocale = LocaleUtility.getLocaleFromName(locName);
                    DateFormat format = DateFormat.getDateTimeInstance(cal, DateFormat.FULL, DateFormat.FULL, formatLocale);
                    logln(calLocName + "/" + locName + " --> " + format.format(time));
                }
            }
        }
    }

    // Adds two months in the Gregorian, Coptic and Ethiopic calendars and
    // logs the formatted before/after dates for visual comparison.
    public void TestYear() {
        // Gregorian Calendar
        Calendar gCal= new GregorianCalendar();
        Date gToday=gCal.getTime();
        gCal.add(GregorianCalendar.MONTH,2);
        Date gFuture=gCal.getTime();
        DateFormat gDF = DateFormat.getDateInstance(gCal,DateFormat.FULL);
        logln("gregorian calendar: " + gDF.format(gToday) +
              " + 2 months = " + gDF.format(gFuture));

        // Coptic Calendar
        CopticCalendar cCal= new CopticCalendar();
        Date cToday=cCal.getTime();
        cCal.add(CopticCalendar.MONTH,2);
        Date cFuture=cCal.getTime();
        DateFormat cDF = DateFormat.getDateInstance(cCal,DateFormat.FULL);
        logln("coptic calendar: " + cDF.format(cToday) +
              " + 2 months = " + cDF.format(cFuture));

        // EthiopicCalendar
        EthiopicCalendar eCal= new EthiopicCalendar();
        Date eToday=eCal.getTime();
        eCal.add(EthiopicCalendar.MONTH,2); // add 2 months
        eCal.setAmeteAlemEra(false);
        Date eFuture=eCal.getTime();
        DateFormat eDF = DateFormat.getDateInstance(eCal,DateFormat.FULL);
        logln("ethiopic calendar: " + eDF.format(eToday) +
              " + 2 months = " + eDF.format(eFuture));
    }

    // Table-driven add()/set() tests around the year boundary and the short
    // 13th month, including leap years (a Coptic year is leap when y % 4 == 3).
    public void TestAddSet() {
        class TestAddSetItem {
            private int startYear;
            private int startMonth; // 0-based
            private int startDay;   // 1-based
            private int fieldToChange;
            private int fieldDelta;
            private int endYear;
            private int endMonth;
            private int endDay;
            TestAddSetItem(int sYr, int sMo, int sDa, int field, int delta, int eYr, int eMo, int eDa) {
                startYear = sYr;
                startMonth = sMo;
                startDay = sDa;
                fieldToChange = field;
                fieldDelta = delta;
                endYear = eYr;
                endMonth = eMo;
                endDay = eDa;
            }
            public int getStartYear()  { return startYear; }
            public int getStartMonth() { return startMonth; }
            public int getStartDay()   { return startDay; }
            public int getField()      { return fieldToChange; }
            public int getDelta()      { return fieldDelta; }
            public int getEndYear()    { return endYear; }
            public int getEndMonth()   { return endMonth; }
            public int getEndDay()     { return endDay; }
        }
        final TestAddSetItem[] tests = {
            new TestAddSetItem( 1724, 12, 1, Calendar.MONTH, +1, 1725, 0, 1 ),
            new TestAddSetItem( 1724, 12, 1, Calendar.MONTH, +9, 1725, 8, 1 ),
            new TestAddSetItem( 1723, 12, 2, Calendar.MONTH, +1, 1724, 0, 2 ), // 1723 is a leap year
            new TestAddSetItem( 1723, 12, 2, Calendar.MONTH, +9, 1724, 8, 2 ),
            new TestAddSetItem( 1725, 0, 1, Calendar.MONTH, -1, 1724, 12, 1 ),
            new TestAddSetItem( 1725, 0, 1, Calendar.MONTH, -6, 1724, 7, 1 ),
            new TestAddSetItem( 1724, 12, 1, Calendar.DATE, +8, 1725, 0, 4 ),
            new TestAddSetItem( 1723, 12, 1, Calendar.DATE, +8, 1724, 0, 3 ), // 1723 is a leap year
            new TestAddSetItem( 1724, 0, 1, Calendar.DATE, -1, 1723, 12, 6 ),
        };
        CopticCalendar testCalendar = new CopticCalendar();
        for ( int i = 0; i < tests.length; i++ ) {
            TestAddSetItem item = tests[i];
            testCalendar.set( item.getStartYear(), item.getStartMonth(), item.getStartDay(), 9, 0 );
            testCalendar.add( item.getField(), item.getDelta() );
            int endYear = testCalendar.get(Calendar.YEAR);
            int endMonth = testCalendar.get(Calendar.MONTH);
            int endDay = testCalendar.get(Calendar.DATE);
            if ( endYear != item.getEndYear() || endMonth != item.getEndMonth() || endDay != item.getEndDay() ) {
                errln("CToJD FAILS: field " + item.getField() + " delta " + item.getDelta() +
                      " expected yr " + item.getEndYear() + " mo " + item.getEndMonth() + " da " + item.getEndDay() +
                      " got yr " + endYear + " mo " + endMonth + " da " + endDay);
            }
        }
    }
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.postgresql.model.generic;

import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.ext.generic.model.*;
import org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel;
import org.jkiss.dbeaver.ext.postgresql.PostgreUtils;
import org.jkiss.dbeaver.ext.postgresql.model.PostgreGenericDataSource;
import org.jkiss.dbeaver.ext.postgresql.model.PostgreGenericTrigger;
import org.jkiss.dbeaver.ext.postgresql.model.PostgreGenericTypeCache;
import org.jkiss.dbeaver.model.DBPDataSourceContainer;
import org.jkiss.dbeaver.model.DBPErrorAssistant;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.exec.DBCQueryTransformProvider;
import org.jkiss.dbeaver.model.exec.DBCQueryTransformType;
import org.jkiss.dbeaver.model.exec.DBCQueryTransformer;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession;
import org.jkiss.dbeaver.model.exec.plan.DBCQueryPlanner;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils;
import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCBasicDataTypeCache;
import org.jkiss.dbeaver.model.impl.sql.QueryTransformerLimit;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.utils.CommonUtils;
import org.osgi.framework.Version;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * PostgreMetaModel
 * <p>
 * Generic-driver meta model for PostgreSQL-compatible databases: reads view
 * and procedure DDL, sequences and triggers from the catalog, and extracts
 * error positions from server messages.
 */
public class PostgreMetaModel extends GenericMetaModel implements DBCQueryTransformProvider {

    // FIX: compiled Pattern is immutable and thread-safe — hold it once as a
    // class constant instead of recompiling per instance (was a non-final
    // instance field despite its constant-style name).
    private static final Pattern ERROR_POSITION_PATTERN = Pattern.compile("\\n\\s*Position: ([0-9]+)");

    public PostgreMetaModel() {
        super();
    }

    @Override
    public GenericDataSource createDataSourceImpl(DBRProgressMonitor monitor, DBPDataSourceContainer container) throws DBException {
        return new PostgreGenericDataSource(monitor, container, this);
    }

    @Override
    public JDBCBasicDataTypeCache createDataTypeCache(@NotNull GenericStructContainer container) {
        return new PostgreGenericTypeCache(container);
    }

    /**
     * Reads the view definition from pg_catalog.pg_views.
     * NOTE(review): unlike its siblings this method carries no @Override in
     * the original — confirm against GenericMetaModel before adding one.
     */
    public String getViewDDL(DBRProgressMonitor monitor, GenericView sourceObject, Map<String, Object> options) throws DBException {
        try (JDBCSession session = DBUtils.openMetaSession(monitor, sourceObject, "Read view definition")) {
            return JDBCUtils.queryString(session, "SELECT definition FROM PG_CATALOG.PG_VIEWS WHERE SchemaName=? and ViewName=?", sourceObject.getContainer().getName(), sourceObject.getName());
        } catch (SQLException e) {
            throw new DBException(e, sourceObject.getDataSource());
        }
    }

    /** Reads the full function definition via pg_get_functiondef(). */
    @Override
    public String getProcedureDDL(DBRProgressMonitor monitor, GenericProcedure sourceObject) throws DBException {
        try (JDBCSession session = DBUtils.openMetaSession(monitor, sourceObject, "Read procedure definition")) {
            return JDBCUtils.queryString(session, "SELECT pg_get_functiondef(p.oid) FROM PG_CATALOG.PG_PROC P, PG_CATALOG.PG_NAMESPACE NS\n" +
                "WHERE ns.oid=p.pronamespace and ns.nspname=? AND p.proname=?", sourceObject.getContainer().getName(), sourceObject.getName());
        } catch (SQLException e) {
            throw new DBException(e, sourceObject.getDataSource());
        }
    }

    /** Sequences exist from PostgreSQL 8.4 onwards. */
    @Override
    public boolean supportsSequences(@NotNull GenericDataSource dataSource) {
        Version databaseVersion = dataSource.getInfo().getDatabaseVersion();
        return databaseVersion.getMajor() >= 9 || databaseVersion.getMajor() == 8 && databaseVersion.getMinor() >= 4;
    }

    /**
     * Lists sequences from information_schema, then reads each sequence's
     * current/min/max/increment values.  PostgreSQL 10+ exposes these through
     * pg_catalog.pg_sequences; older servers require selecting from the
     * sequence relation itself.
     */
    @Override
    public List<GenericSequence> loadSequences(@NotNull DBRProgressMonitor monitor, @NotNull GenericStructContainer container) throws DBException {
        Version databaseVersion = container.getDataSource().getInfo().getDatabaseVersion();
        try (JDBCSession session = DBUtils.openMetaSession(monitor, container, "Read procedure definition")) {
            try (JDBCPreparedStatement dbStat = session.prepareStatement("SELECT sequence_name FROM information_schema.sequences WHERE sequence_schema=?")) {
                dbStat.setString(1, container.getName());
                try (JDBCResultSet dbResult = dbStat.executeQuery()) {
                    List<GenericSequence> result = new ArrayList<>();
                    while (dbResult.next()) {
                        String name = JDBCUtils.safeGetString(dbResult, 1);
                        // Pre-10 path: schema/sequence names come from the
                        // catalog itself, so concatenation is identifiers
                        // read back from the server, not user input.
                        String sequenceSql = "SELECT last_value,min_value,max_value,increment_by from " + container.getName() + "." + name;
                        if (databaseVersion.getMajor() >= 10) {
                            sequenceSql = "SELECT last_value, min_value, max_value, increment_by from pg_catalog.pg_sequences where schemaname=? and sequencename=?";
                        }
                        try (JDBCPreparedStatement dbSeqStat = session.prepareStatement(sequenceSql)) {
                            if (databaseVersion.getMajor() >= 10) {
                                dbSeqStat.setString(1, container.getName());
                                dbSeqStat.setString(2, dbResult.getString(1));
                            }
                            try (JDBCResultSet seqResults = dbSeqStat.executeQuery()) {
                                seqResults.next();
                                GenericSequence sequence = new GenericSequence(
                                    container,
                                    name,
                                    PostgreUtils.getObjectComment(monitor, container, container.getName(), name),
                                    JDBCUtils.safeGetLong(seqResults, 1),
                                    JDBCUtils.safeGetLong(seqResults, 2),
                                    JDBCUtils.safeGetLong(seqResults, 3),
                                    JDBCUtils.safeGetLong(seqResults, 4));
                                result.add(sequence);
                            }
                        }
                    }
                    return result;
                }
            }
        } catch (SQLException e) {
            throw new DBException(e, container.getDataSource());
        }
    }

    @Override
    public boolean supportsTriggers(@NotNull GenericDataSource dataSource) {
        return true;
    }

    /**
     * Loads triggers from INFORMATION_SCHEMA.TRIGGERS, either for a single
     * table or (table == null) schema-wide.  Rows sharing a trigger name are
     * merged into one trigger with multiple manipulations (INSERT/UPDATE/...).
     */
    @Override
    public List<PostgreGenericTrigger> loadTriggers(DBRProgressMonitor monitor, @NotNull GenericStructContainer container, @Nullable GenericTableBase table) throws DBException {
        try (JDBCSession session = DBUtils.openMetaSession(monitor, container, "Read triggers")) {
            StringBuilder sql = new StringBuilder();
            sql.append("SELECT trigger_name,event_manipulation,action_order,action_condition,action_statement,action_orientation,action_timing\n" +
                "FROM INFORMATION_SCHEMA.TRIGGERS\n" +
                "WHERE ");
            if (table == null) {
                sql.append("trigger_schema=? AND event_object_table IS NULL");
            } else {
                sql.append("event_object_schema=? AND event_object_table=?");
            }
            try (JDBCPreparedStatement dbStat = session.prepareStatement(sql.toString())) {
                if (table == null) {
                    dbStat.setString(1, container.getSchema().getName());
                } else {
                    dbStat.setString(1, table.getSchema().getName());
                    dbStat.setString(2, table.getName());
                }
                Map<String, PostgreGenericTrigger> result = new LinkedHashMap<>();
                try (JDBCResultSet dbResult = dbStat.executeQuery()) {
                    while (dbResult.next()) {
                        String name = JDBCUtils.safeGetString(dbResult, "trigger_name");
                        if (name == null) {
                            continue;
                        }
                        String manipulation = JDBCUtils.safeGetString(dbResult, "event_manipulation");
                        PostgreGenericTrigger trigger = result.get(name);
                        if (trigger != null) {
                            // Same trigger fired for another event type.
                            trigger.addManipulation(manipulation);
                            continue;
                        }
                        String description = "";
                        trigger = new PostgreGenericTrigger(
                            container,
                            table,
                            name,
                            description,
                            manipulation,
                            JDBCUtils.safeGetString(dbResult, "action_orientation"),
                            JDBCUtils.safeGetString(dbResult, "action_timing"),
                            JDBCUtils.safeGetString(dbResult, "action_statement"));
                        result.put(name, trigger);
                    }
                }
                return new ArrayList<>(result.values());
            }
        } catch (SQLException e) {
            throw new DBException(e, container.getDataSource());
        }
    }

    @Override
    public String getTriggerDDL(@NotNull DBRProgressMonitor monitor, @NotNull GenericTrigger trigger) throws DBException {
        // Never be here
        return null;
    }

    @Override
    public DBCQueryPlanner getQueryPlanner(@NotNull GenericDataSource dataSource) {
        return new PostgreGenericQueryPlaner(dataSource);
    }

    /**
     * Parses "\n  Position: NNN" out of a server error message and converts
     * it to a 0-based offset for editor positioning.
     */
    @Override
    public DBPErrorAssistant.ErrorPosition getErrorPosition(@NotNull Throwable error) {
        String message = error.getMessage();
        if (!CommonUtils.isEmpty(message)) {
            Matcher matcher = ERROR_POSITION_PATTERN.matcher(message);
            if (matcher.find()) {
                DBPErrorAssistant.ErrorPosition pos = new DBPErrorAssistant.ErrorPosition();
                pos.position = Integer.parseInt(matcher.group(1)) - 1;
                return pos;
            }
        }
        return null;
    }

    @Nullable
    @Override
    public DBCQueryTransformer createQueryTransformer(@NotNull DBCQueryTransformType type) {
        if (type == DBCQueryTransformType.RESULT_SET_LIMIT) {
            return new QueryTransformerLimit(false, true);
        }
        return null;
    }
}
package org.ncbo.resource_access_tools.resource.micad;

import org.ncbo.resource_access_tools.enumeration.ResourceType;
import org.ncbo.resource_access_tools.populate.Element;
import org.ncbo.resource_access_tools.populate.Structure;
import org.ncbo.resource_access_tools.resource.ResourceAccessTool;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;

/**
 * Extract data from MICAD (Molecular Imaging and Contrast Agent Database).
 * Uses MICAD's CSV file for the short agent fields and accesses the MICAD web
 * pages for the long-text sections.
 *
 * @author Kranthi Kode
 * @version MicadAccessTool v2
 * @date 19-October-2009
 */
public class MicadAccessTool extends ResourceAccessTool {

    // Resource identity / presentation constants.
    private static final String MICAD_URL = "http://www.micad.nih.gov/";
    private static final String MICAD_NAME = "MICAD";
    private static final String MICAD_RESOURCEID = "MICAD";
    private static final String MICAD_DESCRIPTION = "Molecular Imaging and Contrast Agent Database";
    private static final String MICAD_LOGO = "http://www.ncbi.nlm.nih.gov/corehtml/pmc/pmcgifs/bookshelf/thumbs/th-micad-lrg.png";
    private static final String MICAD_ELT_URL = "http://www.ncbi.nlm.nih.gov/bookshelf/br.fcgi?book=micad&part=";

    // Context (item-key) names: the first NB_CSV_ITEMS come from the CSV file,
    // the remainder are scraped from the element's web page.
    private static final String[] MICAD_ITEMKEYS = {"Name", "Abbreviated", "Synonym", "Agent_Category",
            "Target", "Target_Category", "Detection_Method", "Signal_Source", "Background", "Synthesis",
            "In_Vitro", "Animal_Studies", "Human_Studies", "References"};
    // All contexts are weighted equally.
    private static final Double[] MICAD_WEIGHTS = {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
            1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0};
    // Every context goes through concept recognition (no pre-annotated ontology IDs).
    private static final String[] MICAD_ONTOIDS = {Structure.FOR_CONCEPT_RECOGNITION,
            Structure.FOR_CONCEPT_RECOGNITION, Structure.FOR_CONCEPT_RECOGNITION,
            Structure.FOR_CONCEPT_RECOGNITION, Structure.FOR_CONCEPT_RECOGNITION,
            Structure.FOR_CONCEPT_RECOGNITION, Structure.FOR_CONCEPT_RECOGNITION,
            Structure.FOR_CONCEPT_RECOGNITION, Structure.FOR_CONCEPT_RECOGNITION,
            Structure.FOR_CONCEPT_RECOGNITION, Structure.FOR_CONCEPT_RECOGNITION,
            Structure.FOR_CONCEPT_RECOGNITION, Structure.FOR_CONCEPT_RECOGNITION,
            Structure.FOR_CONCEPT_RECOGNITION};
    private static final Structure MICAD_STRUCTURE = new Structure(MICAD_ITEMKEYS, MICAD_RESOURCEID, MICAD_WEIGHTS, MICAD_ONTOIDS);
    private static final String MICAD_MAIN_ITEMKEY = "Name";

    // Number of item keys populated from the CSV file; the remaining
    // MICAD_ITEMKEYS.length - NB_CSV_ITEMS come from the web pages.
    private static final int NB_CSV_ITEMS = 8;
    // CSV column holding the element's MICAD URL ("...micad&part=<localID>").
    private static final int CSV_URL_COLUMN = 18;
    // Marker splitting the URL in CSV_URL_COLUMN; the trailing part is the local element ID.
    private static final String ELT_ID_URL_MARKER = "micad&part=";

    /**
     * Registers the tool with its structure and fills in the resource metadata.
     */
    public MicadAccessTool() {
        super(MICAD_NAME, MICAD_RESOURCEID, MICAD_STRUCTURE);
        try {
            this.getToolResource().setResourceURL(new URL(MICAD_URL));
            this.getToolResource().setResourceDescription(MICAD_DESCRIPTION);
            this.getToolResource().setResourceLogo(new URL(MICAD_LOGO));
            this.getToolResource().setResourceElementURL(MICAD_ELT_URL);
        } catch (MalformedURLException e) {
            // The URLs are compile-time constants; this should never happen.
            e.printStackTrace();
        }
    }

    @Override
    public ResourceType getResourceType() {
        return ResourceType.SMALL;
    }

    @Override
    public void updateResourceInformation() {
        // TODO: can be used to update resource name, description, logo, elt_url.
    }

    /**
     * Online querying is not used for MICAD: all content is taken from the CSV
     * dump and the element web pages. Always returns an empty set.
     */
    @Override
    public HashSet<String> queryOnlineResource(String query) {
        return new HashSet<String>();
    }

    @Override
    public String elementURLString(String elementLocalID) {
        return MICAD_ELT_URL + elementLocalID;
    }

    public String itemKeyForAnnotationForBP() {
        return MICAD_MAIN_ITEMKEY;
    }

    /**
     * Reads the CSV element list, keeps the element IDs not yet present in the
     * element table (_ET), fetches each new element and inserts it.
     *
     * @return the number of elements actually added to the element table
     */
    @Override
    public int updateResourceContent() {
        int nbElement = 0;
        try {
            // Element grid from the original resource (CSV file).
            String[][] csvElements = this.getLocalElementIds();

            // Extract local element IDs from the URL column. Row 0 is skipped
            // (presumably the CSV header row - confirm in MicadReadCsv).
            HashSet<String> elementIDList = new HashSet<String>();
            for (int i = 1; i < csvElements.length; i++) {
                if (csvElements[i][CSV_URL_COLUMN] != null) {
                    String[] urlParts = csvElements[i][CSV_URL_COLUMN].split(ELT_ID_URL_MARKER);
                    if (urlParts.length == 2) {
                        elementIDList.add(urlParts[1]);
                    }
                }
            }
            logger.info(elementIDList.size() + " chemicals found. ");

            // Keep only the elements not already present in the corresponding _ET table.
            HashSet<String> allElementsInET = resourceUpdateService.getAllLocalElementIDs();
            elementIDList.removeAll(allElementsInET);
            logger.info("Number of new elements to dump: " + elementIDList.size());

            // Fetch the data of each new element and populate the element table.
            for (String elementID : elementIDList) {
                Element myElement = this.getElement(elementID, csvElements);
                if (resourceUpdateService.addElement(myElement)) {
                    nbElement++;
                }
            }
        } catch (Exception e) {
            logger.error("** PROBLEM ** Cannot update resource " + this.getToolResource().getResourceName(), e);
        }
        logger.info(nbElement + " elements from " + MICAD_NAME + " added to the element table.");
        return nbElement;
    }

    /**
     * Gets the raw element grid from the MICAD CSV file via MicadReadCsv.
     * On failure an empty (all-null) grid is returned so callers iterating
     * over the rows simply find nothing to process.
     */
    private String[][] getLocalElementIds() {
        String[][] elements = new String[2000][50];
        logger.info(" Get the list of all elementIDs from " + MICAD_NAME + " ... ");
        try {
            MicadReadCsv myExtractor = new MicadReadCsv(this.getToolResource(), this);
            elements = myExtractor.getLocalElementIds();
        } catch (Exception e) {
            logger.error("** PROBLEM ** Problem when extracting elementID from the original resource. "
                    + "Check MicadReadCsv", e);
        }
        return elements;
    }

    /**
     * Builds a complete Element (Structure filled) for one local ID: the first
     * NB_CSV_ITEMS contexts come from the CSV row, the remaining sections are
     * scraped from the element's web page. Null fields become EMPTY_STRING.
     *
     * @param elementID  local MICAD element ID
     * @param csvElement raw CSV grid as returned by getLocalElementIds()
     * @return the populated element, or null when extraction fails
     */
    private Element getElement(String elementID, String[][] csvElement) {
        Element element = null;
        ArrayList<String> contextNames = this.getToolResource().getResourceStructure().getContextNames();
        Structure eltStructure = new Structure(contextNames);
        try {
            // Short fields (Name .. Signal_Source) from the CSV file.
            MicadReadCsv csvExtractor = new MicadReadCsv(this.getToolResource(), this);
            String[] parts = csvExtractor.getElement(elementID, csvElement);
            for (int i = 0; i < NB_CSV_ITEMS; i++) {
                if (parts[i] == null) {
                    parts[i] = EMPTY_STRING;
                }
            }

            // Long-text sections (Background .. References) from the web page.
            MicadReadWeb webExtractor = new MicadReadWeb(this.getToolResource(), this);
            String[] data = webExtractor.getSections(elementID);
            for (int i = 0; i < MICAD_ITEMKEYS.length - NB_CSV_ITEMS; i++) {
                if (data[i] == null) {
                    data[i] = EMPTY_STRING;
                }
            }

            // Fill the element structure: item keys 0..7 from the CSV, 8..13 from the web.
            for (int i = 0; i < NB_CSV_ITEMS; i++) {
                eltStructure.putContext(Structure.generateContextName(MICAD_RESOURCEID, MICAD_ITEMKEYS[i]), parts[i]);
            }
            for (int i = NB_CSV_ITEMS; i < MICAD_ITEMKEYS.length; i++) {
                eltStructure.putContext(Structure.generateContextName(MICAD_RESOURCEID, MICAD_ITEMKEYS[i]), data[i - NB_CSV_ITEMS]);
            }

            element = new Element(elementID, eltStructure);
        } catch (Exception e) {
            logger.error("** PROBLEM ** Problem when extracting " + elementID + " from " + MICAD_NAME
                    + ". Check the AccessTool", e);
        }
        return element;
    }

    @Override
    public String mainContextDescriptor() {
        return MICAD_MAIN_ITEMKEY;
    }
}
package models;

/**
 * A node of a gradient-boosted decision tree over categorical features.
 *
 * Each node accumulates per-category gradient statistics (CategoricalG/H/Count)
 * plus node totals (GlobalG/H/Count), then CalculateCost() picks the best split
 * by a gain formula of the form G^2/H per side (appears to be XGBoost-style
 * gain - confirm against the training code). Training alternates between
 * UpdateCatFeatures() passes (accumulate stats) and CalculateCost() passes
 * (freeze this node, create children).
 *
 * CalculateCost() return protocol: -1 = no valid split found (caller prunes
 * this child), -2 = this subtree is fully completed, 0 = still growing.
 */
public class TreeNode2 implements java.io.Serializable{

	private static final long serialVersionUID = 1L;

	boolean isRoot=false;
	// Total number of categorical feature slots (size of the stats arrays).
	int Categories;
	// Per-category sums of gradients / hessians / sample counts; freed once the
	// node's split is decided (see CalculateCost) to save memory.
	float [] CategoricalG;
	float [] CategoricalH;
	int [] CategoricalCount;
	// Node-level totals over all samples routed here.
	float GlobalG=0;
	float GlobalH=0;
	int GlobalCount=0;;
	// Leaf output value (-GlobalG/GlobalH once computed).
	float prediction;
	int count1=0;
	int count2=0;
	// Chosen split: feature index threshold and which field (feature group) it belongs to.
	int topFeature=-1;
	int topField=-1;
	// Completed: this node's split decision has been made (stats arrays freed).
	boolean Completed=false;
	boolean leafNode=true;
	// Final*: the corresponding child was pruned and will never exist.
	boolean FinalLeft=false;
	boolean FinalRight=false;
	// Completed*: the corresponding subtree needs no further updates.
	boolean CompletedLeft=false;
	boolean CompletedRight=false;
	TreeNode2 leftChild;
	TreeNode2 rightChild;
	// Minimum samples required on each side of a split.
	int minLeafCount=1;
	int depth=0;
	TreeStats treeStats=new TreeStats();
	// Sizes of the feature fields (groups); features are laid out consecutively
	// field by field, so a raw feature index maps to a field via these sizes.
	int [] fields;
	// Debug breadcrumb of the path from the root ("-left"/"-right" appended per level).
	String direction="";

	/** Allocates the per-category statistics arrays for the given number of slots. */
	public void InitCategorical(int inputNodes){
		Categories=inputNodes;
		CategoricalG = new float[Categories];
		CategoricalH = new float[Categories];
		CategoricalCount = new int[Categories];
	}

	/** Accumulates one sample's gradient/hessian into the given category slot. */
	public void InsertCatFeature(int ID, float RealValue,float residual){
		CategoricalG[ID]+=calculateG(RealValue,residual);
		CategoricalH[ID]+=calculateH(RealValue,residual);
		CategoricalCount[ID]++;
	}

	/**
	 * Routes a (sparse, sorted) feature-index vector down the tree and returns
	 * the leaf prediction. A sample goes left when it contains a feature with
	 * index <= topFeature inside field topField; pruned children fall back to
	 * this node's own prediction.
	 */
	public float predict(int [] FeatureVector){
		if(leafNode){
			return prediction;
		}
		boolean left=false;
		int fieldPos=0;
		for(int i=0;i<FeatureVector.length;i++){
			int FeatureID=FeatureVector[i];
			// Advance to the field this raw feature index belongs to.
			if(!(FeatureID<fields[fieldPos])){
				fieldPos++;
			}
			if(FeatureID<=topFeature && fieldPos==topField){
				//System.out.println(FeatureID+" - "+ topFeature+" - "+ fieldPos+" - "+ topField);
				left=true;
				break;
			}else{
			}
		}
		if(left){
			if(FinalLeft){
				// Left child was pruned: this node acts as the leaf.
				return prediction;
			}else{
				return leftChild.predict(FeatureVector);
			}
		}else{
			if(FinalRight){
				return prediction;
			}else{
				return rightChild.predict(FeatureVector);
			}
		}
	}

	/**
	 * Decides this node's split (or recurses into children when already split).
	 *
	 * Internal nodes: propagate into non-pruned children, prune children that
	 * report -1, and mark subtrees completed on -2. Leaf nodes: compute the
	 * leaf prediction, scan all categories per field accumulating left-side
	 * G/H/count to evaluate each candidate split's gain, then either give up
	 * (-1) or create both children. Frees the category arrays either way.
	 *
	 * @return -1 no valid split (prune me), -2 subtree fully completed, 0 otherwise
	 */
	public int CalculateCost(){
		if(!leafNode){
			int LeftResult=-1;
			if(!FinalLeft){
				LeftResult=leftChild.CalculateCost();
			}
			if(LeftResult==-1){
				// Child found no split: drop it and treat that side as final.
				leftChild=null;
				FinalLeft=true;
				CompletedLeft=true;
			}else if(LeftResult==-2){
				CompletedLeft=true;
			}
			int RightResult=-1;
			if(!FinalRight){
				RightResult=rightChild.CalculateCost();
			}
			if(RightResult==-1){
				rightChild=null;
				FinalRight=true;
				CompletedRight=true;
			}else if(RightResult==-2){
				CompletedRight=true;
			}
			if(FinalRight && FinalLeft){
				// Both children pruned: this node degenerates back into a leaf.
				leafNode=true;
			}
			if(CompletedRight && CompletedLeft){
				return -2;
			}
			return 0;
		}
		if(Completed){
			return -2;
		}
		// Leaf output = -G/H (standard second-order boosting step).
		prediction=(-GlobalG/GlobalH);
		// NOTE(review): Float.MIN_VALUE is the smallest POSITIVE float, not the most
		// negative; this only works because the condition below also requires gain>0.
		float maxGain=Float.MIN_VALUE;
		boolean ValidChild=false;
		float predictionRight=0;
		float predictionLeft=0;
		float countLF=0;
		for(int i=0;i<fields.length;i++){
			//System.out.println(i+"="+fields[i]);
		}
		int fieldPos=0;
		int fieldSum=0;
		// Running left-side sums, tracked per field (candidate splits are
		// "feature index <= i within this field").
		float [] fieldsG=new float[fields.length];
		float [] fieldsH=new float[fields.length];
		int [] fieldsCount=new int[fields.length];
		for(int i=0;i<CategoricalG.length;i++){
			// Crossed into the next field: advance the field cursor.
			if(i>fieldSum+fields[fieldPos]){
				fieldSum+=fields[fieldPos];
				fieldPos++;
			}
			if(CategoricalCount[i]==0){
				continue;
			}
			fieldsG[fieldPos]+=CategoricalG[i];
			float GL=fieldsG[fieldPos];
			fieldsH[fieldPos]+=CategoricalH[i];
			float HL=fieldsH[fieldPos];
			if(HL==0 || GlobalH-HL==0){
				// NOTE(review): degenerate-hessian guard is intentionally a no-op here;
				// the original `continue` was disabled.
				//continue;
			}
			fieldsCount[fieldPos]+=CategoricalCount[i];
			int countL=fieldsCount[fieldPos];
			// Right side = node totals minus left side.
			float GR=GlobalG-GL;
			float HR=GlobalH-HL;
			int countR=GlobalCount-countL;
			float scoreLeft=GL*GL/HL;
			float scoreRight=GR*GR/HR;
			float scoreTot=(GL+GR)*(GL+GR)/(HL+HR);
			float gain=scoreTot-scoreLeft-scoreRight;
			gain=gain*-1;
			// Keep the best positive gain that leaves enough samples on each side.
			if(gain>maxGain && gain>0 && countL>minLeafCount && countR>minLeafCount){
				maxGain=gain;
				topFeature=i;
				topField=fieldPos;
				predictionRight=-GR/HR;
				predictionLeft=-GL/HL;
				ValidChild=true;
				countLF=countL;
			}
		}
		// Free the per-category stats: the split decision is now frozen.
		CategoricalG=null;
		CategoricalH=null;
		CategoricalCount=null;
		Completed=true;
		if(!ValidChild){
			return -1;
		}
		treeStats.LeafCount+=2;
		leftChild=CreateChild();
		leftChild.prediction=predictionLeft;
		leftChild.direction+="-left";
		leftChild.depth=this.depth+1;
		rightChild=CreateChild();
		rightChild.prediction=predictionRight;
		rightChild.direction+="-right";
		rightChild.depth=this.depth+1;
		leafNode=false;
		return 0;
	}

	/**
	 * Routes one training sample down the tree and accumulates its gradient
	 * statistics into the leaf it lands in. Fully-completed subtrees are skipped.
	 */
	public void UpdateCatFeatures(int [] FeatureVector, float RealValue,float residual){
		//if(FinalLeft&&FinalRight){
		if(CompletedLeft&&CompletedRight){
			return;
		}
		if(!leafNode){
			// Same routing rule as predict(): left when the sample contains a
			// feature index <= topFeature inside field topField.
			boolean left=false;
			int fieldPos=0;
			for(int i=0;i<FeatureVector.length;i++){
				int FeatureID=FeatureVector[i];
				if(!(FeatureID<fields[fieldPos])){
					fieldPos++;
				}
				if(fieldPos==1 && FeatureID<1116){
					//System.out.println(FeatureID+" - "+ topFeature+" - "+ fieldPos+" - "+ topField);
				}
				if(FeatureID<=topFeature && fieldPos==topField){
					//System.out.println("left");
					left=true;
					break;
				}
			}
			if(left){
				//if(FinalLeft){
				if(CompletedLeft){
					return;
				}
				leftChild.UpdateCatFeatures(FeatureVector, RealValue,residual);
			}else{
				//if(FinalRight){
				if(CompletedRight){
					return;
				}
				rightChild.UpdateCatFeatures(FeatureVector, RealValue,residual);
			}
			return;
		}
		// This node is a growing leaf: accumulate node totals and per-category stats.
		GlobalG+=calculateG(RealValue,residual);
		GlobalH+=calculateH(RealValue,residual);
		GlobalCount++;
		for(int i=0;i<FeatureVector.length;i++){
			if(i==0){
				count1++;
			}
			if(i==1){
				count2++;
			}
			InsertCatFeature(FeatureVector[i],RealValue,residual);
		}
	}

	/** First-order gradient of the loss for one sample (squared-error style: residual - value). */
	public float calculateG(float RealValue,float residual){
		return (residual-RealValue);
	}

	/** Second-order term; constant 1 (squared-error hessian), ignoring its arguments. */
	public float calculateH(float RealValue,float residual){
		return 1;
	}

	/** Creates a child node sharing this node's configuration and tree-wide stats. */
	public TreeNode2 CreateChild(){
		TreeNode2 Child=new TreeNode2();
		Child.InitCategorical(Categories);
		Child.minLeafCount=minLeafCount;
		Child.fields=fields;
		Child.direction=direction;
		Child.treeStats=treeStats;
		return Child;
	}

	/** Debug dump of the tree (pre-order). */
	public void PrintTree(){
		System.out.println(depth+" : Feature="+ topFeature + "topField="+ topField + " prediction="+prediction);
		if(leftChild!=null){
			leftChild.PrintTree();
		}
		if(rightChild!=null){
			rightChild.PrintTree();
		}
	}

	/** Frees all statistics arrays in the whole subtree (keeps the split structure). */
	public void ClearData(){
		CategoricalG=null;
		CategoricalH=null;
		CategoricalCount=null;
		if(leftChild!=null){
			leftChild.ClearData();
		}
		if(rightChild!=null){
			rightChild.ClearData();
		}
	}
}
/*
 * Copyright 2005 Sascha Weinreuter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.intellij.lang.xpath.xslt.refactoring;

import com.intellij.codeInsight.highlighting.HighlightManager;
import com.intellij.codeInsight.highlighting.HighlightManagerImpl;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.findUsages.LanguageFindUsages;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.Processor;
import com.intellij.util.Query;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Nullable;
import org.intellij.lang.xpath.psi.XPathElement;
import org.intellij.lang.xpath.psi.XPathVariable;
import org.intellij.lang.xpath.psi.impl.XPathChangeUtil;
import org.intellij.lang.xpath.xslt.XsltSupport;
import org.intellij.lang.xpath.xslt.psi.XsltElement;
import org.intellij.lang.xpath.xslt.psi.XsltParameter;
import org.intellij.lang.xpath.xslt.psi.XsltVariable;
import org.intellij.lang.xpath.xslt.util.XsltCodeInsightUtil;

import java.awt.*;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;

/**
 * "Inline variable" refactoring for XSLT: replaces every local usage of an
 * xsl:variable with its 'select' expression and (when possible) deletes the
 * declaration. Parameters and variables without a 'select' attribute cannot
 * be inlined.
 */
class XsltInlineAction extends XsltRefactoringActionBase {
    private static final String NAME = "Inline";
    private static final String TITLE = "XSLT - " + NAME;

    public String getRefactoringName() {
        return NAME;
    }

    public String getErrorMessage(Editor editor, PsiFile file, XmlAttribute context) {
        return "The caret should be positioned on the variable to be inlined.\n" +
                "Variables without 'select'-attribute and parameters cannot be inlined.";
    }

    /**
     * Resolves the reference at (or just before) the caret; starts the inline
     * when it resolves to an XPath variable that is not an xsl:param.
     *
     * @return true when the inline dialog flow was started
     */
    protected boolean actionPerformedImpl(PsiFile file, Editor editor, XmlAttribute context, int offset) {
        PsiReference reference = file.findReferenceAt(offset);
        // Caret may sit right after the reference: retry one character back.
        if (reference == null) reference = file.findReferenceAt(offset - 1);
        if (reference != null) {
            final PsiElement element = reference.resolve();
            if (element instanceof XPathVariable) {
                if (!(element instanceof XsltParameter)) {
                    inline((XPathVariable)element, editor);
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Performs the inline: finds local usages, highlights them, asks the user
     * for confirmation, then replaces each usage with the variable's 'select'
     * expression inside a write command. The declaration is deleted only when
     * no external files reference the variable.
     */
    private void inline(final XPathVariable variable, Editor editor) {
        final String type = LanguageFindUsages.INSTANCE.forLanguage(variable.getLanguage()).getType(variable);
        final Project project = variable.getProject();
        final XmlTag tag = ((XsltElement)variable).getTag();

        final String expression = tag.getAttributeValue("select");
        if (expression == null) {
            // Nothing to inline: the variable's value is given by its content, not 'select'.
            Messages.showDialog(project, MessageFormat.format("{0} ''{1}'' has no value.",
                    StringUtil.capitalize(type), variable.getName()), TITLE, new String[]{ "OK" }, 0, Messages.getErrorIcon());
            return;
        }

        // Local usages: restricted to the declaring tag's parent scope.
        final Collection<PsiReference> references =
                ReferencesSearch.search(variable, new LocalSearchScope(tag.getParentTag()), false).findAll();
        if (references.size() == 0) {
            Messages.showDialog(project, MessageFormat.format("{0} ''{1}'' is never used.",
                    StringUtil.capitalize(type), variable.getName()), TITLE, new String[]{ "OK" }, 0, Messages.getWarningIcon());
            return;
        }

        // A top-level variable may be referenced from other files; detect that by
        // searching globally and stopping as soon as a non-local reference shows up.
        boolean hasExternalRefs = false;
        if (XsltSupport.isTopLevelElement(tag)) {
            final Query<PsiReference> query = ReferencesSearch.search(variable, GlobalSearchScope.allScope(project), false);
            hasExternalRefs = !query.forEach(new Processor<PsiReference>() {
                int allRefs = 0;

                public boolean process(PsiReference psiReference) {
                    if (++allRefs > references.size()) {
                        return false;
                    } else if (!references.contains(psiReference)) {
                        return false;
                    }
                    return true;
                }
            });
        }

        final HighlightManager highlighter = HighlightManager.getInstance(project);
        final ArrayList<RangeHighlighter> highlighters = new ArrayList<RangeHighlighter>();

        // Compute a host-file text range for each usage; XPath usages live inside
        // injected fragments and must be mapped back into the hosting XML file.
        final PsiReference[] psiReferences = references.toArray(new PsiReference[references.size()]);
        TextRange[] ranges = ContainerUtil.map2Array(psiReferences, TextRange.class, new Function<PsiReference, TextRange>() {
            public TextRange fun(PsiReference s) {
                final PsiElement psiElement = s.getElement();
                final XmlAttributeValue context = PsiTreeUtil.getContextOfType(psiElement, XmlAttributeValue.class, true);
                if (psiElement instanceof XPathElement && context != null) {
                    return XsltCodeInsightUtil.getRangeInsideHostingFile((XPathElement)psiElement).cutOut(s.getRangeInElement());
                }
                return psiElement.getTextRange().cutOut(s.getRangeInElement());
            }
        });

        // Highlight in the top-level (host) editor, not the injected one.
        final Editor e = editor instanceof EditorWindow ? ((EditorWindow)editor).getDelegate() : editor;
        for (TextRange range : ranges) {
            final TextAttributes textAttributes = EditorColors.SEARCH_RESULT_ATTRIBUTES.getDefaultAttributes();
            final Color color = getScrollmarkColor(textAttributes);
            highlighter.addOccurrenceHighlight(e, range.getStartOffset(), range.getEndOffset(),
                    textAttributes, HighlightManagerImpl.HIDE_BY_ESCAPE, highlighters, color);
        }

        // Also mark the declaration itself with "write access" attributes.
        highlighter.addOccurrenceHighlights(e, new PsiElement[]{ ((XsltVariable)variable).getNameIdentifier() },
                EditorColors.WRITE_SEARCH_RESULT_ATTRIBUTES.getDefaultAttributes(), false, highlighters);

        // Confirm with the user; the external-refs variant warns that the
        // declaration will be kept. Yes == 0 in the dialog's return value.
        if (!hasExternalRefs) {
            if (Messages.showYesNoDialog(MessageFormat.format("Inline {0} ''{1}''? ({2} occurrence{3})",
                    type, variable.getName(), String.valueOf(references.size()),
                    references.size() > 1 ? "s" : ""), TITLE, Messages.getQuestionIcon()) != 0) {
                return;
            }
        } else {
            if (Messages.showYesNoDialog(MessageFormat.format("Inline {0} ''{1}''? ({2} local occurrence{3})\n" +
                    "\nWarning: It is being used in external files. Its declaration will not be removed.",
                    type, variable.getName(), String.valueOf(references.size()),
                    references.size() > 1 ? "s" : ""), TITLE, Messages.getWarningIcon()) != 0) {
                return;
            }
        }

        final boolean hasRefs = hasExternalRefs;
        // PSI mutation must run as a command inside a write action.
        Runnable runnable = new Runnable() {
            public void run() {
                Runnable runnable = new Runnable() {
                    public void run() {
                        try {
                            for (PsiReference psiReference : references) {
                                final PsiElement element = psiReference.getElement();
                                if (element instanceof XPathElement) {
                                    final XPathElement newExpr = XPathChangeUtil.createExpression(element, expression);
                                    element.replace(newExpr);
                                } else {
                                    assert false;
                                }
                            }
                            if (!hasRefs) {
                                // Safe to remove the declaration: no external usages remain.
                                tag.delete();
                            }
                        } catch (IncorrectOperationException e) {
                            Logger.getInstance(XsltInlineAction.this.getClass().getName()).error(e);
                        }
                    }
                };
                CommandProcessor.getInstance().executeCommand(project, runnable, "XSLT.Inline", null);
            }
        };
        ApplicationManager.getApplication().runWriteAction(runnable);
    }

    /**
     * Picks a scrollbar-stripe color for a highlight: the explicit error-stripe
     * color when set, else a darkened background, else none.
     */
    @Nullable
    private static Color getScrollmarkColor(TextAttributes textAttributes) {
        if (textAttributes.getErrorStripeColor() != null) {
            return textAttributes.getErrorStripeColor();
        } else if (textAttributes.getBackgroundColor() != null) {
            return textAttributes.getBackgroundColor().darker();
        } else {
            return null;
        }
    }
}
/*
 * Copyright (c) 2001-2013 David Soergel <dev@davidsoergel.com>
 * Licensed under the Apache License, Version 2.0
 * http://www.apache.org/licenses/LICENSE-2.0
 */

package com.davidsoergel.dsutils.collections;

import org.apache.log4j.Logger;
import org.jetbrains.annotations.NotNull;

import java.io.Serializable;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentSkipListSet;

/**
 * A data structure that maps pairs of keys to values, and is queryable in both directions (i.e., also in the
 * value->keys direction). The order of the keys is unimportant. The key pairs are stored sorted by the associated
 * values, so eg. the pair with the lowest value can be queried.
 *
 * @author <a href="mailto:dev@davidsoergel.com">David Soergel</a>
 * @version $Id$
 */
public class SortedSymmetric2dBiMapImpl<K extends Comparable<K> & Serializable, V extends Comparable<V> & Serializable>
		implements SortedSymmetric2dBiMap<K, V> // implements Serializable
	{
// ------------------------------ FIELDS ------------------------------

	private static final Logger logger = Logger.getLogger(SortedSymmetric2dBiMapImpl.class);

	// Really we wanted a SortedBiMultimap or something, but lacking that, we store
	// the inverse (key -> pairs containing it) map explicitly.
	// NOTE(historical): Guava's HashMultimap occasionally gave a wrong size() here,
	// possibly a concurrency problem; hence the hand-rolled SimpleMultiMap below.
	@NotNull
	protected SimpleMultiMap<K, UnorderedPair<K>> keyToKeyPairs = new SimpleMultiMap<K, UnorderedPair<K>>();

	// We want a map sorted by value; ConcurrentValueSortedMap simulates that with a
	// regular map plus a separate value-sorted set. Invariant: a key/value pair must be
	// inserted into the regular map BEFORE its key is added to the sorted set.
	// public only for the sake of efficient serialization in HierarchicalClusteringStringDistanceMatrix.
	@NotNull
	public ConcurrentValueSortedMap<UnorderedPair<K>, V> keyPairToValueSorted =
			new ConcurrentValueSortedMap<UnorderedPair<K>, V>();

	/**
	 * Copy constructor; copies both directions of the mapping (keys themselves are not cloned).
	 */
	public SortedSymmetric2dBiMapImpl(@NotNull final SortedSymmetric2dBiMapImpl<K, V> cloneFrom)
		{
		keyToKeyPairs = new SimpleMultiMap<K, UnorderedPair<K>>(cloneFrom.keyToKeyPairs);
		keyPairToValueSorted = new ConcurrentValueSortedMap<UnorderedPair<K>, V>(cloneFrom.keyPairToValueSorted);
		}

	public SortedSymmetric2dBiMapImpl()
		{
		}

// -------------------------- OTHER METHODS --------------------------

	/** Value for an already-constructed pair; null when absent. */
	protected V get(UnorderedPair<K> keyPair)
		{
		return keyPairToValueSorted.get(keyPair);
		}

	/** Value for the unordered pair (key1, key2); argument order is irrelevant. */
	public V get(@NotNull K key1, @NotNull K key2)
		{
		return get(new UnorderedPair<K>(key1, key2));
		}

	public Set<K> getKeys()
		{
		return keyToKeyPairs.keySet();
		}

	@NotNull
	public K getKey1WithSmallestValue()
		{
		return getKeyPairWithSmallestValue().getKey1();
		}

	@NotNull
	public K getKey2WithSmallestValue()
		{
		return getKeyPairWithSmallestValue().getKey2();
		}

	/** The (pair, value) entry with the smallest value, atomically. */
	public synchronized OrderedPair<UnorderedPair<K>, V> getKeyPairAndSmallestValue()
		{
		return keyPairToValueSorted.firstPair();
		}

	public UnorderedPair<K> getKeyPairWithSmallestValue()
		{
		return keyPairToValueSorted.firstKey();
		}

	public V getSmallestValue()
		{
		// keyPairToValueSorted is value-sorted, so its first value is the minimum.
		return keyPairToValueSorted.firstValue();
		}

	/**
	 * Asserts the matrix holds exactly one value per unordered key pair
	 * (n*(n-1)/2 entries). In many applications the matrix actually won't be
	 * complete; only call this when completeness is expected.
	 */
	public void matrixCompleteSanityCheck()
		{
		final int numKeys = numKeys();
		final int numKeyPairs = keyPairToValueSorted.size();
		assert numKeyPairs == numKeys * (numKeys - 1) / 2;
		}

	public int numPairs()
		{
		return keyPairToValueSorted.size();
		}

	public boolean isEmpty()
		{
		return keyPairToValueSorted.isEmpty();
		}

	/**
	 * Associates a value with the unordered pair (key1, key2) and indexes the
	 * pair under both keys. The two keys must differ.
	 */
	public void put(@NotNull K key1, @NotNull K key2, @NotNull V d)
		{
		assert !key1.equals(key2);
		@NotNull UnorderedPair<K> pair = new UnorderedPair<K>(key1, key2);
		// Value map first, then the key index (see the insertion-order invariant above).
		keyPairToValueSorted.put(pair, d);
		keyToKeyPairs.put(key1, pair);
		keyToKeyPairs.put(key2, pair);
		}

	/**
	 * Pair-keyed variant of put.
	 * NOTE(review): the original javadoc claimed "Does not populate keyToKeyPairs!!",
	 * but the code below clearly does index both keys — the old comment was stale.
	 *
	 * @param keyPair the unordered key pair
	 * @param d       the value to associate
	 */
	protected void put(@NotNull UnorderedPair<K> keyPair, @NotNull V d)
		{
		keyPairToValueSorted.put(keyPair, d);
		keyToKeyPairs.put(keyPair.getKey1(), keyPair);
		keyToKeyPairs.put(keyPair.getKey2(), keyPair);
		}

	/** Weaker invariant than matrixCompleteSanityCheck: at MOST n*(n-1)/2 pairs. */
	protected synchronized void sanityCheck()
		{
		final int numKeys = numKeys();
		final int numKeyPairs = keyPairToValueSorted.size();
		assert numKeyPairs <= numKeys * (numKeys - 1) / 2;
		}

	public synchronized int numKeys()
		{
		return keyToKeyPairs.numKeys();
		}

	/**
	 * Registers a key with no pairs yet; relies on SimpleMultiMap.get creating
	 * an empty set for unknown keys as a side effect.
	 */
	public synchronized void addKey(K key1)
		{
		keyToKeyPairs.get(key1);
		}

	/** Bulk put; same per-entry behavior and ordering as put(key1, key2, d). */
	public synchronized void putAll(@NotNull final Map<UnorderedPair<K>, V> result)
		{
		for (@NotNull Map.Entry<UnorderedPair<K>, V> entry : result.entrySet())
			{
			UnorderedPair<K> pair = entry.getKey();
			@NotNull final K key1 = pair.getKey1();
			@NotNull final K key2 = pair.getKey2();
			assert !key1.equals(key2);
			keyToKeyPairs.put(key1, pair);
			keyToKeyPairs.put(key2, pair);
			keyPairToValueSorted.put(pair, entry.getValue());
			}
		}

	public synchronized void removeAll(@NotNull final Collection<K> keys)
		{
		for (K key : keys)
			{
			remove(key);
			}
		}

	/**
	 * Remove all key pairs and values associated with the given key.
	 *
	 * @param b the key to remove entirely
	 * @return the number of pairs removed
	 */
	public synchronized int remove(K b)
		{
		int removed = 0;
		// Snapshot the pairs containing b: we mutate keyToKeyPairs while iterating.
		@NotNull final Collection<UnorderedPair<K>> obsoletePairs =
				new HashSet<UnorderedPair<K>>(keyToKeyPairs.get(b));
		for (@NotNull UnorderedPair<K> pair : obsoletePairs)
			{
			removed++;
			keyPairToValueSorted.remove(pair);
			// The pair may store (a,b) or (b,a); pick a = the element that is NOT b.
			@NotNull K a = pair.getKey1();
			if (a.equals(b))
				{
				a = pair.getKey2();
				assert !a.equals(b);
				}
			keyToKeyPairs.get(a).remove(pair);
			// b's own entry is removed wholesale below, avoiding
			// ConcurrentModificationException on its pair set.
			}
		keyToKeyPairs.removeAll(b);
		return removed;
		}

	public Set<Map.Entry<UnorderedPair<K>, V>> entrySet()
		{
		return keyPairToValueSorted.entrySet();
		}

	/** Asserts that no trace of removedKey remains and all pairs use remaining keys. */
	public void removalSanityCheck(final K removedKey, @NotNull final Collection<K> remainingKeys)
		{
		assert !getKeys().contains(removedKey);
		for (@NotNull Map.Entry<UnorderedPair<K>, V> entry : keyPairToValueSorted.entrySet())
			{
			@NotNull final K k1 = entry.getKey().getKey1();
			@NotNull final K k2 = entry.getKey().getKey2();
			assert !k1.equals(removedKey);
			assert !k2.equals(removedKey);
			assert remainingKeys.contains(k1);
			assert remainingKeys.contains(k2);
			}
		}

	@NotNull
	public ConcurrentLinkedQueue<Map.Entry<UnorderedPair<K>, V>> entriesQueue()
		{
		return keyPairToValueSorted.entriesQueue();
		}

// -------------------------- INNER CLASSES --------------------------

	/**
	 * Minimal concurrent multimap: key -> set of values, with sets created lazily.
	 * NOTE: non-static inner class, so each instance holds a reference to its
	 * enclosing map (relevant for serialization).
	 */
	protected class SimpleMultiMap<X extends Serializable, Y extends Serializable> implements Serializable
		{
// ------------------------------ FIELDS ------------------------------

		@NotNull
		private Map<X, Set<Y>> contents = new ConcurrentHashMap<X, Set<Y>>();

		public SimpleMultiMap()
			{
			}

		/**
		 * The keys themselves are not cloned.
		 *
		 * @param cloneFrom the multimap to copy (value SETS are shared, not copied)
		 */
		public SimpleMultiMap(@NotNull final SimpleMultiMap<X, Y> cloneFrom)
			{
			contents.putAll(cloneFrom.contents);
			}

// -------------------------- OTHER METHODS --------------------------

		/** Ensures an (empty) value set exists for every key in c. */
		public void addKeys(final Collection<X> c)
			{
			for (X a : c)
				{
				Set<Y> ys = contents.get(a);
				if (ys == null)
					{
					ys = new ConcurrentSkipListSet<Y>();
					contents.put(a, ys);
					}
				}
			}

		/**
		 * Live value set for a key; creates and registers an empty set when absent
		 * (so this is a mutating read — see addKey above which relies on that).
		 */
		public Collection<Y> get(final X a)
			{
			Set<Y> ys = contents.get(a);
			if (ys == null)
				{
				ys = new ConcurrentSkipListSet<Y>();
				contents.put(a, ys);
				}
			return ys;
			}

		public Set<X> keySet()
			{
			return contents.keySet();
			}

		public int numKeys()
			{
			return contents.size();
			}

		public void put(@NotNull final X key1, @NotNull final Y val)
			{
			get(key1).add(val);
			}

		/** Drops the key and its whole value set. */
		public void removeAll(final X b)
			{
			contents.remove(b);
			}
		}
	}
/*
 * Copyright (C) The Apache Software Foundation. All rights reserved.
 *
 * This software is published under the terms of the Apache Software
 * License version 1.1, a copy of which has been included with this
 * distribution in the LICENSE.APL file.
 */

// Contributors: Aaron Greenhouse <aarong@cs.cmu.edu>

package org.apache.log4j;

import org.apache.log4j.Category;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.log4j.helpers.BoundedFIFO;
import org.apache.log4j.helpers.OptionConverter;
import org.apache.log4j.spi.AppenderAttachable;
import org.apache.log4j.helpers.AppenderAttachableImpl;
import org.apache.log4j.helpers.LogLog;

import java.util.Enumeration;

/**
   The AsyncAppender lets users log events asynchronously. It uses a
   bounded buffer to store logging events.

   <p>The AsyncAppender will collect the events sent to it and then
   dispatch them to all the appenders that are attached to it. You can
   attach multiple appenders to an AsyncAppender.

   <p>The AsyncAppender uses a separate thread to serve the events in
   its bounded buffer.

   <p>Refer to the results in {@link org.apache.log4j.performance.Logging}
   for the impact of using this appender.

   <p><b>Important note:</b> The <code>AsyncAppender</code> can only
   be script configured using the {@link
   org.apache.log4j.xml.DOMConfigurator}. Refer to example
   configuration files <a
   href="xml/examples/doc-files/sample4.xml">sample4.xml</a> and <a
   href="xml/examples/doc-files/sample5.xml">sample5.xml</a>.

   @author Ceki G&uuml;lc&uuml;
   @since version 0.9.1 */
public class AsyncAppender extends AppenderSkeleton
                           implements AppenderAttachable {

  /**
     A string constant used in naming the option for setting the
     location information flag. Current value of this string
     constant is <b>LocationInfo</b>.

     <p>Note that all option keys are case sensitive.

     @deprecated Options are now handled using the JavaBeans paradigm.
     This constant is not longer needed and will be removed in the
     <em>near</em> term.
  */
  public static final String LOCATION_INFO_OPTION = "LocationInfo";

  /**
     A string constant used in naming the option for setting the size of
     the internal buffer where logging events are stored until they are
     written. Current value of this string constant is <b>BufferSize</b>.

     <p>Note that all option keys are case sensitive.

     @deprecated Options are now handled using the JavaBeans paradigm.
     This constant is not longer needed and will be removed in the
     <em>near</em> term.
  */
  public static final String BUFFER_SIZE_OPTION = "BufferSize";

  /**
     The default buffer size is set to 128 events.
  */
  public static final int DEFAULT_BUFFER_SIZE = 128;

  //static Category cat = Category.getInstance(AsyncAppender.class.getName());

  // Bounded buffer shared with the Dispatcher thread; all producer/consumer
  // hand-off happens under this object's monitor via wait/notify.
  BoundedFIFO bf = new BoundedFIFO(DEFAULT_BUFFER_SIZE);
  // The appenders that actually write the dispatched events; guarded by its own monitor.
  AppenderAttachableImpl aai;
  // Background thread that drains bf and forwards events to aai.
  Dispatcher dispatcher;
  // When true, the caller's stack location is captured before hand-off (slow).
  boolean locationInfo = false;
  // Ensures the interruption warning is logged with a stack trace only once.
  boolean interruptedWarningMessage = false;

  public AsyncAppender() {
    // Note: The dispatcher code assumes that the aai is set once and
    // for all!!!
    aai = new AppenderAttachableImpl();
    dispatcher = new Dispatcher(bf, this);
    dispatcher.start();
  }

  public void addAppender(Appender newAppender) {
    synchronized(aai) {
      aai.addAppender(newAppender);
    }
  }

  public void append(LoggingEvent event) {
    // Set the NDC and thread name for the calling thread as these
    // LoggingEvent fields were not set at event creation time.
    event.getNDC();
    event.getThreadName();
    if(locationInfo) {
      event.getLocationInformation();
    }
    synchronized(bf) {
      // Block the producer until the dispatcher frees a slot.
      while(bf.isFull()) {
        try {
          //LogLog.debug("Waiting for free space in buffer, "+bf.length());
          bf.wait();
        } catch(InterruptedException e) {
          if(!interruptedWarningMessage) {
            interruptedWarningMessage = true;
            LogLog.warn("AsyncAppender interrupted.", e);
          } else {
            LogLog.warn("AsyncAppender interrupted again.");
          }
        }
      }
      //cat.debug("About to put new event in buffer.");
      bf.put(event);
      if(bf.wasEmpty()) {
        // The dispatcher waits only when the buffer is empty, so a
        // notification is needed exactly on the empty-to-non-empty transition.
        //cat.debug("Notifying dispatcher to process events.");
        bf.notify();
      }
    }
  }

  /**
     Close this <code>AsyncAppender</code> by interrupting the
     dispatcher thread which will process all pending events before
     exiting. */
  public void close() {
    synchronized(this) {
      if(closed) // avoid multiple close, otherwise one gets NullPointerException
        return;
      closed = true;
    }

    // The following cannot be synchronized on "this" because the
    // dispatcher synchronizes with "this" in its while loop. If we
    // did synchronize we would systematically get deadlocks when
    // close was called.
    dispatcher.close();
    try {
      dispatcher.join();
    } catch(InterruptedException e) {
      LogLog.error("Got an InterruptedException while waiting for the "+
                   "dispatcher to finish.", e);
    }
    dispatcher = null;
    bf = null;
  }

  public Enumeration getAllAppenders() {
    synchronized(aai) {
      return aai.getAllAppenders();
    }
  }

  public Appender getAppender(String name) {
    synchronized(aai) {
      return aai.getAppender(name);
    }
  }

  /**
     Returns the current value of the <b>LocationInfo</b> option.
  */
  public boolean getLocationInfo() {
    return locationInfo;
  }

  /**
     The <code>AsyncAppender</code> does not require a layout. Hence,
     this method always returns <code>false</code>.
  */
  public boolean requiresLayout() {
    return false;
  }

  public void removeAllAppenders() {
    synchronized(aai) {
      aai.removeAllAppenders();
    }
  }

  public void removeAppender(Appender appender) {
    synchronized(aai) {
      aai.removeAppender(appender);
    }
  }

  public void removeAppender(String name) {
    synchronized(aai) {
      aai.removeAppender(name);
    }
  }

  /**
     The <b>LocationInfo</b> option takes a boolean value. By default,
     it is set to false which means there will be no effort to extract
     the location information related to the event. As a result, the
     event that will be ultimately logged will likely to contain the
     wrong location information (if present in the log format).

     <p>Location information extraction is comparatively very slow and
     should be avoided unless performance is not a concern.
  */
  public void setLocationInfo(boolean flag) {
    locationInfo = flag;
  }

  /**
     The <b>BufferSize</b> option takes a non-negative integer value.
     This integer value determines the maximum size of the bounded
     buffer. Increasing the size of the buffer is always safe. However,
     if an existing buffer holds unwritten elements, then <em>decreasing
     the buffer size will result in event loss.</em> Nevertheless,
     while script configuring the AsyncAppender, it is safe to set a
     buffer size smaller than the {@link #DEFAULT_BUFFER_SIZE default
     buffer size} because configurators guarantee that an appender
     cannot be used before being completely configured.
  */
  public void setBufferSize(int size) {
    bf.resize(size);
  }

  /**
     Returns the current value of the <b>BufferSize</b> option.
  */
  public int getBufferSize() {
    return bf.getMaxSize();
  }

  /**
     Returns the option names for this component in addition in
     addition to the options of its super class {@link
     AppenderSkeleton}.

     @deprecated We now use JavaBeans introspection to configure
     components. Options strings are no longer needed.
  */
  public String[] getOptionStrings() {
    return OptionConverter.concatanateArrays(super.getOptionStrings(),
          new String[] {LOCATION_INFO_OPTION, BUFFER_SIZE_OPTION});
  }

  /**
     Set AsyncAppender specific options.

     <p>On top of the options of the super class {@link
     AppenderSkeleton}, the only recognized options are
     <b>BufferSize</b> and <b>LocationInfo</b>; see
     {@link #setBufferSize} and {@link #setLocationInfo} for their
     semantics.

     @deprecated Use the setter method for the option directly instead
     of the generic <code>setOption</code> method.
  */
  public void setOption(String option, String value) {
    if(value == null) return;
    super.setOption(option, value);

    if (option.equals(LOCATION_INFO_OPTION))
      locationInfo = OptionConverter.toBoolean(value, locationInfo);
    else if (option.equals(BUFFER_SIZE_OPTION)) {
      int newSize = OptionConverter.toInt(value, DEFAULT_BUFFER_SIZE);
      bf.resize(newSize);
    }
  }

  /*
  public
  String getOption(String option) {
    if (option.equals(LOCATION_INFO_OPTION)) {
      return locationInfo ? "true" : "false";
    } else if (option.equals(BUFFER_SIZE_OPTION)) {
      return Integer.toString(bf.getMaxSize());
    } else {
      return super.getOption(option);
    }
  }
  */
}
// ------------------------------------------------------------------------------
// ------------------------------------------------------------------------------
// ----------------------------------------------------------------------------

/**
   Background thread that drains the AsyncAppender's bounded buffer and
   forwards each event to the attached appenders.
*/
class Dispatcher extends Thread {

  BoundedFIFO bf;
  AppenderAttachableImpl aai;
  // Set by close() under the bf monitor; only acted on once the buffer is empty,
  // so pending events are always flushed before the thread exits.
  boolean interrupted = false;
  AsyncAppender container;

  Dispatcher(BoundedFIFO bf, AsyncAppender container) {
    this.bf = bf;
    this.container = container;
    this.aai = container.aai;
    // set the dispatcher priority to lowest possible value
    this.setPriority(Thread.MIN_PRIORITY);
    this.setName("Dispatcher-"+getName());

    // set the dispatcher priority to MIN_PRIORITY plus or minus 2
    // depending on the direction of MIN to MAX_PRIORITY.
    //+ (Thread.MAX_PRIORITY > Thread.MIN_PRIORITY ? 1 : -1)*2);
  }

  void close() {
    synchronized(bf) {
      interrupted = true;
      // We have a waiting dispatcher if and only if bf.length is
      // zero. In that case, we need to give it a death kiss.
      if(bf.length() == 0) {
        bf.notify();
      }
    }
  }

  /**
     The dispatching strategy is to wait until there are events in the
     buffer to process. After having processed an event, we release the
     monitor (variable bf) so that new events can be placed in the
     buffer, instead of keeping the monitor and processing the remaining
     events in the buffer.

     <p>Other approaches might yield better results.
  */
  public void run() {
    //Category cat = Category.getInstance(Dispatcher.class.getName());
    LoggingEvent event;

    while(true) {
      synchronized(bf) {
        if(bf.length() == 0) {
          // Exit loop if interrupted but only if the the buffer is empty.
          if(interrupted) {
            //cat.info("Exiting.");
            return;
          }
          try {
            //LogLog.debug("Waiting for new event to dispatch.");
            bf.wait();
          } catch(InterruptedException e) {
            LogLog.error("The dispathcer should not be interrupted.");
            break;
          }
        }
        event = bf.get();
        if(bf.wasFull()) {
          // Wake any producer blocked in append() waiting for a free slot.
          //LogLog.debug("Notifying AsyncAppender about freed space.");
          bf.notify();
        }
      } // synchronized

      // The synchronization on parent is necessary to protect against
      // operations on the aai object of the parent
      synchronized(container.aai) {
        if(aai != null && event != null) {
          aai.appendLoopOnAppenders(event);
        }
      }
    } // while
  }
}
package view;

import java.awt.Dimension;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableModel;

import controller.funzioniModeratoreController;
import controller.loginController;
import gui.funzioniModeratorePage;
import gui.profiloUtentePage;

/**
 * Swing view for the moderator page: lists pending reviews (with approve /
 * disapprove actions) and registered users (with XP / level editing).
 * All persistent changes are delegated to {@link funzioniModeratoreController};
 * the selected row id/index travel via the buttons' client properties
 * {@code "id"} and {@code "riga"}.
 */
public class funzioniModeratoreView {

	/** Accepts only base-10 non-negative integers without leading zeros. */
	private static final String NUMERIC_PATTERN = "^(0|[1-9][0-9]*)$";

	/** Compiled once; compiling a Pattern on every validation call is wasteful. */
	private static final Pattern NUMERIC_REGEX = Pattern.compile(NUMERIC_PATTERN);

	private static JTable tableRecensioni;
	private JScrollPane scrollPane;
	private static JTable tableUtenti;
	private JScrollPane scrollPaneUtenti;
	private static DefaultTableModel tableRecensioniModel;
	private static DefaultTableModel tableUtentiModel;

	/**
	 * Method used to create the list of the reviews that haven't been approved yet.
	 * This list is inserted into a JTable object which is in turn inserted into a JScrollPane.
	 *
	 * @param recensioni a JPanel where the JScrollPane will appear
	 * @param approva a JButton that when clicked approves the currently selected review
	 * @param disapprova a JButton that when clicked disapproves the currently selected review
	 */
	public void creaListaRecensioni(JPanel recensioni, JButton approva, JButton disapprova) {
		String[] names = { "id", "testo", "approvata" };
		Object[][] mMatrix = funzioniModeratoreController.listaRecensioni();
		tableRecensioniModel = new DefaultTableModel(mMatrix, names) {
			@Override
			public boolean isCellEditable(int row, int column) {
				return false; // the table is read-only
			}

			@Override
			public Class<?> getColumnClass(int columnIndex) {
				// Infer the column class from the first row so renderers behave;
				// fall back to the default (Object) for an empty table or null cell.
				if (getRowCount() == 0) {
					return super.getColumnClass(columnIndex);
				}
				Object value = getValueAt(0, columnIndex);
				if (value == null) {
					return super.getColumnClass(columnIndex);
				}
				return value.getClass();
			}
		};
		// FIX: tableRecensioni is static; assign it without the misleading "this." prefix.
		tableRecensioni = new JTable(tableRecensioniModel);
		scrollPane = new JScrollPane(tableRecensioni);
		scrollPane.setColumnHeaderView(tableRecensioni.getTableHeader());
		scrollPane.setPreferredSize(new Dimension(800, 200));
		recensioni.add(scrollPane);
		tableRecensioni.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
			@Override
			public void valueChanged(ListSelectionEvent event) {
				// getValueIsAdjusting() filters the duplicate events fired while
				// the selection is changing. FIX: a valid row index is strictly
				// less than getRowCount() (the old check used <=).
				if (!event.getValueIsAdjusting() && tableRecensioni.getSelectedRow() >= 0
						&& tableRecensioni.getSelectedRow() < tableRecensioni.getRowCount()) {
					String id = tableRecensioni.getValueAt(tableRecensioni.getSelectedRow(), 0).toString();
					approva.putClientProperty("id", id);
					approva.putClientProperty("riga", tableRecensioni.getSelectedRow());
					disapprova.putClientProperty("id", id);
					disapprova.putClientProperty("riga", tableRecensioni.getSelectedRow());
				}
			}
		});
	}

	/**
	 * Method used to approve a specific review.
	 *
	 * @param btnApprova a JButton that when clicked approves the currently selected review
	 * @param lblErroreRecensioni a JLabel used to display errors to the user
	 */
	public void approvaRecensione(JButton btnApprova, JLabel lblErroreRecensioni) {
		lblErroreRecensioni.setText("");
		Object riga = btnApprova.getClientProperty("riga");
		if (riga == null) {
			// BUG FIX: the old code invoked the controller with a null id BEFORE
			// checking whether a row was selected.
			lblErroreRecensioni.setText("Errore - Seleziona una riga!");
			return;
		}
		String idRecensione = (String) btnApprova.getClientProperty("id");
		funzioniModeratoreController.approvaRecensione(idRecensione);
		tableRecensioniModel.removeRow((int) riga);
	}

	/**
	 * Method used to disapprove a specific review.
	 *
	 * @param btnDisapprova a JButton that when clicked disapproves the currently selected review
	 * @param lblErroreRecensioni a JLabel used to display errors to the user
	 */
	public void disapprovaRecensione(JButton btnDisapprova, JLabel lblErroreRecensioni) {
		lblErroreRecensioni.setText("");
		Object riga = btnDisapprova.getClientProperty("riga");
		if (riga == null) {
			// BUG FIX: check the selection before calling the controller (see approvaRecensione).
			lblErroreRecensioni.setText("Errore - Seleziona una riga!");
			return;
		}
		String idRecensione = (String) btnDisapprova.getClientProperty("id");
		funzioniModeratoreController.disapprovaRecensione(idRecensione);
		tableRecensioniModel.removeRow((int) riga);
	}

	/**
	 * Method used to modify the user's xp points. Only a moderator can use this method.
	 *
	 * @param comboBox a JComboBox used to select if the moderator wants to increase or decrease xp
	 * @param xpField a JTextField used to insert how many xp points to give/remove
	 * @param btnAfferma a JButton that when clicked changes the xp
	 * @param lblErroreUtenti a JLabel used to display errors to the user
	 */
	public void modificaXPUtente(JComboBox comboBox, JTextField xpField, JButton btnAfferma, JLabel lblErroreUtenti)
			throws NumberFormatException {
		lblErroreUtenti.setText("");
		if (!controllaTesto(xpField)) {
			lblErroreUtenti.setText("Errore - Inserire un numero intero!");
			return;
		}
		Object rigaProp = btnAfferma.getClientProperty("riga");
		if (rigaProp == null) {
			// BUG FIX: the old code set the error text but then FELL THROUGH with
			// riga == 0, silently editing row 0 and notifying the controller.
			lblErroreUtenti.setText("Errore - Selezionare un utente!");
			return;
		}
		int riga = (int) rigaProp;
		String id = (String) btnAfferma.getClientProperty("id");
		int exXP = (int) tableUtentiModel.getValueAt(riga, 2);
		int exLevel = (int) tableUtentiModel.getValueAt(riga, 3);
		// FIX: Integer.parseInt replaces the deprecated new Integer(String); the
		// input was already validated against NUMERIC_PATTERN above.
		int delta = Integer.parseInt(xpField.getText());
		boolean aumenta = "Aumenta".equals(comboBox.getSelectedItem());
		int newXP = aumenta ? exXP + delta : exXP - delta;
		// The controller receives the operation as a signed string, e.g. "+10" or "-5".
		String op = (aumenta ? "+" : "-") + xpField.getText();
		tableUtentiModel.setValueAt(newXP, riga, 2);
		// Level check: one level per 100 XP.
		int newLevel = newXP / 100;
		if (newLevel != exLevel) {
			tableUtentiModel.setValueAt(newLevel, riga, 3);
		}
		funzioniModeratoreController.modificaXPUtente(op, id);
	}

	/**
	 * Method used to check if the text contains only numerics.
	 *
	 * @param xpField a JTextField with the user's data
	 * @return true if the field holds a non-negative base-10 integer
	 */
	private boolean controllaTesto(JTextField xpField) {
		Matcher matcher = NUMERIC_REGEX.matcher(xpField.getText());
		return matcher.matches();
	}

	/**
	 * Method used to create the list of the system's users.
	 * This list is inserted into a JTable object which is in turn inserted into a JScrollPane.
	 *
	 * @param cambioLivello a JPanel where the JScrollPane will appear
	 * @param btnAfferma a JButton used to fire the event
	 */
	public void creaListaUtenti(JPanel cambioLivello, JButton btnAfferma) {
		String[] names = { "Id", "Username", "XP", "Livello" };
		Object[][] mMatrix = funzioniModeratoreController.listaUtenti(loginController.mObject.getID());
		tableUtentiModel = new DefaultTableModel(mMatrix, names) {
			@Override
			public boolean isCellEditable(int row, int column) {
				return false; // read-only table
			}
		};
		tableUtenti = new JTable(tableUtentiModel);
		scrollPaneUtenti = new JScrollPane(tableUtenti);
		scrollPaneUtenti.setColumnHeaderView(tableUtenti.getTableHeader());
		scrollPaneUtenti.setPreferredSize(new Dimension(800, 200));
		cambioLivello.add(scrollPaneUtenti);
		tableUtenti.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
			@Override
			public void valueChanged(ListSelectionEvent event) {
				// FIX: strict < getRowCount() (valid rows are 0..rowCount-1);
				// getValueIsAdjusting() prevents double events.
				if (!event.getValueIsAdjusting() && tableUtenti.getSelectedRow() >= 0
						&& tableUtenti.getSelectedRow() < tableUtenti.getRowCount()) {
					btnAfferma.putClientProperty("id",
							tableUtenti.getValueAt(tableUtenti.getSelectedRow(), 0).toString());
					btnAfferma.putClientProperty("riga", tableUtenti.getSelectedRow());
				}
			}
		});
	}

	/**
	 * Method used to close the frame and return to the user profile page.
	 *
	 * @param fMP the frame that has to be closed
	 */
	public void tornaDietro(funzioniModeratorePage fMP) {
		fMP.setVisible(false);
		fMP.dispose();
		funzioniModeratoreController.aggiornaDatiGioco();
		profiloUtentePage framePaginaUtente = new profiloUtentePage(loginController.mObject);
		framePaginaUtente.setVisible(true);
	}
}
/**
 * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.interestrate.bond.provider;

import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertTrue;

import org.testng.annotations.Test;
import org.threeten.bp.ZonedDateTime;

import com.opengamma.analytics.financial.datasets.CalendarUSD;
import com.opengamma.analytics.financial.instrument.annuity.AnnuityDefinition;
import com.opengamma.analytics.financial.instrument.annuity.AnnuityDefinitionBuilder;
import com.opengamma.analytics.financial.instrument.bond.BondDataSetsGbp;
import com.opengamma.analytics.financial.instrument.bond.BondFixedSecurityDefinition;
import com.opengamma.analytics.financial.instrument.bond.BondTotalReturnSwapDefinition;
import com.opengamma.analytics.financial.instrument.index.IborIndex;
import com.opengamma.analytics.financial.instrument.index.IndexIborMaster;
import com.opengamma.analytics.financial.instrument.payment.CouponDefinition;
import com.opengamma.analytics.financial.instrument.payment.CouponFixedDefinition;
import com.opengamma.analytics.financial.instrument.payment.PaymentDefinition;
import com.opengamma.analytics.financial.instrument.payment.PaymentFixedDefinition;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivativeVisitor;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.Annuity;
import com.opengamma.analytics.financial.interestrate.bond.definition.BondFixedSecurity;
import com.opengamma.analytics.financial.interestrate.bond.definition.BondTotalReturnSwap;
import com.opengamma.analytics.financial.interestrate.payments.derivative.Payment;
import com.opengamma.analytics.financial.provider.calculator.discounting.PV01CurveParametersCalculator;
import com.opengamma.analytics.financial.provider.calculator.discounting.PresentValueDiscountingCalculator;
import com.opengamma.analytics.financial.provider.calculator.issuer.PresentValueCurveSensitivityIssuerCalculator;
import com.opengamma.analytics.financial.provider.calculator.issuer.PresentValueIssuerCalculator;
import com.opengamma.analytics.financial.provider.description.IssuerProviderDiscountDataSets;
import com.opengamma.analytics.financial.provider.description.interestrate.IssuerProviderDiscount;
import com.opengamma.analytics.financial.provider.description.interestrate.ParameterIssuerProviderInterface;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyMulticurveSensitivity;
import com.opengamma.analytics.financial.util.AssertSensitivityObjects;
import com.opengamma.analytics.util.amount.ReferenceAmount;
import com.opengamma.analytics.util.time.TimeCalculator;
import com.opengamma.financial.convention.StubType;
import com.opengamma.financial.convention.calendar.Calendar;
import com.opengamma.timeseries.precise.zdt.ImmutableZonedDateTimeDoubleTimeSeries;
import com.opengamma.timeseries.precise.zdt.ZonedDateTimeDoubleTimeSeries;
import com.opengamma.util.money.Currency;
import com.opengamma.util.money.MultipleCurrencyAmount;
import com.opengamma.util.time.DateUtils;
import com.opengamma.util.tuple.Pair;

/**
 * Test related to the bond total return swap pricing methodology by discounting of the cash-flows.
 */
public class BondTotalReturnSwapDiscountingMethodTest {

  // TRS schedule: two alternative effective dates and termination dates.
  private static final ZonedDateTime EFFECTIVE_DATE_1 = DateUtils.getUTCDate(2012, 2, 9);
  private static final ZonedDateTime EFFECTIVE_DATE_2 = DateUtils.getUTCDate(2012, 3, 9);
  private static final ZonedDateTime TERMINATION_DATE_1 = DateUtils.getUTCDate(2012, 5, 9);
  private static final ZonedDateTime TERMINATION_DATE_2 = DateUtils.getUTCDate(2012, 9, 9);
  private static final ZonedDateTime REFERENCE_DATE_1 = DateUtils.getUTCDate(2012, 2, 2); // Before effective date.
private static final ZonedDateTime REFERENCE_DATE_2 = DateUtils.getUTCDate(2012, 2, 16); // After effective date 1.
  // Year fractions between each reference date and each effective/termination date,
  // used to build the BondTotalReturnSwap derivatives directly.
  private static final double EFFECTIVE_TIME_1_1 = TimeCalculator.getTimeBetween(REFERENCE_DATE_1, EFFECTIVE_DATE_1);
  private static final double EFFECTIVE_TIME_2_1 = TimeCalculator.getTimeBetween(REFERENCE_DATE_2, EFFECTIVE_DATE_1);
  private static final double EFFECTIVE_TIME_1_2 = TimeCalculator.getTimeBetween(REFERENCE_DATE_1, EFFECTIVE_DATE_2);
  private static final double TERMINATION_TIME_1_1 = TimeCalculator.getTimeBetween(REFERENCE_DATE_1, TERMINATION_DATE_1);
  private static final double TERMINATION_TIME_1_2 = TimeCalculator.getTimeBetween(REFERENCE_DATE_1, TERMINATION_DATE_2);
  private static final double TERMINATION_TIME_2_1 = TimeCalculator.getTimeBetween(REFERENCE_DATE_2, TERMINATION_DATE_1);
  // Historical Ibor fixings used when converting the Ibor funding leg to its derivative form.
  private static final ZonedDateTime[] FIXING_DATES = new ZonedDateTime[] {DateUtils.getUTCDate(2012, 2, 7), DateUtils.getUTCDate(2012, 2, 8),
    DateUtils.getUTCDate(2012, 2, 9), DateUtils.getUTCDate(2012, 3, 7) };
  private static final double[] FIXING_RATES = new double[] {0.0040, 0.0041, 0.0042, 0.0043 };
  private static final ZonedDateTimeDoubleTimeSeries FIXING_TS = ImmutableZonedDateTimeDoubleTimeSeries.ofUTC(FIXING_DATES, FIXING_RATES);
  private static final double NOTIONAL_TRS = 123456000;
  // Bond (UKT)
  private static final double NOTIONAL_BND = 100000000;
  private static final BondFixedSecurityDefinition UKT14_DEFINITION = BondDataSetsGbp.bondUKT5_20140907();
  // Bond derivatives: UKT14_<reference date index>_<effective date index>.
  private static final BondFixedSecurity UKT14_1_1 = UKT14_DEFINITION.toDerivative(REFERENCE_DATE_1, EFFECTIVE_DATE_1);
  private static final BondFixedSecurity UKT14_1_2 = UKT14_DEFINITION.toDerivative(REFERENCE_DATE_1, EFFECTIVE_DATE_2);
  private static final BondFixedSecurity UKT14_2_1 = UKT14_DEFINITION.toDerivative(REFERENCE_DATE_2, EFFECTIVE_DATE_1);
  private static final Currency GBP = UKT14_DEFINITION.getCurrency();
  // Funding: unique fixed coupon in GBP: receive TRS bond, pay funding
  private static final double RATE = 0.0043;
  private static final CouponFixedDefinition FUNDING_FIXED_CPN_REC_DEFINITION = new CouponFixedDefinition(UKT14_DEFINITION.getCurrency(),
      TERMINATION_DATE_1, EFFECTIVE_DATE_1, TERMINATION_DATE_1, 0.25, NOTIONAL_TRS, RATE);
  private static final PaymentFixedDefinition FUNDING_FIXED_NTL_REC_DEFINITION = new PaymentFixedDefinition(GBP, TERMINATION_DATE_1, NOTIONAL_TRS);
  private static final AnnuityDefinition<? extends PaymentDefinition> FUNDING_LEG_FIXED_REC_DEFINITION = new AnnuityDefinition<>(
      new PaymentDefinition[] {FUNDING_FIXED_CPN_REC_DEFINITION, FUNDING_FIXED_NTL_REC_DEFINITION }, UKT14_DEFINITION.getCalendar());
  private static final Annuity<? extends Payment> FUNDING_LEG_FIXED_REC_1 = FUNDING_LEG_FIXED_REC_DEFINITION.toDerivative(REFERENCE_DATE_1);
  private static final Annuity<? extends Payment> FUNDING_LEG_FIXED_REC_2 = FUNDING_LEG_FIXED_REC_DEFINITION.toDerivative(REFERENCE_DATE_2);
  // Negative bond quantity: pay the bond total return, receive the funding leg.
  private static final BondTotalReturnSwap TRS_PAY_FIXED_REC_1 = new BondTotalReturnSwap(EFFECTIVE_TIME_1_1, TERMINATION_TIME_1_1,
      FUNDING_LEG_FIXED_REC_1, UKT14_1_1, -NOTIONAL_BND);
  private static final BondTotalReturnSwap TRS_PAY_FIXED_REC_2 = new BondTotalReturnSwap(EFFECTIVE_TIME_2_1, TERMINATION_TIME_2_1,
      FUNDING_LEG_FIXED_REC_2, UKT14_2_1, -NOTIONAL_BND);
  // Funding: unique fixed coupon in GBP: pay TRS bond, receive funding
  private static final CouponFixedDefinition FUNDING_FIXED_CPN_PAY_DEFINITION = new CouponFixedDefinition(UKT14_DEFINITION.getCurrency(),
      TERMINATION_DATE_1, EFFECTIVE_DATE_1, TERMINATION_DATE_1, 0.25, -NOTIONAL_TRS, RATE);
  private static final PaymentFixedDefinition FUNDING_FIXED_NTL_PAY_DEFINITION = new PaymentFixedDefinition(GBP, TERMINATION_DATE_1, -NOTIONAL_TRS);
  private static final AnnuityDefinition<? extends PaymentDefinition> FUNDING_LEG_FIXED_PAY_DEFINITION = new AnnuityDefinition<>(
      new PaymentDefinition[] {FUNDING_FIXED_CPN_PAY_DEFINITION, FUNDING_FIXED_NTL_PAY_DEFINITION }, UKT14_DEFINITION.getCalendar());
  private static final Annuity<? extends Payment> FUNDING_LEG_FIXED_PAY_1 = FUNDING_LEG_FIXED_PAY_DEFINITION.toDerivative(REFERENCE_DATE_1);
  private static final BondTotalReturnSwap TRS_REC_FIXED_PAY_1 = new BondTotalReturnSwap(EFFECTIVE_TIME_1_1, TERMINATION_TIME_1_1,
      FUNDING_LEG_FIXED_PAY_1, UKT14_1_1, NOTIONAL_BND);
  // Funding: multiple USD Libor coupons
  private static final Calendar NYC = new CalendarUSD("NYC");
  private static final double SPREAD = 0.0010;
  private static final IborIndex USDLIBOR1M = IndexIborMaster.getInstance().getIndex("USDLIBOR1M");
  private static final Currency USD = USDLIBOR1M.getCurrency();
  private static final AnnuityDefinition<CouponDefinition> FUNDING_LEG_IBOR_PAY_DEFINITION = AnnuityDefinitionBuilder.couponIborSpreadWithNotional(
      EFFECTIVE_DATE_2, TERMINATION_DATE_2, NOTIONAL_TRS, SPREAD, USDLIBOR1M, USDLIBOR1M.getDayCount(), USDLIBOR1M.getBusinessDayConvention(), true,
      USDLIBOR1M.getTenor(), USDLIBOR1M.isEndOfMonth(), NYC, StubType.SHORT_START, 0, false, true);
  private static final Annuity<? extends Payment> FUNDING_LEG_IBOR_PAY_1 = FUNDING_LEG_IBOR_PAY_DEFINITION.toDerivative(REFERENCE_DATE_1, FIXING_TS);
  private static final BondTotalReturnSwapDefinition TRS_REC_IBOR_PAY_DEFINITION = new BondTotalReturnSwapDefinition(EFFECTIVE_DATE_2,
      TERMINATION_DATE_2, FUNDING_LEG_IBOR_PAY_DEFINITION, UKT14_DEFINITION, NOTIONAL_BND);
  // _STD is built from the already-converted funding leg; _EFF is built via the
  // definition's own toDerivative. They differ in which coupons survive.
  private static final BondTotalReturnSwap TRS_REC_IBOR_PAY_1_STD = new BondTotalReturnSwap(EFFECTIVE_TIME_1_2, TERMINATION_TIME_1_2,
      FUNDING_LEG_IBOR_PAY_1, UKT14_1_1, NOTIONAL_BND);
  private static final BondTotalReturnSwap TRS_REC_IBOR_PAY_1_EFF = TRS_REC_IBOR_PAY_DEFINITION.toDerivative(REFERENCE_DATE_1, FIXING_TS);
  private static final BondTotalReturnSwapDiscountingMethod METHOD_TRS_BND = BondTotalReturnSwapDiscountingMethod.getInstance();
  private static final PresentValueIssuerCalculator PVIC = PresentValueIssuerCalculator.getInstance();
  private static final PresentValueCurveSensitivityIssuerCalculator PVCSIC = PresentValueCurveSensitivityIssuerCalculator.getInstance();
  private static final InstrumentDerivativeVisitor<ParameterIssuerProviderInterface, ReferenceAmount<Pair<String, Currency>>> PV01C =
      new PV01CurveParametersCalculator<>(PVCSIC);
  private static final PresentValueDiscountingCalculator PVDC = PresentValueDiscountingCalculator.getInstance();
  private static final IssuerProviderDiscount ISSUER_MULTICURVE = IssuerProviderDiscountDataSets.getIssuerSpecificProvider();
  private static final double TOLERANCE_PV = 1.0E-2;
  private static final double TOLERANCE_PV_DELTA = 1.0E+2;

  /** PV of a pay-bond/receive-fixed TRS before the effective date: bond leg PV scaled by the quantity plus the funding leg PV. */
  @Test
  public void presentValueFixedSameCurrencyBeforeEffective() {
    MultipleCurrencyAmount pvComputedPay = METHOD_TRS_BND.presentValue(TRS_PAY_FIXED_REC_1, ISSUER_MULTICURVE);
    assertEquals("BondTRSDiscountingMethod: present value", 1, pvComputedPay.size()); // Bond and funding in same currency
    assertTrue("BondTRSDiscountingMethod: present value", pvComputedPay.getAmount(GBP) != 0.0);
    MultipleCurrencyAmount pvBondUnit = UKT14_1_1.accept(PVIC, ISSUER_MULTICURVE);
    MultipleCurrencyAmount pvFunding = FUNDING_LEG_FIXED_REC_1.accept(PVDC, ISSUER_MULTICURVE.getMulticurveProvider());
    MultipleCurrencyAmount pvExpected = pvBondUnit.multipliedBy(-NOTIONAL_BND).plus(pvFunding);
    assertEquals("BondTRSDiscountingMethod: present value", pvExpected.getAmount(GBP), pvComputedPay.getAmount(GBP), TOLERANCE_PV);
    // Receiving the bond should be the exact mirror of paying it.
    MultipleCurrencyAmount pvComputedRec = METHOD_TRS_BND.presentValue(TRS_REC_FIXED_PAY_1, ISSUER_MULTICURVE);
    assertEquals("BondTRSDiscountingMethod: present value", -pvComputedPay.getAmount(GBP), pvComputedRec.getAmount(GBP), TOLERANCE_PV);
  }

  /** Same decomposition check, but from a reference date after the first effective date. */
  @Test
  public void presentValueFixedSameCurrencyAfterEffective() {
    MultipleCurrencyAmount pvComputed = METHOD_TRS_BND.presentValue(TRS_PAY_FIXED_REC_2, ISSUER_MULTICURVE);
    assertEquals("BondTRSDiscountingMethod: present value", 1, pvComputed.size()); // Bond and funding in same currency
    assertTrue("BondTRSDiscountingMethod: present value", pvComputed.getAmount(GBP) != 0.0);
    MultipleCurrencyAmount pvBondUnit = UKT14_2_1.accept(PVIC, ISSUER_MULTICURVE);
    MultipleCurrencyAmount pvFunding = FUNDING_LEG_FIXED_REC_2.accept(PVDC, ISSUER_MULTICURVE.getMulticurveProvider());
    MultipleCurrencyAmount pvExpected = pvBondUnit.multipliedBy(-NOTIONAL_BND).plus(pvFunding);
    assertEquals("BondTRSDiscountingMethod: present value", pvExpected.getAmount(GBP), pvComputed.getAmount(GBP), TOLERANCE_PV); // Bond and funding in same currency
  }

  /** GBP bond vs USD Ibor funding: the PV must carry both currencies, each matching its leg. */
  @Test
  public void presentValueIborDiffCurrencyBeforeEffective() {
    MultipleCurrencyAmount pvComputedRec = METHOD_TRS_BND.presentValue(TRS_REC_IBOR_PAY_1_EFF, ISSUER_MULTICURVE);
    assertEquals("BondTRSDiscountingMethod: present value", 2, pvComputedRec.size()); // Bond and funding in different currency
    assertTrue("BondTRSDiscountingMethod: present value", pvComputedRec.getAmount(GBP) != 0.0);
    assertTrue("BondTRSDiscountingMethod: present value", pvComputedRec.getAmount(USD) != 0.0);
    // NOTE(review): the extracted text appeared truncated here; the call below is
    // completed with ISSUER_MULTICURVE as in the two sibling tests — confirm against VCS.
    MultipleCurrencyAmount pvBondUnit = UKT14_1_2.accept(PVIC, ISSUER_MULTICURVE);
    MultipleCurrencyAmount pvFunding = FUNDING_LEG_IBOR_PAY_1.accept(PVDC, ISSUER_MULTICURVE.getMulticurveProvider());
    MultipleCurrencyAmount pvExpected = pvBondUnit.multipliedBy(NOTIONAL_BND).plus(pvFunding);
    assertEquals("BondTRSDiscountingMethod: present value", pvExpected.getAmount(GBP), pvComputedRec.getAmount(GBP), TOLERANCE_PV);
    assertEquals("BondTRSDiscountingMethod: present value", pvFunding.getAmount(USD), pvComputedRec.getAmount(USD), TOLERANCE_PV);
    // Check that the coupon not in the effective period is not taken into account
    MultipleCurrencyAmount pvComputedRecStd = METHOD_TRS_BND.presentValue(TRS_REC_IBOR_PAY_1_STD, ISSUER_MULTICURVE);
    assertFalse("", Math.abs(pvComputedRec.getAmount(GBP) - pvComputedRecStd.getAmount(GBP)) < TOLERANCE_PV);
  }

  /** The per-leg PV methods must match pricing each leg directly with the calculators. */
  @Test
  public void presentValueLegs() {
    MultipleCurrencyAmount pvBondLegExpected = TRS_REC_IBOR_PAY_1_EFF.getAsset().accept(PVIC, ISSUER_MULTICURVE).multipliedBy(NOTIONAL_BND);
    MultipleCurrencyAmount pvBondLegComputed = METHOD_TRS_BND.presentValueAssetLeg(TRS_REC_IBOR_PAY_1_EFF, ISSUER_MULTICURVE);
    assertEquals("BondTRSDiscountingMethod: present value", pvBondLegExpected.getAmount(GBP), pvBondLegComputed.getAmount(GBP), TOLERANCE_PV);
    MultipleCurrencyAmount pvFundingLegExpected = TRS_REC_IBOR_PAY_1_EFF.getFundingLeg().accept(PVDC, ISSUER_MULTICURVE.getMulticurveProvider());
    MultipleCurrencyAmount pvFundingLegComputed = METHOD_TRS_BND.presentValueFundingLeg(TRS_REC_IBOR_PAY_1_EFF, ISSUER_MULTICURVE);
    assertEquals("BondTRSDiscountingMethod: present value", pvFundingLegExpected.getAmount(USD), pvFundingLegComputed.getAmount(USD), TOLERANCE_PV);
  }

  /** The method and the visitor-based calculator must agree on the PV. */
  @Test
  public void presentValueMethodVsCalculator() {
    MultipleCurrencyAmount pvMethod = METHOD_TRS_BND.presentValue(TRS_REC_IBOR_PAY_1_EFF, ISSUER_MULTICURVE);
    MultipleCurrencyAmount pvCalculator = TRS_REC_IBOR_PAY_1_EFF.accept(PVIC, ISSUER_MULTICURVE);
    assertEquals("BondTRSDiscountingMethod: present value", pvMethod.getAmount(GBP), pvCalculator.getAmount(GBP), TOLERANCE_PV);
assertEquals("BondTRSDiscountingMethod: present value", pvMethod.getAmount(USD), pvCalculator.getAmount(USD), TOLERANCE_PV); } @Test public void presentValueCurveSensitivty() { MultipleCurrencyMulticurveSensitivity pvcsComputed = METHOD_TRS_BND.presentValueCurveSensitivity(TRS_REC_IBOR_PAY_1_EFF, ISSUER_MULTICURVE).cleaned(); MultipleCurrencyMulticurveSensitivity pvcsFundingLeg = TRS_REC_IBOR_PAY_1_EFF.getFundingLeg().accept(PVCSIC, ISSUER_MULTICURVE).cleaned(); AssertSensitivityObjects.assertEquals("BondTRSDiscountingMethod: present value curve senstivity", pvcsFundingLeg.getSensitivity(USD), pvcsComputed.getSensitivity(USD), TOLERANCE_PV_DELTA); MultipleCurrencyMulticurveSensitivity pvcsBondLeg = TRS_REC_IBOR_PAY_1_EFF.getAsset().accept(PVCSIC, ISSUER_MULTICURVE).multipliedBy(NOTIONAL_BND).cleaned(); AssertSensitivityObjects.assertEquals("BondTRSDiscountingMethod: present value curve senstivity", pvcsBondLeg.getSensitivity(GBP), pvcsComputed.getSensitivity(GBP), TOLERANCE_PV_DELTA); } @Test public void pv01() { ReferenceAmount<Pair<String, Currency>> pv01Computed = TRS_REC_IBOR_PAY_1_EFF.accept(PV01C, ISSUER_MULTICURVE); ReferenceAmount<Pair<String, Currency>> pv01Funding = TRS_REC_IBOR_PAY_1_EFF.getFundingLeg().accept(PV01C, ISSUER_MULTICURVE); ReferenceAmount<Pair<String, Currency>> pv01Bond = TRS_REC_IBOR_PAY_1_EFF.getAsset().accept(PV01C, ISSUER_MULTICURVE); @SuppressWarnings("unused") int t = 0; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.resourcemanager.slotmanager; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.time.Time; import org.apache.flink.runtime.clusterframework.types.AllocationID; import org.apache.flink.runtime.clusterframework.types.ResourceID; import org.apache.flink.runtime.clusterframework.types.ResourceProfile; import org.apache.flink.runtime.clusterframework.types.SlotID; import org.apache.flink.runtime.concurrent.ManuallyTriggeredScheduledExecutor; import org.apache.flink.runtime.instance.InstanceID; import org.apache.flink.runtime.resourcemanager.ResourceManagerId; import org.apache.flink.runtime.resourcemanager.SlotRequest; import org.apache.flink.runtime.resourcemanager.registration.TaskExecutorConnection; import org.apache.flink.runtime.taskexecutor.SlotReport; import org.apache.flink.runtime.taskexecutor.SlotStatus; import org.apache.flink.runtime.taskexecutor.TaskExecutorGateway; import org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGatewayBuilder; import org.apache.flink.util.TestLogger; import org.apache.flink.util.function.RunnableWithException; import org.junit.Before; import org.junit.Test; import java.util.Arrays; import 
java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; /** Test suite for idle task managers release in slot manager. */ public class TaskManagerCheckInSlotManagerTest extends TestLogger { private static final ResourceID resourceID = ResourceID.generate(); private static final ResourceManagerId resourceManagerId = ResourceManagerId.generate(); private static final SlotID slotId = new SlotID(resourceID, 0); private static final ResourceProfile resourceProfile = ResourceProfile.fromResources(1.0, 1); private static final SlotStatus slotStatus = new SlotStatus(slotId, resourceProfile); private static final SlotReport slotReport = new SlotReport(slotStatus); private final AtomicReference<CompletableFuture<Boolean>> canBeReleasedFuture = new AtomicReference<>(); private final TaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setCanBeReleasedSupplier(canBeReleasedFuture::get) .createTestingTaskExecutorGateway(); private final TaskExecutorConnection taskManagerConnection = new TaskExecutorConnection(resourceID, taskExecutorGateway); private CompletableFuture<InstanceID> releaseFuture; private ResourceActions resourceManagerActions; private ManuallyTriggeredScheduledExecutor mainThreadExecutor; private final AtomicInteger allocateResourceCalls = new AtomicInteger(0); private final AtomicInteger releaseResourceCalls = new AtomicInteger(0); @Before public void setup() { canBeReleasedFuture.set(new CompletableFuture<>()); releaseFuture = new CompletableFuture<>(); allocateResourceCalls.getAndSet(0); releaseResourceCalls.getAndSet(0); resourceManagerActions = new TestingResourceActionsBuilder() .setReleaseResourceConsumer( (instanceID, e) -> { releaseFuture.complete(instanceID); releaseResourceCalls.incrementAndGet(); }) 
.setAllocateResourceConsumer( ignored -> allocateResourceCalls.incrementAndGet()) .build(); mainThreadExecutor = new ManuallyTriggeredScheduledExecutor(); } /** * Tests that idle task managers time out after the configured timeout. A timed out task manager * will be removed from the slot manager and the resource manager will be notified about the * timeout, if it can be released. */ @Test public void testTaskManagerTimeout() throws Exception { checkTaskManagerTimeout(0); } /** * If there is no job running, no need to keep redundant taskManagers and release the timeout * taskManagers. This may happen in session mode without jobs running. */ @Test public void testTaskManagerTimeoutWithRedundantTaskManager() throws Exception { checkTaskManagerTimeout(1); } /** * Register four taskManagers that all have two slots. For taskManager0, both slots are free. * For taskManager1, both slots are allocated. For taskManager2, One slot is allocated, the * other is free. For taskManager3, one slot is free, the other is allocated. If * redundantTaskManagerNum is 0, the idle taskManager should be released. * * @throws Exception */ @Test public void testTaskManagerTimeoutWithZeroRedundantTaskManager() throws Exception { registerAndCheckMultiTaskManagers(0); assertThat(allocateResourceCalls.get(), is(0)); assertThat(releaseResourceCalls.get(), is(1)); } /** * Register four taskManagers that all have two slots. For taskManager0, both slots are free. * For taskManager1, both slots are allocated. For taskManager2, One slot is allocated, the * other is free. For taskManager3, one slot is free, the other is allocated. If * redundantTaskManagerNum is 1, two free slots are needed and the idle taskManager should be * released. 
* * @throws Exception */ @Test public void testTaskManagerTimeoutWithOneRedundantTaskManager() throws Exception { registerAndCheckMultiTaskManagers(1); assertThat(allocateResourceCalls.get(), is(0)); assertThat(releaseResourceCalls.get(), is(1)); } /** * Register four taskManagers that all have two slots. For taskManager0, both slots are free. * For taskManager1, both slots are allocated. For taskManager2, One slot is allocated, the * other is free. For taskManager3, one slot is free, the other is allocated. If * redundantTaskManagerNum is 2, four free slots can satisfy the requirement. * * @throws Exception */ @Test public void testTaskManagerTimeoutWithTwoRedundantTaskManager() throws Exception { registerAndCheckMultiTaskManagers(2); assertThat(allocateResourceCalls.get(), is(0)); assertThat(releaseResourceCalls.get(), is(0)); } /** * Register four taskManagers that all have two slots. For taskManager0, both slots are free. * For taskManager1, both slots are allocated. For taskManager2, One slot is allocated, the * other is free. For taskManager3, one slot is free, the other is allocated. If * redundantTaskManagerNum is 3, two more free slots are needed and another taskManager should * be allocated. * * @throws Exception */ @Test public void testTaskManagerTimeoutWithThreeRedundantTaskManager() throws Exception { registerAndCheckMultiTaskManagers(3); assertThat(allocateResourceCalls.get(), is(1)); assertThat(releaseResourceCalls.get(), is(0)); } /** * Tests that idle but not releasable task managers will not be released even if timed out * before it can be. 
*/ @Test public void testTaskManagerIsNotReleasedBeforeItCanBe() throws Exception { try (SlotManagerImpl slotManager = createAndStartSlotManagerWithTM()) { checkTaskManagerTimeoutWithCustomCanBeReleasedResponse(slotManager, false); verifyTmReleased(false); checkTaskManagerTimeoutWithCustomCanBeReleasedResponse(slotManager, true); verifyTmReleased(true); } } /** * Tests that idle task managers will not be released after "can be" check in case of concurrent * resource allocations. */ @Test public void testTaskManagerIsNotReleasedInCaseOfConcurrentAllocation() throws Exception { try (SlotManagerImpl slotManager = createAndStartSlotManagerWithTM()) { checkTaskManagerTimeoutWithCustomCanBeReleasedResponse( slotManager, true, () -> { // Allocate and free slot between triggering TM.canBeReleased request and // receiving response. // There can be potentially newly unreleased partitions, therefore TM can // not be released yet. AllocationID allocationID = new AllocationID(); slotManager.registerSlotRequest( new SlotRequest( new JobID(), allocationID, resourceProfile, "foobar")); mainThreadExecutor.triggerAll(); // The test case can be unstable w/o this sleep, because // TaskManagerRegistration.idleSince, which is set to // System.currentTimeMillis(), may not change after occupying and // releasing the slot. 
Thread.sleep(1); slotManager.freeSlot(slotId, allocationID); }); verifyTmReleased(false); checkTaskManagerTimeoutWithCustomCanBeReleasedResponse(slotManager, true); verifyTmReleased(true); } } private void checkTaskManagerTimeout(int redundantTaskManagerNum) throws Exception { canBeReleasedFuture.set(CompletableFuture.completedFuture(true)); try (SlotManager slotManager = SlotManagerBuilder.newBuilder() .setTaskManagerTimeout(Time.milliseconds(10L)) .setRedundantTaskManagerNum(redundantTaskManagerNum) .buildAndStartWithDirectExec(resourceManagerId, resourceManagerActions)) { slotManager.registerTaskManager( taskManagerConnection, slotReport, ResourceProfile.ANY, ResourceProfile.ANY); assertThat(releaseFuture.get(), is(equalTo(taskManagerConnection.getInstanceID()))); } } /** * Register four taskManagers that all have two slots. The difference between the taskManagers * is whether the slot is allocated. To maintain redundantTaskManagerNum, SlotManagerImpl may * release or allocate taskManagers. * * @param redundantTaskManagerNum * @throws Exception */ private void registerAndCheckMultiTaskManagers(int redundantTaskManagerNum) throws Exception { SlotManagerImpl slotManager = createAndStartSlotManager(redundantTaskManagerNum, 2); // Both slots are free. registerTaskManagerWithTwoSlots(slotManager, true, true); // Both slots are allocated. registerTaskManagerWithTwoSlots(slotManager, false, false); // One slot is allocated, the other is free. registerTaskManagerWithTwoSlots(slotManager, false, true); // One slot is free, the other is allocated. 
registerTaskManagerWithTwoSlots(slotManager, true, false); checkTaskManagerTimeoutWithCustomCanBeReleasedResponse(slotManager, true); } private void registerTaskManagerWithTwoSlots( SlotManagerImpl slotManager, boolean slot0Free, boolean slot1Free) { canBeReleasedFuture.set(new CompletableFuture<>()); ResourceID resourceID = ResourceID.generate(); ResourceProfile resourceProfile = ResourceProfile.fromResources(1.0, 1); JobID jobID = new JobID(); SlotID slotId0 = new SlotID(resourceID, 0); SlotStatus slotStatus0 = slot0Free ? new SlotStatus(slotId0, resourceProfile) : new SlotStatus(slotId0, resourceProfile, jobID, new AllocationID()); SlotID slotId1 = new SlotID(resourceID, 1); SlotStatus slotStatus1 = slot1Free ? new SlotStatus(slotId1, resourceProfile) : new SlotStatus(slotId1, resourceProfile, jobID, new AllocationID()); SlotReport slotReport = new SlotReport(Arrays.asList(slotStatus0, slotStatus1)); TaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setCanBeReleasedSupplier(canBeReleasedFuture::get) .createTestingTaskExecutorGateway(); TaskExecutorConnection taskManagerConnection = new TaskExecutorConnection(resourceID, taskExecutorGateway); mainThreadExecutor.execute( () -> slotManager.registerTaskManager( taskManagerConnection, slotReport, ResourceProfile.ANY, ResourceProfile.ANY)); } private SlotManagerImpl createAndStartSlotManagerWithTM() { SlotManagerImpl slotManager = createAndStartSlotManager(0, 1); mainThreadExecutor.execute( () -> slotManager.registerTaskManager( taskManagerConnection, slotReport, ResourceProfile.ANY, ResourceProfile.ANY)); return slotManager; } private SlotManagerImpl createAndStartSlotManager( int redundantTaskManagerNum, int numSlotsPerWorker) { SlotManagerImpl slotManager = SlotManagerBuilder.newBuilder() .setScheduledExecutor(mainThreadExecutor) .setTaskManagerTimeout(Time.milliseconds(0L)) .setRedundantTaskManagerNum(redundantTaskManagerNum) .setNumSlotsPerWorker(numSlotsPerWorker) .build(); 
slotManager.start(resourceManagerId, mainThreadExecutor, resourceManagerActions); return slotManager; } private void checkTaskManagerTimeoutWithCustomCanBeReleasedResponse( SlotManagerImpl slotManager, boolean canBeReleased) throws Exception { checkTaskManagerTimeoutWithCustomCanBeReleasedResponse( slotManager, canBeReleased, () -> {}); } private void checkTaskManagerTimeoutWithCustomCanBeReleasedResponse( SlotManagerImpl slotManager, boolean canBeReleased, RunnableWithException doAfterCheckTriggerBeforeCanBeReleasedResponse) throws Exception { canBeReleasedFuture.set(new CompletableFuture<>()); mainThreadExecutor.execute( slotManager::checkTaskManagerTimeoutsAndRedundancy); // trigger TM.canBeReleased // request mainThreadExecutor.triggerAll(); doAfterCheckTriggerBeforeCanBeReleasedResponse.run(); canBeReleasedFuture.get().complete(canBeReleased); // finish TM.canBeReleased request mainThreadExecutor.triggerAll(); } private void verifyTmReleased(boolean isTmReleased) { assertThat(releaseFuture.isDone(), is(isTmReleased)); if (isTmReleased) { assertThat(releaseFuture.join(), is(equalTo(taskManagerConnection.getInstanceID()))); } } }
/*
 * Tests for TrackingResultProcessor.decorate(...): a CompoundProcessor is wrapped so that every
 * executed processor appends a SimulateProcessorResult to resultList (document snapshot, failure,
 * processor tag). Covers plain processors, compound processors with/without on_failure handlers,
 * ignore_failure, ConditionalProcessor gating, and nested PipelineProcessor resolution via a
 * mocked IngestService. This chunk is truncated mid-method at the end; the remainder of
 * testActualPipelineProcessorWithHandledFailure continues in the next chunk.
 */
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESTestCase; import org.junit.Before; import org.mockito.Mockito; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_MESSAGE_FIELD; import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD; import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD; import static org.elasticsearch.ingest.TrackingResultProcessor.decorate; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static 
org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class TrackingResultProcessorTests extends ESTestCase { private IngestDocument ingestDocument; private List<SimulateProcessorResult> resultList; @Before public void init() { ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); resultList = new ArrayList<>(); } public void testActualProcessor() throws Exception { TestProcessor actualProcessor = new TestProcessor(ingestDocument -> {}); TrackingResultProcessor trackingProcessor = new TrackingResultProcessor(false, actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); assertThat(actualProcessor.getInvokedCounter(), equalTo(1)); assertThat(resultList.size(), equalTo(1)); assertThat(resultList.get(0).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); assertThat(resultList.get(0).getFailure(), nullValue()); assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag())); } public void testActualCompoundProcessorWithoutOnFailure() throws Exception { RuntimeException exception = new RuntimeException("processor failed"); TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); CompoundProcessor actualProcessor = new CompoundProcessor(testProcessor); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); try { trackingProcessor.execute(ingestDocument); fail("processor should throw exception"); } catch (ElasticsearchException e) { assertThat(e.getRootCause().getMessage(), equalTo(exception.getMessage())); } SimulateProcessorResult expectedFirstResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); assertThat(testProcessor.getInvokedCounter(), equalTo(1)); 
/*
 * The unhandled failure still produces exactly one tracked result: no ingest document snapshot,
 * the thrown exception attached as the failure, and the failing processor's tag.
 */
assertThat(resultList.size(), equalTo(1)); assertThat(resultList.get(0).getIngestDocument(), nullValue()); assertThat(resultList.get(0).getFailure(), equalTo(exception)); assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFirstResult.getProcessorTag())); } public void testActualCompoundProcessorWithOnFailure() throws Exception { RuntimeException exception = new RuntimeException("fail"); TestProcessor failProcessor = new TestProcessor("fail", "test", exception); TestProcessor onFailureProcessor = new TestProcessor("success", "test", ingestDocument -> {}); CompoundProcessor actualProcessor = new CompoundProcessor(false, Arrays.asList(new CompoundProcessor(false, Arrays.asList(failProcessor, onFailureProcessor), Arrays.asList(onFailureProcessor, failProcessor))), Arrays.asList(onFailureProcessor)); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedFailResult = new SimulateProcessorResult(failProcessor.getTag(), ingestDocument); SimulateProcessorResult expectedSuccessResult = new SimulateProcessorResult(onFailureProcessor.getTag(), ingestDocument); assertThat(failProcessor.getInvokedCounter(), equalTo(2)); assertThat(onFailureProcessor.getInvokedCounter(), equalTo(2)); assertThat(resultList.size(), equalTo(4)); assertThat(resultList.get(0).getIngestDocument(), nullValue()); assertThat(resultList.get(0).getFailure(), equalTo(exception)); assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag())); Map<String, Object> metadata = resultList.get(1).getIngestDocument().getIngestMetadata(); assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail")); assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test")); assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail")); assertThat(resultList.get(1).getFailure(), nullValue()); assertThat(resultList.get(1).getProcessorTag(), 
equalTo(expectedSuccessResult.getProcessorTag())); assertThat(resultList.get(2).getIngestDocument(), nullValue()); assertThat(resultList.get(2).getFailure(), equalTo(exception)); assertThat(resultList.get(2).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag())); metadata = resultList.get(3).getIngestDocument().getIngestMetadata(); assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail")); assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test")); assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail")); assertThat(resultList.get(3).getFailure(), nullValue()); assertThat(resultList.get(3).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag())); } public void testActualCompoundProcessorWithIgnoreFailure() throws Exception { RuntimeException exception = new RuntimeException("processor failed"); TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); CompoundProcessor actualProcessor = new CompoundProcessor(true, Collections.singletonList(testProcessor), Collections.emptyList()); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); assertThat(testProcessor.getInvokedCounter(), equalTo(1)); assertThat(resultList.size(), equalTo(1)); assertThat(resultList.get(0).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); assertThat(resultList.get(0).getFailure(), sameInstance(exception)); assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag())); } public void testActualCompoundProcessorWithFalseConditional() throws Exception { String key1 = randomAlphaOfLength(10); String key2 = randomAlphaOfLength(10); String key3 = randomAlphaOfLength(10); String scriptName = "conditionalScript"; ScriptService scriptService = new ScriptService(Settings.builder().build(), 
Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> false), Collections.emptyMap())), new HashMap<>(ScriptModule.CORE_CONTEXTS) ); CompoundProcessor compoundProcessor = new CompoundProcessor( new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key1, randomInt()); }), new ConditionalProcessor( randomAlphaOfLength(10), new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService, new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key2, randomInt()); })), new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })); CompoundProcessor trackingProcessor = decorate(compoundProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(compoundProcessor.getTag(), ingestDocument); //the step for key 2 is never executed due to conditional and thus not part of the result set assertThat(resultList.size(), equalTo(2)); assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); assertTrue(resultList.get(1).getIngestDocument().hasField(key1)); assertFalse(resultList.get(1).getIngestDocument().hasField(key2)); assertTrue(resultList.get(1).getIngestDocument().hasField(key3)); assertThat(resultList.get(1).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); assertThat(resultList.get(1).getFailure(), nullValue()); assertThat(resultList.get(1).getProcessorTag(), nullValue()); } public void testActualPipelineProcessor() throws Exception { String pipelineId = "pipeline1"; IngestService ingestService = mock(IngestService.class); Map<String, Object> pipelineConfig = new HashMap<>(); pipelineConfig.put("name", pipelineId); PipelineProcessor.Factory factory = new 
PipelineProcessor.Factory(ingestService); String key1 = randomAlphaOfLength(10); String key2 = randomAlphaOfLength(10); String key3 = randomAlphaOfLength(10); Pipeline pipeline = new Pipeline( pipelineId, null, null, new CompoundProcessor( new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key1, randomInt()); }), new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key2, randomInt()); }), new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })) ); when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId); assertThat(resultList.size(), equalTo(3)); assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); assertTrue(resultList.get(1).getIngestDocument().hasField(key1)); assertTrue(resultList.get(1).getIngestDocument().hasField(key2)); assertFalse(resultList.get(1).getIngestDocument().hasField(key3)); assertThat(resultList.get(2).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); assertThat(resultList.get(2).getFailure(), nullValue()); assertThat(resultList.get(2).getProcessorTag(), nullValue()); } public void testActualPipelineProcessorWithTrueConditional() throws Exception { String pipelineId1 = "pipeline1"; String pipelineId2 = "pipeline2"; IngestService ingestService = mock(IngestService.class); Map<String, Object> pipelineConfig0 = new HashMap<>(); 
/*
 * Three configs: pipelineConfig0 and pipelineConfig1 both name pipeline1; pipelineConfig2 names
 * pipeline2, which is reached only through the ConditionalProcessor inside pipeline1.
 */
pipelineConfig0.put("name", pipelineId1); Map<String, Object> pipelineConfig1 = new HashMap<>(); pipelineConfig1.put("name", pipelineId1); Map<String, Object> pipelineConfig2 = new HashMap<>(); pipelineConfig2.put("name", pipelineId2); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); String key1 = randomAlphaOfLength(10); String key2 = randomAlphaOfLength(10); String key3 = randomAlphaOfLength(10); String scriptName = "conditionalScript"; ScriptService scriptService = new ScriptService(Settings.builder().build(), Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> true), Collections.emptyMap())), new HashMap<>(ScriptModule.CORE_CONTEXTS) ); Pipeline pipeline1 = new Pipeline( pipelineId1, null, null, new CompoundProcessor( new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key1, randomInt()); }), new ConditionalProcessor( randomAlphaOfLength(10), new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService, factory.create(Collections.emptyMap(), null, pipelineConfig2)), new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key3, randomInt()); }) ) ); Pipeline pipeline2 = new Pipeline( pipelineId2, null, null, new CompoundProcessor( new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); }))); when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1); when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2); PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); 
/*
 * With a true conditional the nested pipeline2 runs as well: both pipelines are resolved from the
 * mocked IngestService and all three keys end up tracked across three results.
 */
verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId1); verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId2); assertThat(resultList.size(), equalTo(3)); assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); assertTrue(resultList.get(1).getIngestDocument().hasField(key1)); assertTrue(resultList.get(1).getIngestDocument().hasField(key2)); assertFalse(resultList.get(1).getIngestDocument().hasField(key3)); assertThat(resultList.get(2).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); assertThat(resultList.get(2).getFailure(), nullValue()); assertThat(resultList.get(2).getProcessorTag(), nullValue()); } public void testActualPipelineProcessorWithFalseConditional() throws Exception { String pipelineId1 = "pipeline1"; String pipelineId2 = "pipeline2"; IngestService ingestService = mock(IngestService.class); Map<String, Object> pipelineConfig0 = new HashMap<>(); pipelineConfig0.put("name", pipelineId1); Map<String, Object> pipelineConfig1 = new HashMap<>(); pipelineConfig1.put("name", pipelineId1); Map<String, Object> pipelineConfig2 = new HashMap<>(); pipelineConfig2.put("name", pipelineId2); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); String key1 = randomAlphaOfLength(10); String key2 = randomAlphaOfLength(10); String key3 = randomAlphaOfLength(10); String scriptName = "conditionalScript"; ScriptService scriptService = new ScriptService(Settings.builder().build(), Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> false), Collections.emptyMap())), new HashMap<>(ScriptModule.CORE_CONTEXTS) ); Pipeline pipeline1 = new Pipeline( pipelineId1, null, null, new CompoundProcessor( new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key1, 
randomInt()); }), new ConditionalProcessor( randomAlphaOfLength(10), new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService, factory.create(Collections.emptyMap(), null, pipelineConfig2)), new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key3, randomInt()); }) ) ); Pipeline pipeline2 = new Pipeline( pipelineId2, null, null, new CompoundProcessor( new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); }))); when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1); when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2); PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId1); verify(ingestService, Mockito.never()).getPipeline(pipelineId2); assertThat(resultList.size(), equalTo(2)); assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); assertTrue(resultList.get(1).getIngestDocument().hasField(key1)); assertFalse(resultList.get(1).getIngestDocument().hasField(key2)); assertTrue(resultList.get(1).getIngestDocument().hasField(key3)); assertThat(resultList.get(1).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); assertThat(resultList.get(1).getFailure(), nullValue()); assertThat(resultList.get(1).getProcessorTag(), nullValue()); } public void testActualPipelineProcessorWithHandledFailure() throws Exception { RuntimeException exception = new RuntimeException("processor 
failed"); String pipelineId = "pipeline1"; IngestService ingestService = mock(IngestService.class); Map<String, Object> pipelineConfig = new HashMap<>(); pipelineConfig.put("name", pipelineId); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); String key1 = randomAlphaOfLength(10); String key2 = randomAlphaOfLength(10); String key3 = randomAlphaOfLength(10); Pipeline pipeline = new Pipeline( pipelineId, null, null, new CompoundProcessor( new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); }), new CompoundProcessor( false, Collections.singletonList(new TestProcessor(ingestDocument -> { throw exception; })), Collections.singletonList(new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); })) ), new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })) ); when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); verify(ingestService, Mockito.atLeast(2)).getPipeline(pipelineId); assertThat(resultList.size(), equalTo(4)); assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); //failed processor assertNull(resultList.get(1).getIngestDocument()); assertThat(resultList.get(1).getFailure().getMessage(), equalTo(exception.getMessage())); assertTrue(resultList.get(2).getIngestDocument().hasField(key1)); assertTrue(resultList.get(2).getIngestDocument().hasField(key2)); 
assertFalse(resultList.get(2).getIngestDocument().hasField(key3)); assertThat(resultList.get(3).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); assertThat(resultList.get(3).getFailure(), nullValue()); assertThat(resultList.get(3).getProcessorTag(), nullValue()); } public void testActualPipelineProcessorWithCycle() throws Exception { String pipelineId1 = "pipeline1"; String pipelineId2 = "pipeline2"; IngestService ingestService = mock(IngestService.class); Map<String, Object> pipelineConfig0 = new HashMap<>(); pipelineConfig0.put("name", pipelineId1); Map<String, Object> pipelineConfig1 = new HashMap<>(); pipelineConfig1.put("name", pipelineId1); Map<String, Object> pipelineConfig2 = new HashMap<>(); pipelineConfig2.put("name", pipelineId2); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); Pipeline pipeline1 = new Pipeline( pipelineId1, null, null, new CompoundProcessor(factory.create(Collections.emptyMap(), null, pipelineConfig2))); Pipeline pipeline2 = new Pipeline( pipelineId2, null, null, new CompoundProcessor(factory.create(Collections.emptyMap(), null, pipelineConfig1))); when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1); when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2); PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> trackingProcessor.execute(ingestDocument)); assertThat(exception.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(exception.getCause().getCause(), instanceOf(IllegalStateException.class)); assertThat(exception.getMessage(), containsString("Cycle detected for pipeline: pipeline1")); } public void testActualPipelineProcessorRepeatedInvocation() throws 
Exception { String pipelineId = "pipeline1"; IngestService ingestService = mock(IngestService.class); Map<String, Object> pipelineConfig = new HashMap<>(); pipelineConfig.put("name", pipelineId); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); String key1 = randomAlphaOfLength(10); PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); Pipeline pipeline = new Pipeline( pipelineId, null, null, new CompoundProcessor( new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); })) ); when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor, pipelineProcessor); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); verify(ingestService, Mockito.atLeast(2)).getPipeline(pipelineId); assertThat(resultList.size(), equalTo(2)); assertThat(resultList.get(0).getIngestDocument(), not(equalTo(expectedResult.getIngestDocument()))); assertThat(resultList.get(0).getFailure(), nullValue()); assertThat(resultList.get(0).getProcessorTag(), nullValue()); assertThat(resultList.get(1).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); assertThat(resultList.get(1).getFailure(), nullValue()); assertThat(resultList.get(1).getProcessorTag(), nullValue()); //each invocation updates key1 with a random int assertNotEquals(resultList.get(0).getIngestDocument().getSourceAndMetadata().get(key1), resultList.get(1).getIngestDocument().getSourceAndMetadata().get(key1)); } }
package services.sso; import models.sso.User; import models.sso.UserCredentials; import services.sso.annotations.ExclusionDictionary; import services.sso.annotations.ExclusionSubstrings; import javax.inject.Inject; import javax.inject.Provider; import javax.inject.Singleton; import javax.persistence.EntityManager; import javax.persistence.NoResultException; import javax.persistence.Query; import java.util.Arrays; import java.util.Set; /** * User service. */ @Singleton public class UserService implements Paginatable<User> { /** * Entity manager provider. */ final Provider<EntityManager> entityManagerProvider; /** * Exclusion dictionary for username check. */ final Set<String> usernameExclusionDictionary; /** * Exclusion substrings for username check. */ final Set<String> usernameExclusionSubstrings; /** * Password service. */ final PasswordService passwordService; /** * Constructs user service. * * @param entityManagerProvider Entity manager provider. * @param usernameExclusionDictionary Username exclusion dictionary. * @param passwordService Password service. */ @Inject public UserService( Provider<EntityManager> entityManagerProvider, @ExclusionDictionary Set<String> usernameExclusionDictionary, @ExclusionSubstrings Set<String> usernameExclusionSubstrings, PasswordService passwordService) { this.entityManagerProvider = entityManagerProvider; this.usernameExclusionDictionary = usernameExclusionDictionary; this.usernameExclusionSubstrings = usernameExclusionSubstrings; this.passwordService = passwordService; } /** * Returns user by given id or null. * * @param id Id of the user. * @return User or null when the user was not found. */ public User get(Long id) { return entityManagerProvider.get().find(User.class, id); } /** * Returns user by string that contains username or email. * * @param emailOrUsername String with email or username. * @return User or null if there is no such user in database. 
*/ public User getUserByEmailOrUsername(String emailOrUsername) { emailOrUsername = emailOrUsername.toLowerCase().trim(); if (emailOrUsername.indexOf('@') >= 0) { return getByEmail(emailOrUsername); } return getByUsername(emailOrUsername); } /** * Returns user with given email or null if there is no such user. * * @param email Email. * @return User with given email or null if there is no such user. */ public User getByEmail(String email) { Query q = entityManagerProvider.get().createNamedQuery("User.getByEmail"); q.setParameter("email", email.toLowerCase().trim()); q.setMaxResults(1); try { return (User) q.getSingleResult(); } catch (NoResultException nre) { return null; } } /** * Returns user with given username or null if there is no such user. * * @param username Username. * @return User with given username or null if there is no such user. */ public User getByUsername(String username) { Query q = entityManagerProvider.get().createNamedQuery("User.getByUsername"); q.setParameter("username", username.toLowerCase().trim()); q.setMaxResults(1); try { return (User) q.getSingleResult(); } catch (NoResultException nre) { return null; } } /** * Returns user with given phone or null if there is no such user. * * @param phone Phone. * @return User with given phone or null if there is no such user. */ public User getByPhone(String phone) { Query q = entityManagerProvider.get().createNamedQuery("User.getByPhone"); q.setParameter("phone", phone.toLowerCase().trim()); q.setMaxResults(1); try { return (User) q.getSingleResult(); } catch (NoResultException nre) { return null; } } /** * Creates new user in a database and returns attached entity (create). * * @param user User to save. * @return Created user (as as argument, attached instance). 
*/ public User createNew(User user, String password) { byte[] salt = passwordService.newSalt(); UserCredentials credentials = new UserCredentials(); credentials.setPasswordSalt(salt); credentials.setPasswordHash(passwordService.passwordHash(password, salt)); EntityManager em = entityManagerProvider.get(); em.persist(user); em.flush(); credentials.setUserId(user.getId()); em.persist(credentials); em.flush(); return user; } /** * Tests if the given username is acceptable. * * @param username Username to test for availability. * @return Whether the given username is available. */ public boolean isUsernameAcceptable(String username) { if (username == null) { return false; } username = username.trim().toLowerCase(); if (username.isEmpty()) { return false; } if (usernameExclusionDictionary.contains(username)) { return false; } for (String exclusionSubstring : usernameExclusionSubstrings) { if (username.contains(exclusionSubstring)) { return false; } } return true; } /** * Checks if the given user password is valid. * * @param user User to check. * @param password Password to check. * @return Whether the given password is a valid user password. */ public boolean isValidPassword(User user, String password) { UserCredentials credentials = getCredentials(user); if (credentials == null) { return false; } byte[] passwordHash = passwordService.passwordHash(password, credentials.getPasswordSalt()); return Arrays.equals(passwordHash, credentials.getPasswordHash()); } /** * Updates user password. * * @param user User. * @param password New password. * @return Updated user entity. 
*/ public User updatePassword(User user, String password) { UserCredentials credentials = getCredentials(user); if (credentials == null) { credentials = new UserCredentials(); credentials.setUserId(user.getId()); } credentials.setPasswordSalt(passwordService.newSalt()); credentials.setPasswordHash(passwordService.passwordHash(password, credentials.getPasswordSalt())); entityManagerProvider.get().persist(credentials); entityManagerProvider.get().flush(); return user; } /** * Updates user password and changes user's status to confirmed. * Since the link to password restoration is sent via email, account becomes verified (confirmed). * * @param user User. * @param password New password. * @return Updated user entity. */ public User updatePasswordAndConfirm(User user, String password) { updatePassword(user, password); user.confirm(); return update(user); } /** * Updates existing user. * * @param user User to update. */ public User update(User user) { entityManagerProvider.get().persist(user); entityManagerProvider.get().flush(); return user; } /** * Updates existing user with last used locale. Uses simple update query to avoid whole user update. * * @param user User to update. * @param lastUsedLocale Last used locale. */ public void updateLastUsedLocale(User user, String lastUsedLocale) { entityManagerProvider.get() .createNamedQuery("User.updateLastUsedLocale") .setParameter("userId", user.getId()) .setParameter("lastUsedLocale", lastUsedLocale) .executeUpdate(); } /** * Removes user events. * * @param user User who's events to remove. * @return Number of events removed. */ public int removeUserEvents(User user) { return entityManagerProvider.get().createNamedQuery("UserEvent.removeByUser") .setParameter("userId", user.getId()) .executeUpdate(); } /** * Returns user credentials object for given user. * * @param user User. * @return Credentials for given user. 
*/ public UserCredentials getCredentials(User user) { return getCredentials(user.getId()); } /** * Returns user credentials object for given user id. * * @param userId User id. * @return Credentials for given user. */ public UserCredentials getCredentials(long userId) { return entityManagerProvider.get().find(UserCredentials.class, userId); } @Override public String getEntityCountAllQueryName() { return "User.countAll"; } @Override public String getEntityAllQueryName() { return "User.all"; } @Override public String getEntityCountSearchQueryName() { return "User.countSearch"; } @Override public String getEntitySearchQueryName() { return "User.search"; } @Override public Provider<EntityManager> getEntityManagerProvider() { return entityManagerProvider; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.resourcemanager;

import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.clusterframework.FlinkResourceManager;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.heartbeat.HeartbeatServices;
import org.apache.flink.runtime.highavailability.TestingHighAvailabilityServices;
import org.apache.flink.runtime.instance.HardwareDescription;
import org.apache.flink.runtime.leaderelection.LeaderElectionService;
import org.apache.flink.runtime.leaderelection.TestingLeaderElectionService;
import org.apache.flink.runtime.metrics.MetricRegistryImpl;
import org.apache.flink.runtime.resourcemanager.slotmanager.SlotManager;
import org.apache.flink.runtime.rest.messages.taskmanager.TaskManagerInfo;
import org.apache.flink.runtime.rpc.FatalErrorHandler;
import org.apache.flink.runtime.rpc.TestingRpcService;
import org.apache.flink.runtime.registration.RegistrationResponse;
import org.apache.flink.runtime.rpc.exceptions.FencingTokenException;
import org.apache.flink.runtime.taskexecutor.SlotReport;
import org.apache.flink.runtime.taskexecutor.TaskExecutorGateway;
import org.apache.flink.runtime.taskexecutor.TaskExecutorRegistrationSuccess;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.util.TestingFatalErrorHandler;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.TestLogger;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests for the task-executor registration path of the {@code StandaloneResourceManager}:
 * normal registration, duplicate registration, wrongly fenced gateways and invalid
 * task executor addresses.
 */
public class ResourceManagerTaskExecutorTest extends TestLogger {

    // Upper bound for all async calls in these tests.
    private final Time timeout = Time.seconds(10L);

    private TestingRpcService rpcService;

    // Fixed registration payload shared by all tests.
    private SlotReport slotReport = new SlotReport();

    private int dataPort = 1234;

    private HardwareDescription hardwareDescription = new HardwareDescription(1, 2L, 3L, 4L);

    private static String taskExecutorAddress = "/taskExecutor1";

    private ResourceID taskExecutorResourceID;

    private ResourceID resourceManagerResourceID;

    private StandaloneResourceManager resourceManager;

    // Gateway fenced with the leader's actual fencing token.
    private ResourceManagerGateway rmGateway;

    // Gateway fenced with a freshly generated (and therefore wrong) token.
    private ResourceManagerGateway wronglyFencedGateway;

    private TestingFatalErrorHandler testingFatalErrorHandler;

    @Before
    public void setup() throws Exception {
        rpcService = new TestingRpcService();

        taskExecutorResourceID = mockTaskExecutor(taskExecutorAddress);
        resourceManagerResourceID = ResourceID.generate();
        testingFatalErrorHandler = new TestingFatalErrorHandler();
        TestingLeaderElectionService rmLeaderElectionService = new TestingLeaderElectionService();
        resourceManager = createAndStartResourceManager(rmLeaderElectionService, testingFatalErrorHandler);
        rmGateway = resourceManager.getSelfGateway(ResourceManagerGateway.class);
        // Connect with a random fencing token so fencing failures can be asserted.
        wronglyFencedGateway = rpcService.connect(resourceManager.getAddress(), ResourceManagerId.generate(), ResourceManagerGateway.class)
            .get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);

        grantLeadership(rmLeaderElectionService).get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
    }

    @After
    public void teardown() throws Exception {
        rpcService.stopService();
    }

    /**
     * Test receive normal registration from task executor and receive duplicate registration
     * from task executor.
     */
    @Test
    public void testRegisterTaskExecutor() throws Exception {
        try {
            // test response successful
            CompletableFuture<RegistrationResponse> successfulFuture =
                rmGateway.registerTaskExecutor(taskExecutorAddress, taskExecutorResourceID, slotReport, dataPort, hardwareDescription, timeout);
            RegistrationResponse response = successfulFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
            assertTrue(response instanceof TaskExecutorRegistrationSuccess);
            final TaskManagerInfo taskManagerInfo = rmGateway.requestTaskManagerInfo(
                taskExecutorResourceID,
                timeout).get();
            assertThat(taskManagerInfo.getResourceId(), equalTo(taskExecutorResourceID));

            // test response successful with instanceID not equal to previous when receive duplicate registration from taskExecutor
            CompletableFuture<RegistrationResponse> duplicateFuture =
                rmGateway.registerTaskExecutor(taskExecutorAddress, taskExecutorResourceID, slotReport, dataPort, hardwareDescription, timeout);
            RegistrationResponse duplicateResponse = duplicateFuture.get();
            assertTrue(duplicateResponse instanceof TaskExecutorRegistrationSuccess);
            assertNotEquals(((TaskExecutorRegistrationSuccess) response).getRegistrationId(), ((TaskExecutorRegistrationSuccess) duplicateResponse).getRegistrationId());
        } finally {
            // Surface any async fatal error as the test failure.
            if (testingFatalErrorHandler.hasExceptionOccurred()) {
                testingFatalErrorHandler.rethrowError();
            }
        }
    }

    /**
     * Test receive registration with unmatched leadershipId from task executor
     */
    @Test
    public void testRegisterTaskExecutorWithUnmatchedLeaderSessionId() throws Exception {
        try {
            // test throw exception when receive a registration from taskExecutor which takes unmatched leaderSessionId
            CompletableFuture<RegistrationResponse> unMatchedLeaderFuture =
                wronglyFencedGateway.registerTaskExecutor(taskExecutorAddress, taskExecutorResourceID, slotReport, dataPort, hardwareDescription, timeout);

            try {
                unMatchedLeaderFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
                fail("Should have failed because we are using a wrongly fenced ResourceManagerGateway.");
            } catch (ExecutionException e) {
                assertTrue(ExceptionUtils.stripExecutionException(e) instanceof FencingTokenException);
            }
        } finally {
            if (testingFatalErrorHandler.hasExceptionOccurred()) {
                testingFatalErrorHandler.rethrowError();
            }
        }
    }

    /**
     * Test receive registration with invalid address from task executor
     */
    @Test
    public void testRegisterTaskExecutorFromInvalidAddress() throws Exception {
        try {
            // test throw exception when receive a registration from taskExecutor which takes invalid address
            String invalidAddress = "/taskExecutor2";

            CompletableFuture<RegistrationResponse> invalidAddressFuture =
                rmGateway.registerTaskExecutor(invalidAddress, taskExecutorResourceID, slotReport, dataPort, hardwareDescription, timeout);
            assertTrue(invalidAddressFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS) instanceof RegistrationResponse.Decline);
        } finally {
            if (testingFatalErrorHandler.hasExceptionOccurred()) {
                testingFatalErrorHandler.rethrowError();
            }
        }
    }

    /**
     * Registers a mocked {@link TaskExecutorGateway} under the given address with the
     * test RPC service and returns a fresh resource id for it.
     */
    private ResourceID mockTaskExecutor(String taskExecutorAddress) {
        TaskExecutorGateway taskExecutorGateway = mock(TaskExecutorGateway.class);
        when(taskExecutorGateway.getAddress()).thenReturn(taskExecutorAddress);

        ResourceID taskExecutorResourceID = ResourceID.generate();
        rpcService.registerGateway(taskExecutorAddress, taskExecutorGateway);
        return taskExecutorResourceID;
    }

    /**
     * Builds and starts a {@link StandaloneResourceManager} wired with testing HA
     * services and the given leader election service / fatal error handler.
     */
    private StandaloneResourceManager createAndStartResourceManager(LeaderElectionService rmLeaderElectionService, FatalErrorHandler fatalErrorHandler) throws Exception {
        TestingHighAvailabilityServices highAvailabilityServices = new TestingHighAvailabilityServices();
        HeartbeatServices heartbeatServices = new HeartbeatServices(5L, 5L);
        highAvailabilityServices.setResourceManagerLeaderElectionService(rmLeaderElectionService);
        ResourceManagerConfiguration resourceManagerConfiguration = new ResourceManagerConfiguration(
            Time.seconds(5L),
            Time.seconds(5L));
        SlotManager slotManager = new SlotManager(
            rpcService.getScheduledExecutor(),
            TestingUtils.infiniteTime(),
            TestingUtils.infiniteTime(),
            TestingUtils.infiniteTime());
        MetricRegistryImpl metricRegistry = mock(MetricRegistryImpl.class);
        JobLeaderIdService jobLeaderIdService = new JobLeaderIdService(
            highAvailabilityServices,
            rpcService.getScheduledExecutor(),
            Time.minutes(5L));

        StandaloneResourceManager resourceManager =
            new StandaloneResourceManager(
                rpcService,
                FlinkResourceManager.RESOURCE_MANAGER_NAME,
                resourceManagerResourceID,
                resourceManagerConfiguration,
                highAvailabilityServices,
                heartbeatServices,
                slotManager,
                metricRegistry,
                jobLeaderIdService,
                fatalErrorHandler);

        resourceManager.start();

        return resourceManager;
    }

    /**
     * Grants leadership with a random session id; the returned future completes once
     * leadership is acknowledged.
     */
    private CompletableFuture<UUID> grantLeadership(TestingLeaderElectionService leaderElectionService) {
        UUID leaderSessionId = UUID.randomUUID();
        return leaderElectionService.isLeader(leaderSessionId);
    }
}
package com.zimbra.qa.selenium.projects.desktop.ui.mail;

import java.util.List;

import com.zimbra.qa.selenium.framework.items.*;
import com.zimbra.qa.selenium.framework.items.RecipientItem.RecipientType;
import com.zimbra.qa.selenium.framework.ui.*;
import com.zimbra.qa.selenium.framework.util.*;
import com.zimbra.qa.selenium.framework.util.GeneralUtility.WAIT_FOR_OPERAND;
import com.zimbra.qa.selenium.framework.util.staf.Stafpostqueue;
import com.zimbra.qa.selenium.projects.desktop.ui.*;

/**
 * The <code>FormMailNew<code> object defines a compose new message view
 * in the Zimbra Ajax client.
 * <p>
 * This class can be used to compose a new message.
 * <p>
 *
 * @author Matt Rhoades
 * @see http://wiki.zimbra.com/wiki/Testing:_Selenium:_ZimbraSelenium_Overview#Mail_Page
 */
public class FormMailNew extends AbsForm {

	/**
	 * Defines Selenium locators for various objects in {@link FormMailNew}
	 */
	public static class Locators {

		public static final String zSendIconBtn = "css=[id^=zb__COMPOSE][id$=__SEND_title]";
		public static final String zCancelIconBtn = "css=[id^=zb__COMPOSE][id$=__CANCEL_title]";
		public static final String zSaveDraftIconBtn = "css=[id^=zb__COMPOSE][id$=__SAVE_DRAFT_title]";
		public static final String zSpellCheckIconBtn = "css=[id^=zb__COMPOSE][id$=__SPELL_CHECK_title]";

		public static final String zToField = "css=[id^=zv__COMPOSE][id$=_to_control]";
		public static final String zCcField = "css=[id^=zv__COMPOSE][id$=_cc_control]";
		public static final String zBccField = "css=[id^=zv__COMPOSE][id$=_bcc_control]";
		public static final String zSubjectField = "css=div[id^=zv__COMPOSE] input[id$=_subject_control]";

		public static final String zAttachmentField = "css=div[id$=_attachments_div]";
		public static final String zAttachmentImage = "css=div[id$=_attachments_div] div[class='ImgAttachment']";
		public static final String zAttachmentCheckbox = "css=div[id$=_attachments_div] input[name='ZmComposeView_forAttName1']";
		// NOTE(review): intentionally left open — the attachment name and ")" are appended by callers.
		public static final String zAttachmentText = "css=div[id$=_attachments_div] a[class='AttLink']:contains(";

		public static final String zLinkText = "css=iframe[id*='DWT'][class*='Editor']";
		public static final String zBodyFrameHTML = "//div[contains(id,'zv__COMPOSE')]//iframe";

		public static final String zPriorityPulldown = "css=[id*='__COMPOSE'][id$='___priority_dropdown']";
		// NOTE(review): the three priority option locators are identical to the pulldown
		// locator — looks like placeholders; the real option locators are hard-coded in
		// zToolbarPressPulldown(). Confirm before relying on these constants.
		public static final String zPriorityOptionHigh = "css=[id*='__COMPOSE'][id$='___priority_dropdown']";
		public static final String zPriorityOptionNormal = "css=[id*='__COMPOSE'][id$='___priority_dropdown']";
		public static final String zPriorityOptionLow = "css=[id*='__COMPOSE'][id$='___priority_dropdown']";

		public static final String zBubbleToField = "css=[id^=zv__COMPOSE][id$=_to_cell]";
		public static final String zBubbleCcField = "css=[id^=zv__COMPOSE][id$=_cc_cell]";
		public static final String zBubbleBccField = "css=[id^=zv__COMPOSE][id$=_bcc_cell]";
	}

	/**
	 * Typed enumeration of the fillable fields on the compose form
	 * (see {@link FormMailNew#zFillField}).
	 */
	public static class Field {

		public static final Field To = new Field("To");
		public static final Field Cc = new Field("Cc");
		public static final Field Bcc = new Field("Bcc");
		public static final Field Subject = new Field("Subject");
		public static final Field Body = new Field("Body");

		// Display name of the field.
		private String field;

		private Field(String name) {
			field = name;
		}

		@Override
		public String toString() {
			return (field);
		}
	}

	/**
	 * Protected constructor for this object.  Only classes within
	 * this package should create DisplayMail objects.
	 *
	 * @param application
	 */
	public FormMailNew(AbsApplication application) {
		super(application);

		logger.info("new " + FormMailNew.class.getCanonicalName());
	}

	@Override
	public String myPageName() {
		return (this.getClass().getName());
	}

	// Submits the compose form by pressing Send and waiting for the client to settle.
	@Override
	public void zSubmit() throws HarnessException {
		logger.info("FormMailNew.submit()");

		zToolbarPressButton(Button.B_SEND);

		this.zWaitForBusyOverlay();
	}

	/**
	 * Press the toolbar button
	 * @param button
	 * @return the page opened by the button, or null
	 * @throws HarnessException
	 */
	public AbsPage zToolbarPressButton(Button button) throws HarnessException {
		logger.info(myPageName() + " zToolbarPressButton("+ button +")");

		tracer.trace("Click button "+ button);

		if ( button == null )
			throw new HarnessException("Button cannot be null!");

		// Fallthrough objects
		AbsPage page = null;
		String locator = null;

		if ( button == Button.B_SEND ) {

			locator = Locators.zSendIconBtn;

			// Click on send
			this.zClick(locator);
			this.zWaitForBusyOverlay();

			// Wait for the message to be delivered
			Stafpostqueue sp = new Stafpostqueue();
			sp.waitForPostqueue();

			return (page);

		} else if ( button == Button.B_CANCEL ) {

			locator = Locators.zCancelIconBtn;
			page = new DialogWarning(DialogWarning.DialogWarningID.SaveCurrentMessageAsDraft, this.MyApplication, ((AppAjaxClient)this.MyApplication).zPageMail);

			// If the compose view is not dirty (i.e. no pending changes)
			// then the dialog will not appear. So, click the button
			// and return the page, without waiting for it to be active
			this.zClick(locator);

			this.zWaitForBusyOverlay();

			// Return the page, if specified
			return (page);

		} else if ( button == Button.B_SAVE_DRAFT ) {

			locator = Locators.zSaveDraftIconBtn;
			page = this;

			// FALL THROUGH

		} else if ( button == Button.B_ADD_ATTACHMENT ) {

			throw new HarnessException("implement me (?)");

			// FALL THROUGH

		} else if ( button == Button.B_SPELL_CHECK ) {

			locator = Locators.zSpellCheckIconBtn;
			page = this;

			// FALL THROUGH

		} else if ( button == Button.B_SIGNATURE ) {

			throw new HarnessException("use zToolbarPressPulldown to attach signature");

		} else if ( button == Button.B_OPTIONS ) {

			throw new HarnessException("use zToolbarPressPulldown to attach signature");

		} else if ( button == Button.B_SHOWBCC) {

			page = this;
			locator = "xpath=//div[contains(@id,'zv__COMPOSE')]//a[contains(@id,'_toggle_bcc')]";

			// Nothing to do if the BCC row is already visible.
			if ( zBccIsActive() )
				return (this);

			////
			// For some reason, zClick doesn't work for "Show BCC", but sClick does
			////

			// Click it
			this.sClick(locator);
			this.zWaitForBusyOverlay();

			return (page);

		} else {
			throw new HarnessException("no logic defined for button "+ button);
		}

		// Make sure a locator was set
		if ( locator == null )
			throw new HarnessException("locator was null for button "+ button);

		// Default behavior, process the locator by clicking on it
		//

		// Click it
		this.zClick(locator);

		// if the app is busy, wait for it to become active again
		this.zWaitForBusyOverlay();

		if ( page != null ) {

			// Make sure the page becomes active
			page.zWaitForActive();
		}

		// Return the page, if specified
		return (page);
	}

	/**
	 * Press the toolbar pulldown and the menu option
	 * @param pulldown
	 * @param option
	 * @return the page opened by the option, or null
	 * @throws HarnessException
	 */
	public AbsPage zToolbarPressPulldown(Button pulldown, Button option) throws HarnessException {
		logger.info(myPageName() + " zToolbarPressPulldown("+ pulldown +", "+ option +")");

		tracer.trace("Click pulldown "+ pulldown +" then "+ option);

		if ( pulldown == null )
			throw new HarnessException("Pulldown cannot be null!");

		if ( option == null )
			throw new HarnessException("Option cannot be null!");

		// Default behavior variables
		//
		String pulldownLocator = null;	// If set, this will be expanded
		String optionLocator = null;	// If set, this will be clicked
		AbsPage page = null;			// If set, this page will be returned

		// Based on the button specified, take the appropriate action(s)
		//
		if ( pulldown == Button.B_PRIORITY ) {

			if ( option == Button.O_PRIORITY_HIGH ) {

				// TODO
				pulldownLocator = Locators.zPriorityPulldown;
				// Have to use xpath because there is no unique identifier to select the text "High" and by using xpath, it selects the text "high" through the sibling relationship.
				// When using the css to point to the icon, it clicks on the outside of the drop down menu
				// , therefore it ends up closing and selecting nothing
				optionLocator = "//div[@class='ImgPriorityHigh_list']/../../td[@class='ZWidgetTitle']";
				page = this;

			} else if ( option == Button.O_PRIORITY_NORMAL ) {

				// TODO
				pulldownLocator = Locators.zPriorityPulldown;
				optionLocator = "css=[class='ImgPriorityNormal_list']";
				page = this;

			} else if ( option == Button.O_PRIORITY_LOW ) {

				// TODO
				pulldownLocator = Locators.zPriorityPulldown;
				optionLocator = "css=[class='ImgPriorityLow_list']";
				page = this;

			} else {
				throw new HarnessException("unsupported priority option "+ option);
			}

		} else {
			throw new HarnessException("no logic defined for pulldown "+ pulldown);
		}

		// Default behavior
		if ( pulldownLocator != null ) {

			// Make sure the locator exists
			if ( !this.sIsElementPresent(pulldownLocator) ) {
				throw new HarnessException("Button "+ pulldown +" option "+ option +" pulldownLocator "+ pulldownLocator +" not present!");
			}

			this.zClick(pulldownLocator);

			this.zWaitForBusyOverlay();

			if ( optionLocator != null ) {

				// Make sure the locator exists
				if ( !this.sIsElementPresent(optionLocator) ) {
					throw new HarnessException("Button "+ pulldown +" option "+ option +" optionLocator "+ optionLocator +" not present!");
				}

				this.zClick(optionLocator);

				this.zWaitForBusyOverlay();
			}

			// If we click on pulldown/option and the page is specified, then
			// wait for the page to go active
			if ( page != null ) {
				page.zWaitForActive();
			}
		}

		// Return the specified page, or null if not set
		return (page);
	}

	/**
	 * Fill in the form field with the specified text
	 * @param field
	 * @param value
	 * @param textToWait optional text to wait for in the body before filling
	 * @throws HarnessException
	 */
	public void zFillField(Field field, String value, String... textToWait) throws HarnessException {

		tracer.trace("Set "+ field +" to "+ value);

		String locator = null;

		if (textToWait != null) {
			// Determine whether the body is plain-text (no iframe) or HTML (iframe),
			// then wait until the expected text appears in it.
			int frames = this.sGetXpathCount("//iframe");
			logger.debug("Body: # of frames: "+ frames);
			String tempLocator = null;
			boolean html = false;
			try {
				if ( frames == 0 ) {
					////
					// Text compose
					////
					tempLocator = "//textarea[contains(@id,'textarea_')]";
				} else if ( frames >= 1 ) {
					////
					// HTML
					////
					html = true;
					this.sSelectFrame("index=0");
					tempLocator = "//html//body";
				}
				GeneralUtility.waitForElementPresent(this, tempLocator);
				for (int i = 0; i < textToWait.length; i++) {
					// Wait for text
					Object [] params = new Object [] {tempLocator};
					logger.info("message: " + this.sGetText(tempLocator));
					GeneralUtility.waitFor(null, this, false, "sGetText", params, WAIT_FOR_OPERAND.CONTAINS,
							textToWait[i], 30000, 1000);
				}
			} finally {
				// Always restore the top frame so later locators resolve.
				if (html) {
					this.sSelectFrame("relative=top");
				}
			}
		}

		if ( field == Field.To ) {

			locator = Locators.zToField;

			// FALL THROUGH

		} else if ( field == Field.Cc ) {

			locator = Locators.zCcField;

			// FALL THROUGH

		} else if ( field == Field.Bcc ) {

			locator = Locators.zBccField;

			// Make sure the BCC field is showing
			if ( !zBccIsActive() ) {
				this.zToolbarPressButton(Button.B_SHOWBCC);
			}

			// FALL THROUGH

		} else if ( field == Field.Subject ) {

			locator = Locators.zSubjectField;

			// FALL THROUGH

		} else if ( field == Field.Body ) {

			int frames = this.sGetXpathCount("//iframe");
			logger.debug("Body: # of frames: "+ frames);

			if ( frames == 0 ) {

				////
				// Text compose
				////
locator = "//textarea[contains(@id,'textarea_')]"; if ( !this.sIsElementPresent(locator)) throw new HarnessException("Unable to locate compose body"); this.sFocus(locator); this.zClickAt(locator, "0,0"); this.zWaitForBusyOverlay(); this.zTypeKeys(locator, value); return; } else if ( frames == 1 ) { //// // HTML compose //// try { this.sSelectFrame("index=0"); // iframe index is 0 based locator = "//html//body"; if ( !this.sIsElementPresent(locator)) throw new HarnessException("Unable to locate compose body"); this.sFocus(locator); this.zClickAt(locator, "0,0"); this.zTypeKeys(locator, value); } finally { // Make sure to go back to the original iframe this.sSelectFrame("relative=top"); } // Is this requried? this.zWaitForBusyOverlay(); return; } else { throw new HarnessException("Compose //iframe count was "+ frames); } } else { throw new HarnessException("not implemented for field "+ field); } if ( locator == null ) { throw new HarnessException("locator was null for field "+ field); } // Default behavior, enter value into locator field // // Make sure the button exists if ( !this.sIsElementPresent(locator) ) throw new HarnessException("Field is not present field="+ field +" locator="+ locator); // Enter text this.sType(locator, value); this.zWaitForBusyOverlay(); } private boolean zBccIsActive() throws HarnessException { logger.info(myPageName() + ".zBccIsActive()"); // <tr id='zv__COMPOSEX_bcc_row' style='display: table_row' x-display='table-row' ... // <tr id='zv__COMPOSEX_bcc_row' style='display: none' x-display='table-row' ... String xpath = "//div[contains(@id,'zv__COMPOSE')]//tr[contains(@id,'_bcc_row')]"; if ( !sIsElementPresent(xpath) ) throw new HarnessException("Unable to locate the BCC field "+ xpath); String locator = "xpath=("+ xpath +")@style"; String style = this.sGetAttribute(locator); logger.info(myPageName() + ".zBccIsActive() ... 
style="+ style); return (!style.contains("none")); } @Override public void zFill(IItem item) throws HarnessException { zFill(item, null); } public void zFill(IItem item, String... textToWait) throws HarnessException { logger.info(myPageName() + ".zFill(ZimbraItem)"); logger.info(item.prettyPrint()); // Make sure the item is a MailItem if ( !(item instanceof MailItem) ) { throw new HarnessException("Invalid item type - must be MailItem"); } // Convert object to MailItem MailItem mail = (MailItem) item; // Fill out the form // // Handle the subject if ( mail.dSubject != null ) { zFillField(Field.Subject, mail.dSubject, textToWait); } if ( mail.dBodyText != null ) { zFillField(Field.Body, mail.dBodyText, textToWait); } if ( mail.dBodyHtml != null ) { zFillField(Field.Body, mail.dBodyHtml, textToWait); } // TODO: how to handle HTML body? // Handle the Recipient list, which can be a combination // of To, Cc, Bcc, and From StringBuilder to = null; StringBuilder cc = null; StringBuilder bcc = null; StringBuilder from = null; // Convert the list of recipients to a semicolon separated string List<RecipientItem> recipients = mail.dAllRecipients(); if ( recipients != null ) { if ( !recipients.isEmpty() ) { for (RecipientItem r : recipients) { if ( r.dType == RecipientType.To ) { if ( to == null ) { to = new StringBuilder(); to.append(r.dEmailAddress); } else { to.append(";").append(r.dEmailAddress); } } if ( r.dType == RecipientType.Cc ) { if ( cc == null ) { cc = new StringBuilder(); cc.append(r.dEmailAddress); } else { cc.append(";").append(r.dEmailAddress); } } if ( r.dType == RecipientType.Bcc ) { if ( bcc == null ) { bcc = new StringBuilder(); bcc.append(r.dEmailAddress); } else { bcc.append(";").append(r.dEmailAddress); } } if ( r.dType == RecipientType.From ) { if ( from == null ) { from = new StringBuilder(); from.append(r.dEmailAddress); } else { from.append(";").append(r.dEmailAddress); } } } } } // Fill out the To field if ( to != null ) { this.zFillField(Field.To, 
to.toString()); } if ( cc != null ) { this.zFillField(Field.Cc, cc.toString()); } if ( bcc != null ) { this.zFillField(Field.Bcc, bcc.toString()); } } @Override public boolean zIsActive() throws HarnessException { logger.info(myPageName() + " zIsActive()"); // Look for the div String locator = "css=div[id^='ztb__COMPOSE']"; if ( !this.sIsElementPresent(locator) ) { return (false); } if ( !this.zIsVisiblePerPosition(locator, 0, 0) ) { return (false); } logger.info(myPageName() + " zIsActive() = true"); return (true); } public boolean zHasAttachment(String name) throws HarnessException { return (Boolean)GeneralUtility.waitFor(null, this, false, "zIsAttachmentReady", new Object[] {name}, WAIT_FOR_OPERAND.EQ, true, 30000, 1000); } public boolean zIsAttachmentReady(String name) throws HarnessException { //verify clipper image existed, checkbox is checked, and attachment file name return sIsElementPresent(Locators.zAttachmentImage) && sIsChecked(Locators.zAttachmentCheckbox) && sIsElementPresent(Locators.zAttachmentText + "'" + name + "'" + ")"); } }
package au.org.aurin.wif.restclient.demand;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.springframework.web.client.HttpClientErrorException;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import au.org.aurin.wif.exception.config.WifInvalidConfigException;
import au.org.aurin.wif.exception.validate.WifInvalidInputException;
import au.org.aurin.wif.io.SslUtil;
import au.org.aurin.wif.model.Projection;
import au.org.aurin.wif.model.WifProject;
import au.org.aurin.wif.model.allocation.AllocationLU;
import au.org.aurin.wif.model.demand.AreaRequirement;
import au.org.aurin.wif.model.demand.DemandConfig;
import au.org.aurin.wif.model.demand.DemandScenario;
import au.org.aurin.wif.model.demand.DemographicTrend;
import au.org.aurin.wif.model.demand.EmploymentSector;
import au.org.aurin.wif.model.demand.data.EmploymentData;
import au.org.aurin.wif.model.demand.data.ProjectedData;
import au.org.aurin.wif.model.demand.info.DemandInfo;
import au.org.aurin.wif.model.demand.info.EmploymentDemandInfo;
import au.org.aurin.wif.model.demand.info.ResidentialDemandInfo;
import au.org.aurin.wif.model.reports.demand.DemandAnalysisReport;
import au.org.aurin.wif.restclient.ProjectServiceClient;
import au.org.aurin.wif.svc.WifKeys;

/**
 * Integration tests for the demand-scenario REST client: create, read, list,
 * update, analyse and delete a DemandScenario against a live service.
 * Methods run in a strict dependency chain (see dependsOnMethods) and share
 * state via {@link #demandScenarioId} and {@link #demandScenarioLabel}.
 * All tests are currently disabled (enabled = false).
 */
@ContextConfiguration(locations = { "/test-integration-client-context.xml" })
public class DemandScenarioServiceRestIT extends
    AbstractTestNGSpringContextTests {

  /** The Constant LOGGER. */
  private static final Logger LOGGER = LoggerFactory
      .getLogger(DemandScenarioServiceRestIT.class);

  /** The demand scenario service client. */
  @Autowired
  private DemandScenarioServiceClient demandScenarioServiceClient;

  /** The project service client. */
  @Autowired
  private ProjectServiceClient projectServiceClient;

  /** The demand scenario id, set by createDemandScenario(). */
  private String demandScenarioId;

  /** The role id. */
  private final String roleId = "aurin";

  /** The demand scenario label; replaced with a unique value per run. */
  private String demandScenarioLabel = "demandScenarioTest474533%##$%%18";

  /** The wif project id. */
  String wifProjectId;

  /** The demand config service client. */
  @Autowired
  private DemandConfigServiceClient demandConfigServiceClient;

  /**
   * Setup: trust self-signed SSL and verify the demo project is reachable.
   *
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   */
  @BeforeClass(enabled = false, groups = { "restclienttest", "demand",
      "demandScenario" })
  public void setup() throws WifInvalidInputException,
      WifInvalidConfigException {
    LOGGER.debug("setup: ");
    SslUtil.trustSelfSignedSSL();
    wifProjectId = WifKeys.TEST_PROJECT_ID;
    final WifProject project = projectServiceClient.getProjectConfiguration(
        roleId, wifProjectId);
    Assert.assertEquals(project.getLabel(), "Demonstration");
    Assert.assertNotNull(project.getCreationDate());
    Assert.assertNotNull(project.getId());
  }

  /**
   * Creates the demand scenario: one residential demand info and one
   * employment demand info (Retail Trade, three projection years), then
   * verifies the scenario id is registered on the project.
   *
   * @throws Exception
   *           the exception
   */
  @Test(enabled = false, groups = { "restclienttest", "demand",
      "demandScenario" })
  public void createDemandScenario() throws Exception {
    LOGGER.debug("createDemandScenario: ");
    final WifProject project = projectServiceClient.getProjectConfiguration(
        roleId, wifProjectId);

    // Creating dummy scenario
    final AllocationLU residentialLowLU = project
        .getExistingLandUseByLabel("Low Density Res.");
    final AllocationLU regionalRetailLU = project
        .getExistingLandUseByLabel("Regional Retail");
    final DemandConfig demandConfig = demandConfigServiceClient
        .getDemandConfig(roleId, WifKeys.TEST_PROJECT_ID);
    Assert.assertNotNull(demandConfig.getId());
    // Unique label so repeated runs do not collide server-side
    demandScenarioLabel = "DemandScenarioRestId" + System.currentTimeMillis();
    final EmploymentSector sectorRetailTrade = demandConfig
        .getSectorByLabel("Retail Trade");
    final Projection projection0 = demandConfig.getProjectionByLabel("2005");
    final Projection projection1 = demandConfig.getProjectionByLabel("2010");
    final Projection projection2 = demandConfig.getProjectionByLabel("2015");
    final DemographicTrend highGrowthDemographicTrend = demandConfig
        .getTrendByLabel("High Growth Trend");

    final DemandScenario testDemandScenario = new DemandScenario();
    testDemandScenario.setFeatureFieldName("High Growth");
    testDemandScenario.setLabel("High Growth");
    testDemandScenario.setDemographicTrendLabel("High Growth Trend");
    testDemandScenario.setProjectId(project.getId());
    testDemandScenario.setDemographicTrendLabel(highGrowthDemographicTrend
        .getLabel());

    // low residential information for demand
    final ResidentialDemandInfo rdinfo = new ResidentialDemandInfo();
    rdinfo.setInfillRate(0.0);
    rdinfo.setFutureBreakdownByHType(0.60);
    rdinfo.setCurrentDensity(0.94);
    rdinfo.setFutureDensity(1.0);
    rdinfo.setFutureVacancyRate(0.0963);
    rdinfo.setResidentialLUId(residentialLowLU.getId());

    final EmploymentDemandInfo edinfoARetailTrade = new EmploymentDemandInfo();
    edinfoARetailTrade.setSectorLabel(sectorRetailTrade.getLabel());
    edinfoARetailTrade.setCurrentDensity(9.85);
    edinfoARetailTrade.setFutureDensity(9.85);
    edinfoARetailTrade.setInfillRate(0.0);
    final Set<ProjectedData> empProysA = new HashSet<ProjectedData>();
    edinfoARetailTrade.setProjectedDatas(empProysA);
    edinfoARetailTrade.setDemandScenario(testDemandScenario);
    final Set<DemandInfo> empDemandInfosRR = new HashSet<DemandInfo>();
    regionalRetailLU.setDemandInfos(empDemandInfosRR);
    regionalRetailLU.addDemandInfo(edinfoARetailTrade);

    // Projected employee counts per projection year
    final EmploymentData emProjRR = new EmploymentData();
    emProjRR.setEmploymentInfo(edinfoARetailTrade);
    emProjRR.setEmployees(5771);
    emProjRR.setProjectionLabel(projection0.getLabel());
    final EmploymentData emProjRR1 = new EmploymentData();
    emProjRR1.setEmploymentInfo(edinfoARetailTrade);
    emProjRR1.setEmployees(6558);
    emProjRR1.setProjectionLabel(projection1.getLabel());
    final EmploymentData emProjRR2 = new EmploymentData();
    emProjRR2.setEmploymentInfo(edinfoARetailTrade);
    emProjRR2.setEmployees(7182);
    emProjRR2.setProjectionLabel(projection2.getLabel());
    edinfoARetailTrade.addProjectedData(emProjRR);
    edinfoARetailTrade.addProjectedData(emProjRR1);
    edinfoARetailTrade.addProjectedData(emProjRR2);

    testDemandScenario.getDemandInfos().add(edinfoARetailTrade);
    testDemandScenario.getDemandInfos().add(rdinfo);
    testDemandScenario.setLabel(demandScenarioLabel);

    demandScenarioId = demandScenarioServiceClient.createDemandScenario(roleId,
        wifProjectId, testDemandScenario);
    Assert.assertNotNull(demandScenarioId);
    final WifProject project2 = projectServiceClient.getProjectConfiguration(
        roleId, wifProjectId);
    Assert.assertNotNull(project2.getDemandScenariosMap());
    Assert.assertTrue(project2.getDemandScenariosMap().containsKey(
        demandScenarioId));
  }

  /**
   * Gets the demand scenario created above and checks its label.
   *
   * @return the demand scenario
   * @throws Exception
   *           the exception
   */
  @Test(enabled = false, groups = { "restclienttest", "demand",
      "demandScenario" }, dependsOnMethods = { "createDemandScenario" })
  public void getDemandScenario() throws Exception {
    final DemandScenario demandScenario = demandScenarioServiceClient
        .getDemandScenario(roleId, wifProjectId, demandScenarioId);
    Assert.assertEquals(demandScenario.getLabel(), demandScenarioLabel);
    final WifProject project2 = projectServiceClient.getProjectConfiguration(
        roleId, wifProjectId);
    Assert.assertTrue(project2.getDemandScenariosMap().containsKey(
        demandScenarioId));
  }

  /**
   * Gets the demand scenarios for project.
   *
   * @return the demand scenarios for project
   * @throws Exception
   *           the exception
   */
  @Test(enabled = false, groups = { "restclienttest", "demand",
      "demandScenario" }, dependsOnMethods = "getDemandScenario")
  public void getDemandScenariosForProject() throws Exception {
    final List<DemandScenario> list = demandScenarioServiceClient
        .getDemandScenariosForProject(roleId, wifProjectId);
    LOGGER.debug("DemandScenarios = {}", list.size());
    Assert.assertNotEquals(list.size(), 1);
  }

  /**
   * Update demand scenario: rename it and verify the rename persisted.
   *
   * @throws Exception
   *           the exception
   */
  @Test(enabled = false, groups = { "restclienttest", "demand",
      "demandScenario" }, dependsOnMethods = { "getDemandScenariosForProject" })
  public void updateDemandScenario() throws Exception {
    final DemandScenario demandScenario = demandScenarioServiceClient
        .getDemandScenario(roleId, wifProjectId, demandScenarioId);
    demandScenario.setLabel("demandScenario REST test");
    demandScenarioServiceClient.updateDemandScenario(roleId, wifProjectId,
        demandScenarioId, demandScenario);
    final DemandScenario tmp = demandScenarioServiceClient.getDemandScenario(
        roleId, wifProjectId, demandScenarioId);
    Assert.assertEquals(tmp.getLabel(), "demandScenario REST test");
  }

  /**
   * Gets the outcome (area requirements) of the scenario analysis.
   *
   * @return the outcome
   * @throws Exception
   *           the exception
   */
  @Test(enabled = false, groups = { "restclienttest", "demand",
      "demandScenario" }, dependsOnMethods = "updateDemandScenario")
  public void getOutcome() throws Exception {
    final List<AreaRequirement> outcome = demandScenarioServiceClient
        .getDemandScenarioOutcome(roleId, wifProjectId, demandScenarioId);
    Assert.assertNotNull(outcome);
    LOGGER.debug("outcome size {}", outcome.size());
    Assert.assertEquals(outcome.size(), 6);
  }

  /**
   * Gets the demand scenario report for the well-known test scenario.
   *
   * @return the demand scenario report
   * @throws Exception
   *           the exception
   */
  @Test(enabled = false, groups = { "restclienttest", "demand",
      "demandScenario" }, dependsOnMethods = { "createDemandScenario" })
  public void getDemandScenarioReport() throws Exception {
    final DemandAnalysisReport demandAnalysisReport = demandScenarioServiceClient
        .getDemandScenarioReport(roleId, WifKeys.TEST_PROJECT_ID,
            WifKeys.TEST_DEMAND_SCENARIO_ID);
    Assert.assertEquals(demandAnalysisReport.getReportType(), "DemandScenario");
  }

  /**
   * Delete demand scenario; the final getDemandScenario is expected to fail
   * with HttpClientErrorException (scenario no longer exists).
   *
   * @throws Exception
   *           the exception
   */
  @Test(enabled = false, alwaysRun = true, groups = { "restclienttest",
      "demand", "demandScenario" }, dependsOnMethods = {
      "updateDemandScenario", "getOutcome" },
      expectedExceptions = HttpClientErrorException.class)
  public void deleteDemandScenario() throws Exception {
    demandScenarioServiceClient.deleteDemandScenario(roleId, wifProjectId,
        demandScenarioId);
    final WifProject project2 = projectServiceClient.getProjectConfiguration(
        roleId, wifProjectId);
    Assert.assertNotNull(project2.getDemandScenariosMap());
    Assert.assertFalse(project2.getDemandScenariosMap().containsKey(
        demandScenarioId));
    Assert.assertNull(demandScenarioServiceClient.getDemandScenario(roleId,
        wifProjectId, demandScenarioId));
  }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver12;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * OpenFlow 1.2 OXM entry for the masked MPLS_TC match field.
 * Immutable value class: a one-byte value plus a one-byte mask, serialized
 * after a fixed 4-byte typeLen header (0x80004702).
 */
class OFOxmMplsTcMaskedVer12 implements OFOxmMplsTcMasked {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmMplsTcMaskedVer12.class);
    // version: 1.2
    final static byte WIRE_VERSION = 3;
    final static int LENGTH = 6;

        private final static U8 DEFAULT_VALUE = U8.ZERO;
        private final static U8 DEFAULT_VALUE_MASK = U8.ZERO;

    // OF message fields
    private final U8 value;
    private final U8 mask;
//
    // Immutable default instance
    final static OFOxmMplsTcMaskedVer12 DEFAULT = new OFOxmMplsTcMaskedVer12(
        DEFAULT_VALUE, DEFAULT_VALUE_MASK
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmMplsTcMaskedVer12(U8 value, U8 mask) {
        if(value == null) {
            throw new NullPointerException("OFOxmMplsTcMaskedVer12: property value cannot be null");
        }
        if(mask == null) {
            throw new NullPointerException("OFOxmMplsTcMaskedVer12: property mask cannot be null");
        }
        this.value = value;
        this.mask = mask;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        return 0x80004702L;
    }

    @Override
    public U8 getValue() {
        return value;
    }

    @Override
    public U8 getMask() {
        return mask;
    }

    @Override
    public MatchField<U8> getMatchField() {
        return MatchField.MPLS_TC;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    // Canonical form: NO_MASK collapses to the unmasked OXM, FULL_MASK
    // (matches nothing) collapses to null, anything else stays as-is.
    public OFOxm<U8> getCanonical() {
        if (U8.NO_MASK.equals(mask)) {
            return new OFOxmMplsTcVer12(value);
        } else if(U8.FULL_MASK.equals(mask)) {
            return null;
        } else {
            return this;
        }
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_12;
    }

    public OFOxmMplsTcMasked.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder seeded from an existing message; unset fields fall back to the parent's.
    static class BuilderWithParent implements OFOxmMplsTcMasked.Builder {
        final OFOxmMplsTcMaskedVer12 parentMessage;

        // OF message fields
        private boolean valueSet;
        private U8 value;
        private boolean maskSet;
        private U8 mask;

        BuilderWithParent(OFOxmMplsTcMaskedVer12 parentMessage) {
            this.parentMessage = parentMessage;
        }

    @Override
    public long getTypeLen() {
        return 0x80004702L;
    }

    @Override
    public U8 getValue() {
        return value;
    }

    @Override
    public OFOxmMplsTcMasked.Builder setValue(U8 value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public U8 getMask() {
        return mask;
    }

    @Override
    public OFOxmMplsTcMasked.Builder setMask(U8 mask) {
        this.mask = mask;
        this.maskSet = true;
        return this;
    }

    @Override
    public MatchField<U8> getMatchField() {
        return MatchField.MPLS_TC;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    @Override
    public OFOxm<U8> getCanonical()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property canonical not supported in version 1.2");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_12;
    }

        @Override
        public OFOxmMplsTcMasked build() {
                U8 value = this.valueSet ? this.value : parentMessage.value;
                if(value == null)
                    throw new NullPointerException("Property value must not be null");
                U8 mask = this.maskSet ? this.mask : parentMessage.mask;
                if(mask == null)
                    throw new NullPointerException("Property mask must not be null");
                //
                return new OFOxmMplsTcMaskedVer12(
                    value,
                    mask
                );
        }
    }

    // From-scratch builder; unset fields fall back to the class defaults.
    static class Builder implements OFOxmMplsTcMasked.Builder {
        // OF message fields
        private boolean valueSet;
        private U8 value;
        private boolean maskSet;
        private U8 mask;

    @Override
    public long getTypeLen() {
        return 0x80004702L;
    }

    @Override
    public U8 getValue() {
        return value;
    }

    @Override
    public OFOxmMplsTcMasked.Builder setValue(U8 value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public U8 getMask() {
        return mask;
    }

    @Override
    public OFOxmMplsTcMasked.Builder setMask(U8 mask) {
        this.mask = mask;
        this.maskSet = true;
        return this;
    }

    @Override
    public MatchField<U8> getMatchField() {
        return MatchField.MPLS_TC;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    @Override
    public OFOxm<U8> getCanonical()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property canonical not supported in version 1.2");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_12;
    }

//
        @Override
        public OFOxmMplsTcMasked build() {
            U8 value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            U8 mask = this.maskSet ? this.mask : DEFAULT_VALUE_MASK;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");
            return new OFOxmMplsTcMaskedVer12(
                value,
                mask
            );
        }
    }

    final static Reader READER = new Reader();
    // Deserializer: validates the fixed typeLen, then reads value and mask bytes.
    static class Reader implements OFMessageReader<OFOxmMplsTcMasked> {
        @Override
        public OFOxmMplsTcMasked readFrom(ChannelBuffer bb) throws OFParseError {
            // fixed value property typeLen == 0x80004702L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80004702)
                throw new OFParseError("Wrong typeLen: Expected=0x80004702L(0x80004702L), got="+typeLen);
            U8 value = U8.of(bb.readByte());
            U8 mask = U8.of(bb.readByte());

            OFOxmMplsTcMaskedVer12 oxmMplsTcMaskedVer12 = new OFOxmMplsTcMaskedVer12(
                    value,
                      mask
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmMplsTcMaskedVer12);
            return oxmMplsTcMaskedVer12;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmMplsTcMaskedVer12Funnel FUNNEL = new OFOxmMplsTcMaskedVer12Funnel();
    // Guava Funnel used for hashing: mirrors the wire layout (typeLen, value, mask).
    static class OFOxmMplsTcMaskedVer12Funnel implements Funnel<OFOxmMplsTcMaskedVer12> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmMplsTcMaskedVer12 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80004702L
            sink.putInt((int) 0x80004702);
            message.value.putTo(sink);
            message.mask.putTo(sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    // Serializer: fixed typeLen header followed by the raw value and mask bytes.
    static class Writer implements OFMessageWriter<OFOxmMplsTcMaskedVer12> {
        @Override
        public void write(ChannelBuffer bb, OFOxmMplsTcMaskedVer12 message) {
            // fixed value property typeLen = 0x80004702L
            bb.writeInt((int) 0x80004702);
            bb.writeByte(message.value.getRaw());
            bb.writeByte(message.mask.getRaw());
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmMplsTcMaskedVer12(");
        b.append("value=").append(value);
        b.append(", ");
        b.append("mask=").append(mask);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmMplsTcMaskedVer12 other = (OFOxmMplsTcMaskedVer12) obj;

        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        if (mask == null) {
            if (other.mask != null)
                return false;
        } else if (!mask.equals(other.mask))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + ((value == null) ? 0 : value.hashCode());
        result = prime * result + ((mask == null) ? 0 : mask.hashCode());
        return result;
    }

}
/* * Copyright (c) 2012-2013, Pierre-Yves Chibon * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Wageningen University nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ''AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. 
*/
package nl.wur.plantbreeding.gff2RDF;

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.vocabulary.RDF;
import java.util.logging.Logger;
import nl.wur.plantbreeding.gff2RDF.Arabidopsis.At_GeneProtein;
import nl.wur.plantbreeding.gff2RDF.object.Gene;
import nl.wur.plantbreeding.gff2RDF.object.Marker;
import org.apache.commons.lang.StringEscapeUtils;

/**
 * This class handles the conversion from the objects used in this program
 * (genes, markers, gene/protein relations) to the RDF model.
 * @author Pierre-Yves Chibon -- py@chibon.fr
 */
public class ObjectToModel {

    /** Logger used for outputting log information. */
    private static final Logger LOG = Logger.getLogger(
            ObjectToModel.class.getName());

    /**
     * This is the base URI which will be used in the construction of the
     * model.
     */
    private final String uri = new App().getUri();
    /** The uri used to specify gene type and gene attributes. */
    private final String geneuri = uri + "GENE#";
    /** The uri used to specify scaffold type and scaffold attributes. */
    private final String scaffolduri = uri + "SCAFFOLD#";
    /** The uri used to specify marker type and marker attributes. */
    private final String markeruri = uri + "MARKER#";
    /** The uri used to specify position type and position attributes. */
    private final String positionuri = uri + "POSITION#";
    /** The uri used to specify map position type and map position
     * attributes. */
    private final String mappositionuri = uri + "MAPPOSITION#";
    /** The uri used to specify GO type and GO attribute.
     * This is the URI used by geneontology.org. */
    private final String gouri = "http://purl.org/obo/owl/GO#";
    /** The uri used to specify protein type and protein attribute.
     * This is the URI used by uniprot. */
    private final String proteinuri = "http://purl.uniprot.org/uniprot/";

    /**
     * Adds the given Arabidopsis thaliana gene information to the
     * provided Jena model and returns this model.
     * @param geneobj an Arabidopsis thaliana gene with information
     * @param model a Jena Model
     * @return a Jena Model with the gene information
     */
    public final Model addToModel(final Gene geneobj, final Model model) {
        // Create the scaffold node and type it.
        // NOTE(review): the scaffold resource is created even when the
        // chromosome is null or empty (yielding a "...SCAFFOLD#null" URI);
        // only the name property below is guarded. Kept as-is to preserve
        // the existing model layout -- confirm whether a guard is wanted.
        final Resource scaffold = model.createResource(scaffolduri
                + geneobj.getChromosome());
        scaffold.addProperty(RDF.type, scaffolduri);
        if (geneobj.getChromosome() != null
                && !geneobj.getChromosome().isEmpty()) {
            scaffold.addProperty(model.createProperty(scaffolduri
                    + "ScaffoldName"), geneobj.getChromosome());
        }

        // Create the gene node and add the type and basic information
        final Resource gene = model.createResource(geneuri
                + geneobj.getLocus());
        gene.addProperty(RDF.type, geneuri);
        gene.addProperty(model.createProperty(geneuri + "FeatureName"),
                geneobj.getLocus());
        if (geneobj.getDescription() != null
                && !geneobj.getDescription().isEmpty()) {
            gene.addProperty(model.createProperty(geneuri + "Description"),
                    geneobj.getDescription());
        }
        if (geneobj.getType() != null && !geneobj.getType().isEmpty()) {
            gene.addProperty(model.createProperty(geneuri + "FeatureType"),
                    geneobj.getType());
        }
        if (geneobj.getOrientation() != null
                && !geneobj.getOrientation().isEmpty()) {
            gene.addProperty(model.createProperty(geneuri + "Orientation"),
                    geneobj.getOrientation());
        }

        // Create the (anonymous) position node carrying start, stop and
        // scaffold information, and link it to the gene.
        if (geneobj.getChromosome() != null
                && !geneobj.getChromosome().isEmpty()) {
            final Resource position = model.createResource();
            position.addProperty(RDF.type, positionuri);
            position.addProperty(model.createProperty(positionuri + "Start"),
                    Integer.toString(geneobj.getStart()));
            position.addProperty(model.createProperty(positionuri + "Stop"),
                    Integer.toString(geneobj.getStop()));
            position.addProperty(
                    model.createProperty(positionuri + "Scaffold"), scaffold);
            gene.addProperty(model.createProperty(geneuri + "Position"),
                    position);
        }

        // Iterate over the GO term list and add the terms to the model
        for (String go : geneobj.getGoterms()) {
            // geneontology.org URIs use '_' instead of ':'
            // (e.g. GO:0005634 -> GO_0005634)
            final String goid = go.replace(":", "_");
            final Resource goterm = model.createResource(gouri + goid);
            goterm.addProperty(RDF.type, gouri);
            goterm.addProperty(model.createProperty(gouri + "GoID"), go);
            gene.addProperty(model.createProperty(geneuri + "Go"), goterm);
        }
        return model;
    }

    /**
     * Adds the given Arabidopsis thaliana gene/protein relation
     * information to the model.
     * @param agp an At_GeneProtein containing the relation between one gene
     * and one protein.
     * @param model a Jena Model to add the information into.
     * @return the Jena Model with the information added
     */
    public final Model addToModel(final At_GeneProtein agp,
            final Model model) {
        // Create the gene node and add the type
        final Resource gene = model.createResource(geneuri + agp.getLocus());
        gene.addProperty(RDF.type, geneuri);
        // Create the protein node
        final Resource protein = model.createResource(proteinuri
                + agp.getProtein());
        // Link the gene node to the protein node
        gene.addProperty(model.createProperty(geneuri + "Protein"), protein);
        return model;
    }

    /**
     * Adds an Arabidopsis thaliana marker to the model.
     * This marker can be either located on a genetic map (it has a Position)
     * or on a physical map (it has no Position but a Start and Stop).
     * @param marker a At_Marker to add to the model.
     * @param model the Jena model to which the At_Marker will be added.
     * @return the given Jena Model containing the original information and
     * the information about the marker.
     */
    public final Model addToModel(final Marker marker, final Model model) {
        // Create the marker node and add its name and type
        final Resource markerres = model.createResource(markeruri
                + marker.getId());
        markerres.addProperty(model.createProperty(markeruri + "MarkerName"),
                marker.getName());
        markerres.addProperty(RDF.type, markeruri);
        if (marker.getSgnid() != null && !marker.getSgnid().isEmpty()) {
            markerres.addProperty(model.createProperty(markeruri + "SGN-ID"),
                    marker.getSgnid());
        }

        // Create the scaffold node, add type and name
        final Resource scaffold = model.createResource(scaffolduri
                + marker.getChromosome());
        scaffold.addProperty(RDF.type, scaffolduri);
        if (marker.getChromosome() != null
                && !marker.getChromosome().isEmpty()) {
            // Add the scaffold name
            scaffold.addProperty(model.createProperty(scaffolduri
                    + "ScaffoldName"), marker.getChromosome());
            // Create the position node on the physical or the genetic map
            if (marker.isGenetic()) {
                // Genetic location of the marker
                markerres.addProperty(model.createProperty(markeruri
                        + "mapPosition"), marker.getPosition());
                markerres.addProperty(model.createProperty(markeruri
                        + "Chromosome"), marker.getChromosome());
            } else {
                // Physical location of the marker
                final Resource position = model.createResource();
                position.addProperty(RDF.type, positionuri);
                position.addProperty(model.createProperty(positionuri
                        + "Start"), Integer.toString(marker.getStart()));
                position.addProperty(model.createProperty(positionuri
                        + "Stop"), Integer.toString(marker.getStop()));
                position.addProperty(model.createProperty(positionuri
                        + "Scaffold"), scaffold);
                markerres.addProperty(model.createProperty(markeruri
                        + "Position"), position);
            }
        }
        return model;
    }

    /**
     * Adds the given description to a given gene in the given Jena Model.
     * @param geneid the geneid of the gene (used in the URI).
     * @param description the description to be added to the gene.
     * @param model the model in which this gene and its description go
     * @return the Jena Model with the added information
     */
    public final Model addGeneDescriptionToModel(final String geneid,
            String description, final Model model) {
        // Clean up the raw description: strip stray control-character
        // entity fragments, unescape the HTML entities, then drop any
        // remaining bare ampersands.
        // NOTE(review): "&#1" looks like a truncated numeric entity from the
        // source data dump -- TODO confirm against the input files.
        description = description.replaceAll("&#1", "");
        description = StringEscapeUtils.unescapeHtml(description);
        description = description.replaceAll("&", "");
        final Resource gene = model.createResource(geneuri + geneid);
        gene.addProperty(RDF.type, geneuri);
        gene.addProperty(model.createProperty(geneuri + "Description"),
                description);
        return model;
    }

    /**
     * Links the given protein (ID) to the given gene (ID) in the provided
     * Model.
     * @param model a Jena Model in which the gene will be linked to the
     * protein.
     * @param geneid a String representing the gene identifier.
     * @param protid a String representing the protein identifier
     * @return the Jena Model linking the gene to the protein.
     */
    public Model addProteinToModel(final Model model, final String geneid,
            final String protid) {
        final Resource gene = model.createResource(geneuri + geneid);
        gene.addProperty(RDF.type, geneuri);
        final Resource protein = model.createResource(proteinuri + protid);
        gene.addProperty(model.createProperty(geneuri + "Protein"), protein);
        return model;
    }

    /**
     * Links the two given proteins (by ID) as interacting with each other
     * in the given Jena Model. The "Interact" property is added in both
     * directions so the relation is symmetric.
     * @param protid1 the protein id of the first protein (used in the URI).
     * @param protid2 the protein id of the second protein (used in the URI).
     * @param model the model in which the interaction is stored
     * @return the Jena Model with the added information
     */
    public final Model addProteinProteinInteractionToModel(
            final String protid1, final String protid2, final Model model) {
        final String coreuri = "http://purl.org/obo/owl/GO#"
                .equals(protid1) ? null : "http://purl.uniprot.org/core/";
        final Resource prot1 = model.createResource(proteinuri + protid1);
        final Resource prot2 = model.createResource(proteinuri + protid2);
        prot1.addProperty(model.createProperty(
                "http://purl.uniprot.org/core/" + "Interact"), prot2);
        prot2.addProperty(model.createProperty(
                "http://purl.uniprot.org/core/" + "Interact"), prot1);
        return model;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.executiongraph;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.testutils.ManuallyTriggeredDirectExecutor;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.execution.SuppressRestartsException;
import org.apache.flink.runtime.executiongraph.failover.FailoverStrategy;
import org.apache.flink.runtime.executiongraph.failover.FailoverStrategy.Factory;
import org.apache.flink.runtime.executiongraph.failover.RestartPipelinedRegionStrategy;
import org.apache.flink.runtime.executiongraph.restart.FixedDelayRestartStrategy;
import org.apache.flink.runtime.executiongraph.restart.RestartStrategy;
import org.apache.flink.runtime.executiongraph.utils.SimpleSlotProvider;
import org.apache.flink.runtime.instance.SlotProvider;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobStatus;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.testtasks.NoOpInvokable;
import org.apache.flink.util.SerializedValue;

import org.junit.Test;

import java.util.Collections;
import java.util.concurrent.Executor;

import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.waitUntilExecutionState;
import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.waitUntilJobStatus;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * These tests make sure that global failover (restart all) always takes precedence over
 * local recovery strategies for the {@link RestartPipelinedRegionStrategy}
 *
 * <p>This test must be in the package it resides in, because it uses package-private methods
 * from the ExecutionGraph classes.
 */
public class PipelinedRegionFailoverConcurrencyTest {

	/**
	 * Tests that a cancellation concurrent to a local failover leads to a properly
	 * cancelled state.
	 */
	@Test
	public void testCancelWhileInLocalFailover() throws Exception {

		// the logic in this test is as follows:
		//  - start a job
		//  - cause a task failure and delay the local recovery action via the manual executor
		//  - cancel the job to go into cancelling
		//  - resume in local recovery action
		//  - validate that this does in fact not start a new task, because the graph as a
		//    whole should now be cancelled already

		final JobID jid = new JobID();
		final int parallelism = 2;

		// manually triggered executor: the local recovery action is queued on it,
		// which lets the test delay the recovery until after the cancellation
		final ManuallyTriggeredDirectExecutor executor = new ManuallyTriggeredDirectExecutor();

		final SimpleSlotProvider slotProvider = new SimpleSlotProvider(jid, parallelism);

		final ExecutionGraph graph = createSampleGraph(
			jid,
			new FailoverPipelinedRegionWithCustomExecutor(executor),
			new FixedDelayRestartStrategy(Integer.MAX_VALUE, 0),
			slotProvider,
			2);

		final ExecutionJobVertex ejv = graph.getVerticesTopologically().iterator().next();
		final ExecutionVertex vertex1 = ejv.getTaskVertices()[0];
		final ExecutionVertex vertex2 = ejv.getTaskVertices()[1];

		graph.scheduleForExecution();
		assertEquals(JobStatus.RUNNING, graph.getState());

		// let one of the vertices fail - that triggers a local recovery action
		vertex1.getCurrentExecutionAttempt().fail(new Exception("test failure"));
		assertEquals(ExecutionState.FAILED, vertex1.getCurrentExecutionAttempt().getState());

		// graph should still be running and the failover recovery action should be queued
		assertEquals(JobStatus.RUNNING, graph.getState());
		assertEquals(1, executor.numQueuedRunnables());

		// now cancel the job
		graph.cancel();

		assertEquals(JobStatus.CANCELLING, graph.getState());
		assertEquals(ExecutionState.FAILED, vertex1.getCurrentExecutionAttempt().getState());
		assertEquals(ExecutionState.CANCELING, vertex2.getCurrentExecutionAttempt().getState());

		// let the recovery action continue - it must not restart the failed task,
		// because the graph as a whole is being cancelled
		executor.trigger();

		// now report that cancelling is complete for the other vertex
		vertex2.getCurrentExecutionAttempt().cancelingComplete();

		assertEquals(JobStatus.CANCELED, graph.getState());
		assertTrue(vertex1.getCurrentExecutionAttempt().getState().isTerminal());
		assertTrue(vertex2.getCurrentExecutionAttempt().getState().isTerminal());

		// make sure all slots are recycled
		assertEquals(parallelism, slotProvider.getNumberOfAvailableSlots());
	}

	/**
	 * Tests that a terminal global failure concurrent to a local failover
	 * leads to a properly failed state.
	 */
	@Test
	public void testGlobalFailureConcurrentToLocalFailover() throws Exception {

		// the logic in this test is as follows:
		//  - start a job
		//  - cause a task failure and delay the local recovery action via the manual executor
		//  - cause a global failure
		//  - resume in local recovery action
		//  - validate that this does in fact not start a new task, because the graph as a
		//    whole should now be terminally failed already

		final JobID jid = new JobID();
		final int parallelism = 2;

		final ManuallyTriggeredDirectExecutor executor = new ManuallyTriggeredDirectExecutor();

		final SimpleSlotProvider slotProvider = new SimpleSlotProvider(jid, parallelism);

		final ExecutionGraph graph = createSampleGraph(
			jid,
			new FailoverPipelinedRegionWithCustomExecutor(executor),
			new FixedDelayRestartStrategy(Integer.MAX_VALUE, 0),
			slotProvider,
			2);

		final ExecutionJobVertex ejv = graph.getVerticesTopologically().iterator().next();
		final ExecutionVertex vertex1 = ejv.getTaskVertices()[0];
		final ExecutionVertex vertex2 = ejv.getTaskVertices()[1];

		graph.scheduleForExecution();
		assertEquals(JobStatus.RUNNING, graph.getState());

		// let one of the vertices fail - that triggers a local recovery action
		vertex1.getCurrentExecutionAttempt().fail(new Exception("test failure"));
		assertEquals(ExecutionState.FAILED, vertex1.getCurrentExecutionAttempt().getState());

		// graph should still be running and the failover recovery action should be queued
		assertEquals(JobStatus.RUNNING, graph.getState());
		assertEquals(1, executor.numQueuedRunnables());

		// now fail the job globally with a non-recoverable failure
		// (SuppressRestartsException prevents any restart)
		graph.failGlobal(new SuppressRestartsException(new Exception("test exception")));

		assertEquals(JobStatus.FAILING, graph.getState());
		assertEquals(ExecutionState.FAILED, vertex1.getCurrentExecutionAttempt().getState());
		assertEquals(ExecutionState.CANCELING, vertex2.getCurrentExecutionAttempt().getState());

		// let the recovery action continue - it must not restart the failed task,
		// because the graph as a whole is terminally failing
		executor.trigger();

		// now report that cancelling is complete for the other vertex
		vertex2.getCurrentExecutionAttempt().cancelingComplete();

		assertEquals(JobStatus.FAILED, graph.getState());
		assertTrue(vertex1.getCurrentExecutionAttempt().getState().isTerminal());
		assertTrue(vertex2.getCurrentExecutionAttempt().getState().isTerminal());

		// make sure all slots are recycled
		assertEquals(parallelism, slotProvider.getNumberOfAvailableSlots());
	}

	/**
	 * Tests that a local failover does not try to trump a global failover.
	 */
	@Test
	public void testGlobalRecoveryConcurrentToLocalRecovery() throws Exception {

		// the logic in this test is as follows:
		//  - start a job
		//  - cause a task failure and delay the local recovery action via the manual executor
		//  - cause a global failure that is recovering immediately
		//  - resume in local recovery action
		//  - validate that this does in fact not cause another task restart, because the global
		//    recovery should already have restarted the task graph

		final JobID jid = new JobID();
		final int parallelism = 2;

		final ManuallyTriggeredDirectExecutor executor = new ManuallyTriggeredDirectExecutor();

		final SimpleSlotProvider slotProvider = new SimpleSlotProvider(jid, parallelism);

		final ExecutionGraph graph = createSampleGraph(
			jid,
			new FailoverPipelinedRegionWithCustomExecutor(executor),
			new FixedDelayRestartStrategy(2, 0), // twice restart, no delay
			slotProvider,
			2);

		RestartPipelinedRegionStrategy strategy = (RestartPipelinedRegionStrategy)graph.getFailoverStrategy();

		final ExecutionJobVertex ejv = graph.getVerticesTopologically().iterator().next();
		final ExecutionVertex vertex1 = ejv.getTaskVertices()[0];
		final ExecutionVertex vertex2 = ejv.getTaskVertices()[1];

		graph.scheduleForExecution();
		assertEquals(JobStatus.RUNNING, graph.getState());
		assertEquals(JobStatus.RUNNING, strategy.getFailoverRegion(vertex1).getState());

		// let one of the vertices fail - that triggers a local recovery action
		vertex2.getCurrentExecutionAttempt().fail(new Exception("test failure"));
		assertEquals(ExecutionState.FAILED, vertex2.getCurrentExecutionAttempt().getState());
		assertEquals(JobStatus.CANCELLING, strategy.getFailoverRegion(vertex2).getState());

		// graph should still be running and the failover recovery action should be queued
		assertEquals(JobStatus.RUNNING, graph.getState());
		assertEquals(1, executor.numQueuedRunnables());

		// now fail the job globally - this failure is recoverable because the
		// fixed-delay restart strategy still has attempts left
		graph.failGlobal(new Exception("test exception"));

		assertEquals(JobStatus.FAILING, graph.getState());
		assertEquals(ExecutionState.FAILED, vertex2.getCurrentExecutionAttempt().getState());
		assertEquals(ExecutionState.CANCELING, vertex1.getCurrentExecutionAttempt().getState());

		// now report that cancelling is complete for the other vertex
		vertex1.getCurrentExecutionAttempt().cancelingComplete();

		// the global recovery restarts the whole graph
		waitUntilJobStatus(graph, JobStatus.RUNNING, 1000);
		assertEquals(JobStatus.RUNNING, graph.getState());

		waitUntilExecutionState(vertex1.getCurrentExecutionAttempt(), ExecutionState.DEPLOYING, 1000);
		waitUntilExecutionState(vertex2.getCurrentExecutionAttempt(), ExecutionState.DEPLOYING, 1000);
		vertex1.getCurrentExecutionAttempt().switchToRunning();
		vertex2.getCurrentExecutionAttempt().switchToRunning();
		assertEquals(ExecutionState.RUNNING, vertex1.getCurrentExecutionAttempt().getState());
		assertEquals(ExecutionState.RUNNING, vertex2.getCurrentExecutionAttempt().getState());

		// let the recovery action continue - this should do nothing any more
		executor.trigger();

		// validate that the graph is still peachy
		assertEquals(JobStatus.RUNNING, graph.getState());
		assertEquals(JobStatus.RUNNING, strategy.getFailoverRegion(vertex1).getState());
		assertEquals(JobStatus.RUNNING, strategy.getFailoverRegion(vertex2).getState());
		assertEquals(ExecutionState.RUNNING, vertex1.getCurrentExecutionAttempt().getState());
		assertEquals(ExecutionState.RUNNING, vertex2.getCurrentExecutionAttempt().getState());
		assertEquals(1, vertex1.getCurrentExecutionAttempt().getAttemptNumber());
		assertEquals(1, vertex2.getCurrentExecutionAttempt().getAttemptNumber());
		assertEquals(1, vertex1.getCopyOfPriorExecutionsList().size());
		assertEquals(1, vertex2.getCopyOfPriorExecutionsList().size());

		// make sure all slots are in use
		assertEquals(0, slotProvider.getNumberOfAvailableSlots());

		// validate that a task failure then can be handled by the local recovery
		vertex2.getCurrentExecutionAttempt().fail(new Exception("test failure"));
		assertEquals(1, executor.numQueuedRunnables());

		// let the local recovery action continue - this should recover the vertex2
		executor.trigger();

		waitUntilExecutionState(vertex2.getCurrentExecutionAttempt(), ExecutionState.DEPLOYING, 1000);
		vertex2.getCurrentExecutionAttempt().switchToRunning();

		// validate the local recovery result: only vertex2 was restarted
		assertEquals(JobStatus.RUNNING, graph.getState());
		assertEquals(JobStatus.RUNNING, strategy.getFailoverRegion(vertex1).getState());
		assertEquals(JobStatus.RUNNING, strategy.getFailoverRegion(vertex2).getState());
		assertEquals(ExecutionState.RUNNING, vertex1.getCurrentExecutionAttempt().getState());
		assertEquals(ExecutionState.RUNNING, vertex2.getCurrentExecutionAttempt().getState());
		assertEquals(1, vertex1.getCurrentExecutionAttempt().getAttemptNumber());
		assertEquals(2, vertex2.getCurrentExecutionAttempt().getAttemptNumber());
		assertEquals(1, vertex1.getCopyOfPriorExecutionsList().size());
		assertEquals(2, vertex2.getCopyOfPriorExecutionsList().size());

		// make sure all slots are in use
		assertEquals(0, slotProvider.getNumberOfAvailableSlots());
	}

	// ------------------------------------------------------------------------
	//  utilities
	// ------------------------------------------------------------------------

	private ExecutionGraph createSampleGraph(
			JobID jid,
			Factory failoverStrategy,
			RestartStrategy restartStrategy,
			SlotProvider slotProvider,
			int parallelism) throws Exception {

		// build a simple execution graph with one job vertex, parallelism 2
		final ExecutionGraph graph = new ExecutionGraph(
			TestingUtils.defaultExecutor(),
			TestingUtils.defaultExecutor(),
			jid,
			"test job",
			new Configuration(),
			new SerializedValue<>(new ExecutionConfig()),
			Time.seconds(10),
			restartStrategy,
			failoverStrategy,
			Collections.emptyList(),
			Collections.emptyList(),
			slotProvider,
			getClass().getClassLoader());

		JobVertex jv = new JobVertex("test vertex");
		jv.setInvokableClass(NoOpInvokable.class);
		jv.setParallelism(parallelism);

		JobGraph jg = new JobGraph(jid, "testjob", jv);
		graph.attachJobGraph(jg.getVerticesSortedTopologicallyFromSources());

		return graph;
	}

	// ------------------------------------------------------------------------

	/**
	 * Failover strategy factory that creates {@link RestartPipelinedRegionStrategy}
	 * instances backed by the given executor, so that tests can use a manually
	 * triggered executor to delay and resume local recovery actions.
	 */
	private static class FailoverPipelinedRegionWithCustomExecutor implements Factory {

		private final Executor executor;

		FailoverPipelinedRegionWithCustomExecutor(Executor executor) {
			this.executor = executor;
		}

		@Override
		public FailoverStrategy create(ExecutionGraph executionGraph) {
			return new RestartPipelinedRegionStrategy(executionGraph, executor);
		}
	}
}
/* * Copyright 2015 Alexey Andreev. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.teavm.classlib.java.util; public class TGregorianCalendar extends TCalendar { public static final int BC = 0; public static final int AD = 1; private static final long defaultGregorianCutover = -12219292800000L; private long gregorianCutover = defaultGregorianCutover; private transient int changeYear = 1582; private transient int julianSkew = ((changeYear - 2000) / 400) + julianError() - ((changeYear - 2000) / 100); static byte[] daysInMonth = new byte[] { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }; private static int[] daysInYear = new int[] { 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334 }; private static int[] maximums = new int[] { 1, 292278994, 11, 53, 6, 31, 366, 7, 6, 1, 11, 23, 59, 59, 999, 14 * 3600 * 1000, 7200000 }; private static int[] minimums = new int[] { 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, -13 * 3600 * 1000, 0 }; private static int[] leastMaximums = new int[] { 1, 292269054, 11, 50, 3, 28, 355, 7, 3, 1, 11, 23, 59, 59, 999, 50400000, 1200000 }; private boolean isCached; private int[] cachedFields = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; private long nextMidnightMillis; private long lastMidnightMillis; private int currentYearSkew = 10; private int lastYearSkew; public TGregorianCalendar() { this(TLocale.getDefault()); } public TGregorianCalendar(int year, int month, int day) { set(year, month, day); } public TGregorianCalendar(int year, int month, int 
day, int hour, int minute) { set(year, month, day, hour, minute); } public TGregorianCalendar(int year, int month, int day, int hour, int minute, int second) { set(year, month, day, hour, minute, second); } TGregorianCalendar(long milliseconds) { this(false); setTimeInMillis(milliseconds); } public TGregorianCalendar(TLocale locale) { this(TTimeZone.getDefault(), locale); } public TGregorianCalendar(TTimeZone zone) { this(zone, TLocale.getDefault()); } public TGregorianCalendar(TTimeZone timezone, TLocale locale) { super(timezone, locale); setTimeInMillis(System.currentTimeMillis()); } TGregorianCalendar(@SuppressWarnings("unused") boolean ignored) { super(TTimeZone.getDefault()); setFirstDayOfWeek(SUNDAY); setMinimalDaysInFirstWeek(1); } @Override public void add(int field, int value) { if (value == 0) { return; } if (field < 0 || field >= ZONE_OFFSET) { throw new IllegalArgumentException(); } isCached = false; if (field == ERA) { complete(); if (fields[ERA] == AD) { if (value >= 0) { return; } set(ERA, BC); } else { if (value <= 0) { return; } set(ERA, AD); } complete(); return; } if (field == YEAR || field == MONTH) { complete(); if (field == MONTH) { int month = fields[MONTH] + value; if (month < 0) { value = (month - 11) / 12; month = 12 + (month % 12); } else { value = month / 12; } set(MONTH, month % 12); } set(YEAR, fields[YEAR] + value); int days = daysInMonth(isLeapYear(fields[YEAR]), fields[MONTH]); if (fields[DATE] > days) { set(DATE, days); } complete(); return; } long multiplier = 0; getTimeInMillis(); // Update the time switch (field) { case MILLISECOND: time += value; break; case SECOND: time += value * 1000L; break; case MINUTE: time += value * 60000L; break; case HOUR: case HOUR_OF_DAY: time += value * 3600000L; break; case AM_PM: multiplier = 43200000L; break; case DATE: case DAY_OF_YEAR: case DAY_OF_WEEK: multiplier = 86400000L; break; case WEEK_OF_YEAR: case WEEK_OF_MONTH: case DAY_OF_WEEK_IN_MONTH: multiplier = 604800000L; break; } if 
(multiplier > 0) { int offset = getTimeZoneOffset(time); time += value * multiplier; int newOffset = getTimeZoneOffset(time); // Adjust for moving over a DST boundary if (newOffset != offset) { time += offset - newOffset; } } areFieldsSet = false; complete(); } @Override public Object clone() { TGregorianCalendar thisClone = (TGregorianCalendar) super.clone(); thisClone.cachedFields = cachedFields.clone(); return thisClone; } private void fullFieldsCalc(long timeVal, int millis, int zoneOffset) { long days = timeVal / 86400000; if (millis < 0) { millis += 86400000; days--; } // Cannot add ZONE_OFFSET to time as it might overflow millis += zoneOffset; while (millis < 0) { millis += 86400000; days--; } while (millis >= 86400000) { millis -= 86400000; days++; } int dayOfYear = computeYearAndDay(days, timeVal + zoneOffset); fields[DAY_OF_YEAR] = dayOfYear; if (fields[YEAR] == changeYear && gregorianCutover <= timeVal + zoneOffset) { dayOfYear += currentYearSkew; } int month = dayOfYear / 32; boolean leapYear = isLeapYear(fields[YEAR]); int date = dayOfYear - daysInYear(leapYear, month); if (date > daysInMonth(leapYear, month)) { date -= daysInMonth(leapYear, month); month++; } fields[DAY_OF_WEEK] = mod7(days - 3) + 1; int dstOffset = getTimeZoneOffset(timeVal); if (fields[YEAR] > 0) { dstOffset -= zoneOffset; } fields[DST_OFFSET] = dstOffset; if (dstOffset != 0) { long oldDays = days; millis += dstOffset; if (millis < 0) { millis += 86400000; days--; } else if (millis >= 86400000) { millis -= 86400000; days++; } if (oldDays != days) { dayOfYear = computeYearAndDay(days, timeVal - zoneOffset + dstOffset); fields[DAY_OF_YEAR] = dayOfYear; if (fields[YEAR] == changeYear && gregorianCutover <= timeVal - zoneOffset + dstOffset) { dayOfYear += currentYearSkew; } month = dayOfYear / 32; leapYear = isLeapYear(fields[YEAR]); date = dayOfYear - daysInYear(leapYear, month); if (date > daysInMonth(leapYear, month)) { date -= daysInMonth(leapYear, month); month++; } 
fields[DAY_OF_WEEK] = mod7(days - 3) + 1; } } fields[MILLISECOND] = millis % 1000; millis /= 1000; fields[SECOND] = millis % 60; millis /= 60; fields[MINUTE] = millis % 60; millis /= 60; fields[HOUR_OF_DAY] = millis % 24; fields[AM_PM] = fields[HOUR_OF_DAY] > 11 ? 1 : 0; fields[HOUR] = fields[HOUR_OF_DAY] % 12; if (fields[YEAR] <= 0) { fields[ERA] = BC; fields[YEAR] = -fields[YEAR] + 1; } else { fields[ERA] = AD; } fields[MONTH] = month; fields[DATE] = date; fields[DAY_OF_WEEK_IN_MONTH] = (date - 1) / 7 + 1; fields[WEEK_OF_MONTH] = (date - 1 + mod7(days - date - 2 - (getFirstDayOfWeek() - 1))) / 7 + 1; int daysFromStart = mod7(days - 3 - (fields[DAY_OF_YEAR] - 1) - (getFirstDayOfWeek() - 1)); int week = (fields[DAY_OF_YEAR] - 1 + daysFromStart) / 7 + (7 - daysFromStart >= getMinimalDaysInFirstWeek() ? 1 : 0); if (week == 0) { fields[WEEK_OF_YEAR] = 7 - mod7(daysFromStart - (isLeapYear(fields[YEAR] - 1) ? 2 : 1)) >= getMinimalDaysInFirstWeek() ? 53 : 52; } else if (fields[DAY_OF_YEAR] >= (leapYear ? 367 : 366) - mod7(daysFromStart + (leapYear ? 2 : 1))) { fields[WEEK_OF_YEAR] = 7 - mod7(daysFromStart + (leapYear ? 2 : 1)) >= getMinimalDaysInFirstWeek() ? 
1 : week; } else { fields[WEEK_OF_YEAR] = week; } } private void cachedFieldsCheckAndGet(long timeVal, long newTimeMillis, long newTimeMillisAdjusted, int millis, int zoneOffset) { int dstOffset = fields[DST_OFFSET]; if (!isCached || newTimeMillis >= nextMidnightMillis || newTimeMillis <= lastMidnightMillis || cachedFields[4] != zoneOffset || (dstOffset == 0 && (newTimeMillisAdjusted >= nextMidnightMillis)) || (dstOffset != 0 && (newTimeMillisAdjusted <= lastMidnightMillis))) { fullFieldsCalc(timeVal, millis, zoneOffset); isCached = false; } else { fields[YEAR] = cachedFields[0]; fields[MONTH] = cachedFields[1]; fields[DATE] = cachedFields[2]; fields[DAY_OF_WEEK] = cachedFields[3]; fields[ERA] = cachedFields[5]; fields[WEEK_OF_YEAR] = cachedFields[6]; fields[WEEK_OF_MONTH] = cachedFields[7]; fields[DAY_OF_YEAR] = cachedFields[8]; fields[DAY_OF_WEEK_IN_MONTH] = cachedFields[9]; } } int getTimeZoneOffset(long localTime) { return getTimeZone().getOffset(localTime); } @Override protected void computeFields() { int zoneOffset = getTimeZoneOffset(time); if (!isSet[ZONE_OFFSET]) { fields[ZONE_OFFSET] = zoneOffset; } int millis = (int) (time % 86400000); int savedMillis = millis; int dstOffset = fields[DST_OFFSET]; // compute without a change in daylight saving time int offset = zoneOffset + dstOffset; long newTime = time + offset; if (time > 0L && newTime < 0L && offset > 0) { newTime = 0x7fffffffffffffffL; } else if (time < 0L && newTime > 0L && offset < 0) { newTime = 0x8000000000000000L; } if (isCached) { if (millis < 0) { millis += 86400000; } // Cannot add ZONE_OFFSET to time as it might overflow millis += zoneOffset; millis += dstOffset; if (millis < 0) { millis += 86400000; } else if (millis >= 86400000) { millis -= 86400000; } fields[MILLISECOND] = millis % 1000; millis /= 1000; fields[SECOND] = millis % 60; millis /= 60; fields[MINUTE] = millis % 60; millis /= 60; fields[HOUR_OF_DAY] = millis % 24; millis /= 24; fields[AM_PM] = fields[HOUR_OF_DAY] > 11 ? 
1 : 0; fields[HOUR] = fields[HOUR_OF_DAY] % 12; long newTimeAdjusted = newTime; if (newTime > 0L && newTimeAdjusted < 0L && dstOffset == 0) { newTimeAdjusted = 0x7fffffffffffffffL; } else if (newTime < 0L && newTimeAdjusted > 0L && dstOffset != 0) { newTimeAdjusted = 0x8000000000000000L; } cachedFieldsCheckAndGet(time, newTime, newTimeAdjusted, savedMillis, zoneOffset); } else { fullFieldsCalc(time, savedMillis, zoneOffset); } for (int i = 0; i < FIELD_COUNT; i++) { isSet[i] = true; } // Caching if (!isCached && newTime != 0x7fffffffffffffffL && newTime != 0x8000000000000000L) { int cacheMillis = 0; cachedFields[0] = fields[YEAR]; cachedFields[1] = fields[MONTH]; cachedFields[2] = fields[DATE]; cachedFields[3] = fields[DAY_OF_WEEK]; cachedFields[4] = zoneOffset; cachedFields[5] = fields[ERA]; cachedFields[6] = fields[WEEK_OF_YEAR]; cachedFields[7] = fields[WEEK_OF_MONTH]; cachedFields[8] = fields[DAY_OF_YEAR]; cachedFields[9] = fields[DAY_OF_WEEK_IN_MONTH]; cacheMillis += (23 - fields[HOUR_OF_DAY]) * 60 * 60 * 1000; cacheMillis += (59 - fields[MINUTE]) * 60 * 1000; cacheMillis += (59 - fields[SECOND]) * 1000; nextMidnightMillis = newTime + cacheMillis; cacheMillis = fields[HOUR_OF_DAY] * 60 * 60 * 1000; cacheMillis += fields[MINUTE] * 60 * 1000; cacheMillis += fields[SECOND] * 1000; lastMidnightMillis = newTime - cacheMillis; isCached = true; } } @Override protected void computeTime() { if (!isLenient()) { if (isSet[HOUR_OF_DAY]) { if (fields[HOUR_OF_DAY] < 0 || fields[HOUR_OF_DAY] > 23) { throw new IllegalArgumentException(); } } else if (isSet[HOUR] && (fields[HOUR] < 0 || fields[HOUR] > 11)) { throw new IllegalArgumentException(); } if (isSet[MINUTE] && (fields[MINUTE] < 0 || fields[MINUTE] > 59)) { throw new IllegalArgumentException(); } if (isSet[SECOND] && (fields[SECOND] < 0 || fields[SECOND] > 59)) { throw new IllegalArgumentException(); } if (isSet[MILLISECOND] && (fields[MILLISECOND] < 0 || fields[MILLISECOND] > 999)) { throw new 
IllegalArgumentException(); } if (isSet[WEEK_OF_YEAR] && (fields[WEEK_OF_YEAR] < 1 || fields[WEEK_OF_YEAR] > 53)) { throw new IllegalArgumentException(); } if (isSet[DAY_OF_WEEK] && (fields[DAY_OF_WEEK] < 1 || fields[DAY_OF_WEEK] > 7)) { throw new IllegalArgumentException(); } if (isSet[DAY_OF_WEEK_IN_MONTH] && (fields[DAY_OF_WEEK_IN_MONTH] < 1 || fields[DAY_OF_WEEK_IN_MONTH] > 6)) { throw new IllegalArgumentException(); } if (isSet[WEEK_OF_MONTH] && (fields[WEEK_OF_MONTH] < 1 || fields[WEEK_OF_MONTH] > 6)) { throw new IllegalArgumentException(); } if (isSet[AM_PM] && fields[AM_PM] != AM && fields[AM_PM] != PM) { throw new IllegalArgumentException(); } if (isSet[HOUR] && (fields[HOUR] < 0 || fields[HOUR] > 11)) { throw new IllegalArgumentException(); } if (isSet[YEAR]) { if (isSet[ERA] && fields[ERA] == BC && (fields[YEAR] < 1 || fields[YEAR] > 292269054)) { throw new IllegalArgumentException(); } else if (fields[YEAR] < 1 || fields[YEAR] > 292278994) { throw new IllegalArgumentException(); } } if (isSet[MONTH] && (fields[MONTH] < 0 || fields[MONTH] > 11)) { throw new IllegalArgumentException(); } } long timeVal; long hour = 0; if (isSet[HOUR_OF_DAY] && lastTimeFieldSet != HOUR) { hour = fields[HOUR_OF_DAY]; } else if (isSet[HOUR]) { hour = (fields[AM_PM] * 12) + fields[HOUR]; } timeVal = hour * 3600000; if (isSet[MINUTE]) { timeVal += ((long) fields[MINUTE]) * 60000; } if (isSet[SECOND]) { timeVal += ((long) fields[SECOND]) * 1000; } if (isSet[MILLISECOND]) { timeVal += fields[MILLISECOND]; } long days; int year = isSet[YEAR] ? 
fields[YEAR] : 1970; if (isSet[ERA]) { // Always test for valid ERA, even if the Calendar is lenient if (fields[ERA] != BC && fields[ERA] != AD) { throw new IllegalArgumentException(); } if (fields[ERA] == BC) { year = 1 - year; } } boolean weekMonthSet = isSet[WEEK_OF_MONTH] || isSet[DAY_OF_WEEK_IN_MONTH]; boolean useMonth = (isSet[DATE] || isSet[MONTH] || weekMonthSet) && lastDateFieldSet != DAY_OF_YEAR; if (useMonth && (lastDateFieldSet == DAY_OF_WEEK || lastDateFieldSet == WEEK_OF_YEAR)) { if (isSet[WEEK_OF_YEAR] && isSet[DAY_OF_WEEK]) { useMonth = lastDateFieldSet != WEEK_OF_YEAR && weekMonthSet && isSet[DAY_OF_WEEK]; } else if (isSet[DAY_OF_YEAR]) { useMonth = isSet[DATE] && isSet[MONTH]; } } if (useMonth) { int month = fields[MONTH]; year += month / 12; month %= 12; if (month < 0) { year--; month += 12; } boolean leapYear = isLeapYear(year); days = daysFromBaseYear(year) + daysInYear(leapYear, month); boolean useDate = isSet[DATE]; if (useDate && (lastDateFieldSet == DAY_OF_WEEK || lastDateFieldSet == WEEK_OF_MONTH || lastDateFieldSet == DAY_OF_WEEK_IN_MONTH)) { useDate = !(isSet[DAY_OF_WEEK] && weekMonthSet); } if (useDate) { if (!isLenient() && (fields[DATE] < 1 || fields[DATE] > daysInMonth(leapYear, month))) { throw new IllegalArgumentException(); } days += fields[DATE] - 1; } else { int dayOfWeek; if (isSet[DAY_OF_WEEK]) { dayOfWeek = fields[DAY_OF_WEEK] - 1; } else { dayOfWeek = getFirstDayOfWeek() - 1; } if (isSet[WEEK_OF_MONTH] && lastDateFieldSet != DAY_OF_WEEK_IN_MONTH) { int skew = mod7(days - 3 - (getFirstDayOfWeek() - 1)); days += (fields[WEEK_OF_MONTH] - 1) * 7 + mod7(skew + dayOfWeek - (days - 2)) - skew; } else if (isSet[DAY_OF_WEEK_IN_MONTH]) { if (fields[DAY_OF_WEEK_IN_MONTH] >= 0) { days += mod7(dayOfWeek - (days - 3)) + (fields[DAY_OF_WEEK_IN_MONTH] - 1) * 7; } else { days += daysInMonth(leapYear, month) + mod7(dayOfWeek - (days + daysInMonth(leapYear, month) - 3)) + fields[DAY_OF_WEEK_IN_MONTH] * 7; } } else if (isSet[DAY_OF_WEEK]) { int 
skew = mod7(days - 3 - (getFirstDayOfWeek() - 1)); days += mod7(mod7(skew + dayOfWeek - (days - 3)) - skew); } } } else { boolean useWeekYear = isSet[WEEK_OF_YEAR] && lastDateFieldSet != DAY_OF_YEAR; if (useWeekYear && isSet[DAY_OF_YEAR]) { useWeekYear = isSet[DAY_OF_WEEK]; } days = daysFromBaseYear(year); if (useWeekYear) { int dayOfWeek; if (isSet[DAY_OF_WEEK]) { dayOfWeek = fields[DAY_OF_WEEK] - 1; } else { dayOfWeek = getFirstDayOfWeek() - 1; } int skew = mod7(days - 3 - (getFirstDayOfWeek() - 1)); days += (fields[WEEK_OF_YEAR] - 1) * 7 + mod7(skew + dayOfWeek - (days - 3)) - skew; if (7 - skew < getMinimalDaysInFirstWeek()) { days += 7; } } else if (isSet[DAY_OF_YEAR]) { if (!isLenient() && (fields[DAY_OF_YEAR] < 1 || fields[DAY_OF_YEAR] > (365 + (isLeapYear(year) ? 1 : 0)))) { throw new IllegalArgumentException(); } days += fields[DAY_OF_YEAR] - 1; } else if (isSet[DAY_OF_WEEK]) { days += mod7(fields[DAY_OF_WEEK] - 1 - (days - 3)); } } lastDateFieldSet = 0; timeVal += days * 86400000; // Use local time to compare with the gregorian change if (year == changeYear && timeVal >= gregorianCutover + julianError() * 86400000L) { timeVal -= julianError() * 86400000L; } this.time = timeVal - getTimeZoneOffset(timeVal); } private int computeYearAndDay(long dayCount, long localTime) { int year = 1970; long days = dayCount; if (localTime < gregorianCutover) { days -= julianSkew; } int approxYears; while ((approxYears = (int) (days / 365)) != 0) { year = year + approxYears; days = dayCount - daysFromBaseYear(year); } if (days < 0) { year = year - 1; days = days + daysInYear(year); } fields[YEAR] = year; return (int) days + 1; } private long daysFromBaseYear(int iyear) { long year = iyear; if (year >= 1970) { long days = (year - 1970) * 365 + ((year - 1969) / 4); if (year > changeYear) { days -= (year - 1901) / 100 - (year - 1601) / 400; } else { if (year == changeYear) { days += currentYearSkew; } else if (year == changeYear - 1) { days += lastYearSkew; } else { days += 
julianSkew; } } return days; } else if (year <= changeYear) { return (year - 1970) * 365 + ((year - 1972) / 4) + julianSkew; } return (year - 1970) * 365 + ((year - 1972) / 4) - ((year - 2000) / 100) + ((year - 2000) / 400); } private int daysInMonth() { return daysInMonth(isLeapYear(fields[YEAR]), fields[MONTH]); } private int daysInMonth(boolean leapYear, int month) { if (leapYear && month == FEBRUARY) { return daysInMonth[month] + 1; } return daysInMonth[month]; } private int daysInYear(int year) { int daysInYear = isLeapYear(year) ? 366 : 365; if (year == changeYear) { daysInYear -= currentYearSkew; } if (year == changeYear - 1) { daysInYear -= lastYearSkew; } return daysInYear; } private int daysInYear(boolean leapYear, int month) { if (leapYear && month > FEBRUARY) { return daysInYear[month] + 1; } return daysInYear[month]; } @Override public boolean equals(Object object) { return super.equals(object) && gregorianCutover == ((TGregorianCalendar) object).gregorianCutover; } @Override public int getActualMaximum(int field) { int value = maximums[field]; if (value == leastMaximums[field]) { return value; } switch (field) { case WEEK_OF_YEAR: case WEEK_OF_MONTH: isCached = false; break; } complete(); long orgTime = time; int result = 0; switch (field) { case WEEK_OF_YEAR: set(DATE, 31); set(MONTH, DECEMBER); result = get(WEEK_OF_YEAR); if (result == 1) { set(DATE, 31 - 7); result = get(WEEK_OF_YEAR); } areFieldsSet = false; break; case WEEK_OF_MONTH: set(DATE, daysInMonth()); result = get(WEEK_OF_MONTH); areFieldsSet = false; break; case DATE: return daysInMonth(); case DAY_OF_YEAR: return daysInYear(fields[YEAR]); case DAY_OF_WEEK_IN_MONTH: result = get(DAY_OF_WEEK_IN_MONTH) + ((daysInMonth() - get(DATE)) / 7); break; case YEAR: TGregorianCalendar clone = (TGregorianCalendar) clone(); if (get(ERA) == AD) { clone.setTimeInMillis(Long.MAX_VALUE); } else { clone.setTimeInMillis(Long.MIN_VALUE); } result = clone.get(YEAR); clone.set(YEAR, get(YEAR)); if 
(clone.before(this)) { result--; } break; case DST_OFFSET: result = getMaximum(DST_OFFSET); break; } time = orgTime; return result; } @Override public int getActualMinimum(int field) { return getMinimum(field); } @Override public int getGreatestMinimum(int field) { return minimums[field]; } public final TDate getGregorianChange() { return new TDate(gregorianCutover); } @Override public int getLeastMaximum(int field) { // return value for WEEK_OF_YEAR should make corresponding changes when // the gregorian change date have been reset. if (gregorianCutover != defaultGregorianCutover && field == WEEK_OF_YEAR) { long currentTimeInMillis = time; setTimeInMillis(gregorianCutover); int actual = getActualMaximum(field); setTimeInMillis(currentTimeInMillis); return actual; } return leastMaximums[field]; } @Override public int getMaximum(int field) { return maximums[field]; } @Override public int getMinimum(int field) { return minimums[field]; } @Override public int hashCode() { return super.hashCode() + ((int) (gregorianCutover >>> 32) ^ (int) gregorianCutover); } public boolean isLeapYear(int year) { if (year > changeYear) { return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0); } return year % 4 == 0; } private int julianError() { return changeYear / 100 - changeYear / 400 - 2; } private int mod(int value, int mod) { int rem = value % mod; if (value < 0 && rem < 0) { return rem + mod; } return rem; } private int mod7(long num1) { int rem = (int) (num1 % 7); if (num1 < 0 && rem < 0) { return rem + 7; } return rem; } @Override public void roll(int field, int value) { if (value == 0) { return; } if (field < 0 || field >= ZONE_OFFSET) { throw new IllegalArgumentException(); } isCached = false; complete(); int days; int day; int mod; int maxWeeks; int newWeek; int max = -1; switch (field) { case YEAR: max = maximums[field]; break; case WEEK_OF_YEAR: days = daysInYear(fields[YEAR]); day = DAY_OF_YEAR; mod = mod7(fields[DAY_OF_WEEK] - fields[day] - (getFirstDayOfWeek() - 
1)); maxWeeks = (days - 1 + mod) / 7 + 1; newWeek = mod(fields[field] - 1 + value, maxWeeks) + 1; if (newWeek == maxWeeks) { int addDays = (newWeek - fields[field]) * 7; if (fields[day] > addDays && fields[day] + addDays > days) { set(field, 1); } else { set(field, newWeek - 1); } } else if (newWeek == 1) { int week = (fields[day] - ((fields[day] - 1) / 7 * 7) - 1 + mod) / 7 + 1; if (week > 1) { set(field, 1); } else { set(field, newWeek); } } else { set(field, newWeek); } break; case WEEK_OF_MONTH: days = daysInMonth(); day = DATE; mod = mod7(fields[DAY_OF_WEEK] - fields[day] - (getFirstDayOfWeek() - 1)); maxWeeks = (days - 1 + mod) / 7 + 1; newWeek = mod(fields[field] - 1 + value, maxWeeks) + 1; if (newWeek == maxWeeks) { if (fields[day] + (newWeek - fields[field]) * 7 > days) { set(day, days); } else { set(field, newWeek); } } else if (newWeek == 1) { int week = (fields[day] - ((fields[day] - 1) / 7 * 7) - 1 + mod) / 7 + 1; if (week > 1) { set(day, 1); } else { set(field, newWeek); } } else { set(field, newWeek); } break; case DATE: max = daysInMonth(); break; case DAY_OF_YEAR: max = daysInYear(fields[YEAR]); break; case DAY_OF_WEEK: max = maximums[field]; lastDateFieldSet = WEEK_OF_MONTH; break; case DAY_OF_WEEK_IN_MONTH: max = (fields[DATE] + ((daysInMonth() - fields[DATE]) / 7 * 7) - 1) / 7 + 1; break; case ERA: case MONTH: case AM_PM: case HOUR: case HOUR_OF_DAY: case MINUTE: case SECOND: case MILLISECOND: set(field, mod(fields[field] + value, maximums[field] + 1)); if (field == MONTH && fields[DATE] > daysInMonth()) { set(DATE, daysInMonth()); } else if (field == AM_PM) { lastTimeFieldSet = HOUR; } break; } if (max != -1) { set(field, mod(fields[field] - 1 + value, max) + 1); } complete(); } @Override public void roll(int field, boolean increment) { roll(field, increment ? 
1 : -1); } public void setGregorianChange(TDate date) { gregorianCutover = date.getTime(); TGregorianCalendar cal = new TGregorianCalendar(); cal.setTime(date); changeYear = cal.get(YEAR); if (cal.get(ERA) == BC) { changeYear = 1 - changeYear; } julianSkew = ((changeYear - 2000) / 400) + julianError() - ((changeYear - 2000) / 100); isCached = false; int dayOfYear = cal.get(DAY_OF_YEAR); if (dayOfYear < julianSkew) { currentYearSkew = dayOfYear - 1; lastYearSkew = julianSkew - dayOfYear + 1; } else { lastYearSkew = 0; currentYearSkew = julianSkew; } isCached = false; } @Override public void setFirstDayOfWeek(int value) { super.setFirstDayOfWeek(value); isCached = false; } @Override public void setMinimalDaysInFirstWeek(int value) { super.setMinimalDaysInFirstWeek(value); isCached = false; } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticfilesystem.model;

import java.io.Serializable;

/**
 * Result of a DescribeMountTargets call: an optional echo of the request's
 * <code>Marker</code>, one page of <code>MountTargetDescription</code>
 * objects, and an optional <code>NextMarker</code> for fetching the
 * following page.
 */
public class DescribeMountTargetsResult implements Serializable, Cloneable {

    /** Echo of the <code>Marker</code> supplied in the request, if any. */
    private String marker;

    /** The mount targets returned for this page of results. */
    private com.amazonaws.internal.SdkInternalList<MountTargetDescription> mountTargets;

    /** When present, pass as <code>Marker</code> to retrieve the next page. */
    private String nextMarker;

    /**
     * Sets the marker echoed back from the request.
     *
     * @param marker
     *        the request's <code>Marker</code> value, if one was supplied
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the marker echoed back from the request, or {@code null} when
     * the request carried none.
     *
     * @return the echoed <code>Marker</code> value
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker
     *        the request's <code>Marker</code> value, if one was supplied
     * @return this object, so calls can be chained
     */
    public DescribeMountTargetsResult withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Returns this page's mount targets. Never {@code null}: an empty
     * internal list is created on first access when none has been set.
     *
     * @return the mount targets as a list of
     *         <code>MountTargetDescription</code> objects
     */
    public java.util.List<MountTargetDescription> getMountTargets() {
        if (this.mountTargets == null) {
            this.mountTargets = new com.amazonaws.internal.SdkInternalList<MountTargetDescription>();
        }
        return this.mountTargets;
    }

    /**
     * Replaces the mount-target list with a copy of the given collection,
     * or clears it when {@code null} is passed.
     *
     * @param mountTargets
     *        the mount targets for this page, or {@code null} to clear
     */
    public void setMountTargets(
            java.util.Collection<MountTargetDescription> mountTargets) {
        this.mountTargets = (mountTargets == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<MountTargetDescription>(mountTargets);
    }

    /**
     * Appends the given mount targets to the current list, creating the list
     * first when absent.
     * <p>
     * <b>NOTE:</b> this appends to any existing values; use
     * {@link #setMountTargets(java.util.Collection)} or
     * {@link #withMountTargets(java.util.Collection)} to replace them
     * instead.
     *
     * @param mountTargets
     *        the mount targets to append
     * @return this object, so calls can be chained
     */
    public DescribeMountTargetsResult withMountTargets(
            MountTargetDescription... mountTargets) {
        if (this.mountTargets == null) {
            setMountTargets(new com.amazonaws.internal.SdkInternalList<MountTargetDescription>(
                    mountTargets.length));
        }
        for (int i = 0; i < mountTargets.length; i++) {
            this.mountTargets.add(mountTargets[i]);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setMountTargets(java.util.Collection)}.
     *
     * @param mountTargets
     *        the mount targets for this page, or {@code null} to clear
     * @return this object, so calls can be chained
     */
    public DescribeMountTargetsResult withMountTargets(
            java.util.Collection<MountTargetDescription> mountTargets) {
        setMountTargets(mountTargets);
        return this;
    }

    /**
     * Sets the pagination marker for retrieving the next set of mount
     * targets.
     *
     * @param nextMarker
     *        marker to supply as <code>Marker</code> in a subsequent request
     */
    public void setNextMarker(String nextMarker) {
        this.nextMarker = nextMarker;
    }

    /**
     * Returns the pagination marker for the next page, or {@code null} when
     * there are no further mount targets.
     *
     * @return marker to supply as <code>Marker</code> in a subsequent request
     */
    public String getNextMarker() {
        return this.nextMarker;
    }

    /**
     * Fluent variant of {@link #setNextMarker(String)}.
     *
     * @param nextMarker
     *        marker to supply as <code>Marker</code> in a subsequent request
     * @return this object, so calls can be chained
     */
    public DescribeMountTargetsResult withNextMarker(String nextMarker) {
        setNextMarker(nextMarker);
        return this;
    }

    /**
     * Renders the non-null members; intended for testing and debugging only.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getMarker() != null) {
            text.append("Marker: " + getMarker() + ",");
        }
        // getMountTargets() lazily creates the list, so this branch always
        // runs — kept that way deliberately to match the generated contract.
        if (getMountTargets() != null) {
            text.append("MountTargets: " + getMountTargets() + ",");
        }
        if (getNextMarker() != null) {
            text.append("NextMarker: " + getNextMarker());
        }
        return text.append("}").toString();
    }

    /**
     * Reports whether exactly one side is null, or both are non-null but
     * unequal — i.e. the two values do NOT match.
     */
    private static boolean differs(Object left, Object right) {
        if (left == null || right == null) {
            return left != right;
        }
        return !left.equals(right);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeMountTargetsResult)) {
            return false;
        }
        DescribeMountTargetsResult that = (DescribeMountTargetsResult) obj;
        // Compare through the getters so the lazy mount-target list
        // initialization behaves exactly as in the generated original.
        return !differs(getMarker(), that.getMarker())
                && !differs(getMountTargets(), that.getMountTargets())
                && !differs(getNextMarker(), that.getNextMarker());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getMarker() == null) ? 0 : getMarker().hashCode());
        result = prime * result + ((getMountTargets() == null) ? 0 : getMountTargets().hashCode());
        result = prime * result + ((getNextMarker() == null) ? 0 : getNextMarker().hashCode());
        return result;
    }

    /**
     * Shallow copy via {@link Object#clone()}.
     *
     * @return a clone of this result object
     */
    @Override
    public DescribeMountTargetsResult clone() {
        try {
            return (DescribeMountTargetsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
package de.fau.cs.mad.rpgpack.character; import java.util.ArrayList; import java.util.regex.Matcher; import java.util.regex.Pattern; import de.fau.cs.mad.rpgpack.R; import de.fau.cs.mad.rpgpack.ReattachingPopup; import de.fau.cs.mad.rpgpack.SlideoutNavigationActivity; import de.fau.cs.mad.rpgpack.game.CharacterPlayActivity; import de.fau.cs.mad.rpgpack.jackson.IEditableContent; import de.fau.cs.mad.rpgpack.jackson.Row; import de.fau.cs.mad.rpgpack.jackson.Table; import de.fau.cs.mad.rpgpack.matrix.MatrixFragment; import de.fau.cs.mad.rpgpack.matrix.MatrixItem; import de.fau.cs.mad.rpgpack.template_generator.FolderElementData; import de.fau.cs.mad.rpgpack.template_generator.FolderFragment; import de.fau.cs.mad.rpgpack.template_generator.GeneralFragment; import de.fau.cs.mad.rpgpack.template_generator.MyClickableSpan; import de.fau.cs.mad.rpgpack.template_generator.TableFragment; import de.fau.cs.mad.rpgpack.template_generator.TableFragment.content_type; import android.content.Context; import android.graphics.Paint; import android.graphics.Typeface; import android.graphics.drawable.BitmapDrawable; import android.text.Editable; import android.text.Selection; import android.text.Spannable; import android.text.SpannableStringBuilder; import android.text.TextWatcher; import android.text.method.LinkMovementMethod; import android.text.style.StyleSpan; import android.text.style.UnderlineSpan; import android.util.DisplayMetrics; import android.util.Log; import android.util.TypedValue; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup.LayoutParams; import android.view.WindowManager; import android.view.View.OnClickListener; import android.view.inputmethod.InputMethodManager; import android.view.ViewGroup; import android.widget.BaseExpandableListAdapter; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.EditText; import 
android.widget.LinearLayout; import android.widget.TextView; import android.widget.ToggleButton; import android.widget.CompoundButton.OnCheckedChangeListener; public class CustomExpandableListAdapter extends BaseExpandableListAdapter { private Context mContext; private String[][] mContents; private String[] mTitles; private content_type[][] mTypes; private String[] mHeadlines; private Table mJacksonTable; TableFragment mBelongsTo; public CustomExpandableListAdapter(Context context, Table jacksonTable, TableFragment belongsTo, String[] headlines, content_type[][] type, String[] groupNames, String[][] contents) { super(); if(groupNames.length != contents.length) { throw new IllegalArgumentException("Titles and Contents must be the same size."); } mTypes = type; mHeadlines = headlines; mContext = context; mContents = contents; mTitles = groupNames; mJacksonTable = jacksonTable; mBelongsTo = belongsTo; } @Override public String getChild(int groupPosition, int childPosition) { return mContents[groupPosition][childPosition]; } @Override public long getChildId(int groupPosition, int childPosition) { return 0; } //NOTE: childPosition == columnIndex; groupPosition == rowIndex @Override public View getChildView(final int groupPosition, final int childPosition, boolean isLastChild, View convertView, ViewGroup parent) { // Log.d("CustomExpandableListAdapter", "getChildView!"); if (convertView == null) { LayoutInflater infalInflater = (LayoutInflater) mContext .getSystemService(Context.LAYOUT_INFLATER_SERVICE); convertView = infalInflater.inflate(R.layout.table_view_child_item, null); } LinearLayout content = (LinearLayout) convertView.findViewById(R.id.content_keeper); content.removeAllViews(); final String headline = mHeadlines[childPosition]; if(mTypes[groupPosition][childPosition] == content_type.editText){ TextView txt = new TextView(mContext); txt.setText(mContents[groupPosition][childPosition]); content.addView(txt); } else if(mTypes[groupPosition][childPosition] == 
content_type.popup){ // TextView txt = new TextView(mContext); // txt.setText("..."); // txt.setTextColor(mContext.getResources().getColor(R.color.green)); LinearLayout popup = initPopup(headline, mJacksonTable.getEntry(childPosition, groupPosition), childPosition, groupPosition); // popup.measure(MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED); // int popupHeight = popup.getMeasuredHeight(); // item.measure(MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED); // int itemHeight = popup.getMeasuredHeight(); // if(popupHeight < itemHeight){ // LayoutParams popupParams = popup.getLayoutParams(); // LayoutParams p = new LinearLayout.LayoutParams(popupParams.width, itemHeight); // popup.setLayoutParams(p); // } LayoutParams p = new LinearLayout.LayoutParams(0, LayoutParams.MATCH_PARENT, Gravity.CENTER_VERTICAL); content.setLayoutParams(p); // int paddingBottom = content.getPaddingBottom(); // int paddingRight = content.getPaddingRight(); // int paddingTop = content.getPaddingTop(); // content.setPadding(android.R.attr.expandableListPreferredChildPaddingLeft, paddingTop, paddingRight, paddingBottom); content.addView(popup); // LayoutParams p = new LinearLayout.LayoutParams(0, LayoutParams.MATCH_PARENT, Gravity.CENTER_VERTICAL); // popup.setLayoutParams(p); content = popup; // content = popup; } else if(mTypes[groupPosition][childPosition] == content_type.checkbox){ final CheckBox cb = new CheckBox(mContext); cb.setButtonDrawable(R.drawable.custom_checkbox); final IEditableContent jacksonEntry = mJacksonTable.getEntry(childPosition, groupPosition); if(jacksonEntry != null) { // sets the onCheckedChangeListener // this is needed so we can take over the changes to our jackson model if(SlideoutNavigationActivity.getAc().inEditMode()){ // cb.setOnCheckedChangeListener(this); cb.setOnClickListener(new OnClickListener(){ @Override public void onClick(View v) { // you might keep a reference to the CheckBox to avoid this class cast boolean checked = ((CheckBox)v).isChecked(); 
IEditableContent entry = (IEditableContent) v.getTag(R.id.jackson_row_tag_id); // set the new value entry.setContent(String.valueOf(checked)); } }); } else{ cb.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { cb.setChecked(!isChecked); } }); } // sets the associated jackson row to this cb.setTag(R.id.jackson_row_tag_id, jacksonEntry); // set checked state to jackson state cb.setChecked(Boolean.parseBoolean(jacksonEntry.getContent())); } content.addView(cb); } final TextView item = (TextView) convertView.findViewById(R.id.table_view_item); // item.addTextChangedListener(new TextWatcher(){ // public void afterTextChanged(Editable s) { // resizePopup(item, convView); // } // public void beforeTextChanged(CharSequence s, int start, int count, int after){ // } // public void onTextChanged(CharSequence s, int start, int before, int count){ // } // }); item.setText(headline + ":"); final View convView = convertView; // resizePopup(convView, convView); //TODO: further cases and positioning + onClickListener // item.setText(mContents[groupPosition][childPosition]); // row.setTextSize(R.dimen.text_large); return convView; } // protected void resizePopup(View headlineView, View containingView) { //// Log.d("CustomExpandableListAdapter", "resizing!"); // LinearLayout content = (LinearLayout) containingView.findViewById(R.id.content_keeper); // headlineView.measure(MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED); // int headlineHeight = headlineView.getMeasuredHeight(); // content.measure(MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED); // int contentHeight = content.getMeasuredHeight(); // int contentWidth = content.getMeasuredWidth(); // Log.d("CustomExpandableListAdapter", "contentHeight ==" + contentHeight); // Log.d("CustomExpandableListAdapter", "headlineHeight ==" + headlineHeight); // if(contentHeight < headlineHeight){ // Log.d("CustomExpandableListAdapter", 
"resizing!"); //// LayoutParams popupParams = popup.getLayoutParams(); // LayoutParams p = new LinearLayout.LayoutParams(contentWidth, headlineHeight); // content.setLayoutParams(p); // View child = content.getChildAt(0); // if(child != null){ // Log.d("CustomExpandableListAdapter", "resizing child"); // child.setLayoutParams(p); // } // } // } @Override public int getChildrenCount(int groupPosition) { return mContents[groupPosition].length; } @Override public String[] getGroup(int groupPosition) { return mContents[groupPosition]; } @Override public int getGroupCount() { return mContents.length; } @Override public long getGroupId(int groupPosition) { return 0; } @Override public View getGroupView(final int groupPosition, boolean isExpanded, View convertView, ViewGroup parent) { if (convertView == null) { LayoutInflater infalInflater = (LayoutInflater) mContext .getSystemService(Context.LAYOUT_INFLATER_SERVICE); convertView = infalInflater.inflate(R.layout.table_view_group_item, null); } TextView item = (TextView) convertView.findViewById(R.id.rowName); item.setTypeface(Typeface.DEFAULT_BOLD); item.setPaintFlags(item.getPaintFlags()| Paint.UNDERLINE_TEXT_FLAG); item.setText(mTitles[groupPosition]); // row.setTextSize(R.dimen.text_large); final CheckBox checkbox = (CheckBox) convertView.findViewById(R.id.group_checkbox); final CheckBox favorite = (CheckBox) convertView.findViewById(R.id.favorite_checkbox); if(SlideoutNavigationActivity.theActiveActivity instanceof CharacterPlayActivity){ checkbox.setVisibility(View.INVISIBLE); //XXX: show favorites again when implemented favorite.setVisibility(View.INVISIBLE); } else if(SlideoutNavigationActivity.theActiveActivity instanceof CharacterEditActivity){ favorite.setVisibility(View.INVISIBLE); } final Row jacksonRow = mJacksonTable.getRow(groupPosition); //important: remove listener before setting -> else listener will be called checkbox.setOnCheckedChangeListener(null); checkbox.setChecked(jacksonRow.isSelected()); 
favorite.setOnCheckedChangeListener(null); favorite.setChecked(jacksonRow.isFavorite()); Log.d("CUSTOM EXP ADAPTER", "groupNumber: " + groupPosition + "; fav: " + jacksonRow.isFavorite() + ", checked: " + jacksonRow.isSelected()); checkbox.setOnCheckedChangeListener(new OnCheckedChangeListener() { // final Row mRow = jacksonRow; @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { // mRow.setSelected(isChecked); Log.d("CUSTOM EXP ADAPTER", "groupNumber: " + groupPosition + "checkbon: " + isChecked); mJacksonTable.getRow(groupPosition).setSelected(isChecked); checkbox.setSelected(isChecked); if(!isChecked){ favorite.setChecked(false); // mRow.setFavorite(false); mJacksonTable.getRow(groupPosition).setFavorite(false); } // Log.d("CUSTOM EXP ADAPTER", "onCheckedChange:"+isChecked); } }); favorite.setOnCheckedChangeListener(new OnCheckedChangeListener() { // final Row mRow = jacksonRow; @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { // mRow.setFavorite(isChecked); mJacksonTable.getRow(groupPosition).setFavorite(isChecked); favorite.setSelected(isChecked); if(isChecked){ checkbox.setChecked(true); // mRow.setSelected(true); mJacksonTable.getRow(groupPosition).setSelected(true); } } }); return convertView; } @Override public boolean hasStableIds() { return true; } @Override public boolean isChildSelectable(int groupPosition, int childPosition) { return true; } // /** // * This is called when a checkbox changed its state. 
//	 */
//	@Override
//	public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
//		// get the associated entry
//		IEditableContent entry = (IEditableContent) buttonView.getTag(R.id.jackson_row_tag_id);
//		// set the new value
//		entry.setContent(String.valueOf(isChecked));
//	}

	// Cursor bookkeeping for live style spans applied while typing in the popup.
	int styleStart = 0; // index where the currently-active style run began
	int cursorLoc = 0;  // last known cursor position

	/**
	 * Builds the clickable cell view that, when tapped, opens a rich-text
	 * editor popup for the table cell backed by {@code jacksonEntry}.
	 *
	 * <p>The popup supports bold/italic/underline styling of newly typed text,
	 * "@"-references to other elements (rendered as clickable spans when the
	 * activity is in selection mode), and an "add reference" picker. Every text
	 * change is written straight back to {@code jacksonEntry}. When the
	 * activity is not in edit mode the popup is read-only.
	 *
	 * @param headline     title shown at the top of the popup
	 * @param jacksonEntry data model entry edited by the popup
	 * @param columnIndex  column of the cell (currently unused)
	 * @param rowIndex     row of the cell (currently unused)
	 * @return the list-cell view that opens the popup on click
	 */
	private LinearLayout initPopup(final String headline, final IEditableContent jacksonEntry, final int columnIndex, final int rowIndex){
		final LinearLayout ll = new LinearLayout(mContext);
		final TextView newElement = new TextView(mContext);

		LayoutInflater inflater = (LayoutInflater) SlideoutNavigationActivity.theActiveActivity.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
		final View popupView = inflater.inflate(R.layout.table_view_popup, null);
		final TextView popupHeadline = (TextView) popupView.findViewById(R.id.popup_headline);
		popupHeadline.setText(headline);
		final EditText inputPopup = (EditText) popupView.findViewById(R.id.popup_editText);

		// Style toggles only record the cursor position where the style run
		// starts; the spans themselves are applied in afterTextChanged below.
		// NOTE(review): proper handling of a non-empty selection is blocked by
		// http://code.google.com/p/android/issues/detail?id=62508 — text
		// marking inside a PopupWindow does not work, so selectionEnd is
		// intentionally ignored here.
		final ToggleButton toggleBold = (ToggleButton) popupView.findViewById(R.id.toggle_bold);
		toggleBold.setOnClickListener(new Button.OnClickListener() {
			public void onClick(View v) {
				int selectionStart = inputPopup.getSelectionStart();
				styleStart = selectionStart;
			}
		});
		final ToggleButton toggleItalic = (ToggleButton) popupView.findViewById(R.id.toggle_italic);
		toggleItalic.setOnClickListener(new Button.OnClickListener() {
			public void onClick(View v) {
				int selectionStart = inputPopup.getSelectionStart();
				styleStart = selectionStart;
			}
		});
		final ToggleButton toggleUnderlined = (ToggleButton) popupView.findViewById(R.id.toggle_underline);
		toggleUnderlined.setPaintFlags(Paint.UNDERLINE_TEXT_FLAG);
		toggleUnderlined.setOnClickListener(new Button.OnClickListener() {
			public void onClick(View v) {
				int selectionStart = inputPopup.getSelectionStart();
				styleStart = selectionStart;
			}
		});

		// Applies the currently-toggled styles to the run [styleStart, cursor)
		// after every edit, then mirrors the text into the data model.
		inputPopup.addTextChangedListener(new TextWatcher() {
			final IEditableContent myJacksonEntry = jacksonEntry;

			public void afterTextChanged(Editable s) {
				int position = Selection.getSelectionStart(inputPopup.getText());
				if (position < 0){
					position = 0;
				}
				if (position > 0){
					if (styleStart > position || position > (cursorLoc + 1)){
						// user changed cursor location, reset
						styleStart = position - 1;
					}
					cursorLoc = position;
					if (toggleBold.isChecked()){
						// Remove stale bold spans over the run before re-applying.
						StyleSpan[] ss = s.getSpans(styleStart, position, StyleSpan.class);
						for (int i = 0; i < ss.length; i++) {
							if (ss[i].getStyle() == android.graphics.Typeface.BOLD){
								s.removeSpan(ss[i]);
							}
						}
						Log.d("CustomExpendableListAdapter", "setting span: bold");
						s.setSpan(new StyleSpan(android.graphics.Typeface.BOLD), styleStart, position, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
					}
					if (toggleItalic.isChecked()){
						StyleSpan[] ss = s.getSpans(styleStart, position, StyleSpan.class);
						for (int i = 0; i < ss.length; i++) {
							if (ss[i].getStyle() == android.graphics.Typeface.ITALIC){
								s.removeSpan(ss[i]);
							}
						}
						Log.d("CustomExpendableListAdapter", "setting span: italic");
						s.setSpan(new StyleSpan(android.graphics.Typeface.ITALIC), styleStart, position, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
					}
					if (toggleUnderlined.isChecked()){
						UnderlineSpan[] ss = s.getSpans(styleStart, position, UnderlineSpan.class);
						for (int i = 0; i < ss.length; i++) {
							s.removeSpan(ss[i]);
						}
						Log.d("CustomExpendableListAdapter", "setting span: underlined");
						s.setSpan(new UnderlineSpan(), styleStart, position, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
					}
					inputPopup.setText(s);
					inputPopup.setSelection(position);
					myJacksonEntry.setContent(s.toString());
				}
			}

			public void beforeTextChanged(CharSequence s, int start, int count, int after) {
				//unused
			}

			public void onTextChanged(CharSequence s, int start, int before, int count) {
			}
		});

		// Pre-fill with existing content and, in selection mode, turn every
		// "@reference" token into a clickable span.
		if(!jacksonEntry.getContent().isEmpty()) {
			inputPopup.setText(jacksonEntry.getContent());
			if(SlideoutNavigationActivity.getAc().inSelectionMode()){
				Log.d("CustomExpendableListAdapter", "durchsuche Popup nach Referenzen!");
				String searchForReferences = inputPopup.getText().toString();
				Pattern p = Pattern.compile("@");
				Matcher m = p.matcher(searchForReferences);
				//following needed for onClick of ClickableSpan to work!
				inputPopup.setMovementMethod(LinkMovementMethod.getInstance());
				SpannableStringBuilder span = (SpannableStringBuilder) inputPopup.getText();
				boolean foundAny = false;
				while (m.find()){
					foundAny = true;
					int startIndex = m.start();
					int endIndex = startIndex;
					// FIX: previously this scan had no length bound and threw
					// StringIndexOutOfBoundsException when a reference was the
					// last token of the text; stop at the end of the string.
					while(endIndex < searchForReferences.length()
							&& searchForReferences.charAt(endIndex) != ' '
							&& searchForReferences.charAt(endIndex) != '\n'
							&& searchForReferences.charAt(endIndex) != '\b'){
						endIndex++;
					}
					Log.d("CustomExpandableListAdapter", "Popup: startIndex: " + startIndex + "; endIndex: " + endIndex);
					Log.d("CustomExpendableListAdapter", "setting span: clickable");
					span.setSpan(new MyClickableSpan(popupView, mBelongsTo), startIndex, endIndex, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
				}
				if(foundAny){
					inputPopup.setText(span);
				}
			}
		}

		// Size the popup to 90% of the screen width (in dip).
		DisplayMetrics displayMetrics = mContext.getResources().getDisplayMetrics();
		float dpWidth = displayMetrics.widthPixels / displayMetrics.density;
		final int popupWidth = Math.round(TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, (float) (dpWidth*0.9), mContext.getResources().getDisplayMetrics()));

		final ReattachingPopup popup = new ReattachingPopup(mBelongsTo, popupView, popupWidth, ViewGroup.LayoutParams.WRAP_CONTENT, true);
		// Empty background drawable so the popup can be dismissed by touch.
		popup.setBackgroundDrawable(new BitmapDrawable(mContext.getResources(),""));
		popup.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE);

		// "Add reference" opens a second popup listing every referenceable
		// element; picking one appends "@name " to the editor.
		final Button addRefButton = (Button) popupView.findViewById(R.id.add_ref);
		final TableFragment tf = mBelongsTo;
		addRefButton.setOnClickListener(new Button.OnClickListener() {
			public void onClick(View v) {
				LayoutInflater inflater = (LayoutInflater) SlideoutNavigationActivity.theActiveActivity.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
				View popupReferencesView = inflater.inflate(R.layout.table_view_references, null);
				LinearLayout reference_list = (LinearLayout) popupReferencesView.findViewById(R.id.reference_list);
				final ReattachingPopup popupReferences = new ReattachingPopup(tf, popupReferencesView, popupWidth, ViewGroup.LayoutParams.WRAP_CONTENT, true);
				popupReferences.setBackgroundDrawable(new BitmapDrawable(mContext.getResources(),""));
				popupReferences.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE);

				ArrayList<String> allRefs = getAllElementsToRef(((SlideoutNavigationActivity) SlideoutNavigationActivity.theActiveActivity).getRootFragment());
				for(String aReference : allRefs){
					TextView oneLine = new TextView(SlideoutNavigationActivity.theActiveActivity);
					oneLine.setText(aReference);
					oneLine.setTextSize(TypedValue.COMPLEX_UNIT_PX, mContext.getResources().getDimension(R.dimen.text_large));
					oneLine.setOnClickListener(new OnClickListener() {
						@Override
						public void onClick(View v) {
							// Insert a separating space unless the editor is
							// empty or already ends in whitespace.
							String lastCharOfPopup = inputPopup.getText().toString();
							if(lastCharOfPopup.length() > 0){
								lastCharOfPopup = lastCharOfPopup.substring(lastCharOfPopup.length() - 1);
								if(!lastCharOfPopup.equals("\n") && !lastCharOfPopup.equals(" ")){
									inputPopup.append(" ");
								}
							}
							inputPopup.append("@" + ((TextView) v).getText()+ " ");
							popupReferences.dismiss();
						}
					});
					reference_list.addView(oneLine);
				}
				popupReferences.showAtLocation(SlideoutNavigationActivity.theActiveActivity.findViewById(android.R.id.content), Gravity.BOTTOM, 0, 0);
			}
		});

		// Tapping the cell shows the editor popup and forces the keyboard up.
		ll.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				// Anchor on the activity content view, not popupView itself.
				popup.showAtLocation(SlideoutNavigationActivity.theActiveActivity.findViewById(android.R.id.content), Gravity.CENTER, 0, 0);
				InputMethodManager inputMgr = (InputMethodManager)SlideoutNavigationActivity.theActiveActivity.getSystemService(Context.INPUT_METHOD_SERVICE);
				inputMgr.showSoftInput(inputPopup, InputMethodManager.SHOW_FORCED);
			}
		});
		popupView.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				inputPopup.requestFocus();
			}
		});

		// Outside of edit mode the popup is read-only: no focus, no styling,
		// no reference insertion.
		if(!SlideoutNavigationActivity.getAc().inEditMode()){
			Log.d("tableFragment", "set Popup non editable");
			inputPopup.setFocusable(false);
			addRefButton.setVisibility(View.INVISIBLE);
			toggleBold.setVisibility(View.INVISIBLE);
			toggleItalic.setVisibility(View.INVISIBLE);
			toggleUnderlined.setVisibility(View.INVISIBLE);
		}
		else{
			Log.d("tableFragment", "set Popup EDITABLE");
		}

		newElement.setText(mContext.getResources().getString(R.string.new_element));
		newElement.setTypeface(null, Typeface.BOLD_ITALIC);
		newElement.setTextColor(mContext.getResources().getColor(R.color.dark_green));
		ll.addView(newElement);
		LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT, Gravity.CENTER_VERTICAL);
		ll.setLayoutParams(lp);
		ll.setGravity(Gravity.CENTER_VERTICAL);
		return ll;
	}

	/**
	 * Recursively collects the names of all matrix items reachable from the
	 * given folder, for use as "@" reference targets. Table fragments are
	 * currently skipped (logged only).
	 */
	private ArrayList<String> getAllElementsToRef(FolderFragment fragmentToSearch){
		ArrayList<String> results = new ArrayList<String>();
		Log.d("popupReferences", "subdirs: " + fragmentToSearch.dataAdapter.getAll().length);
		for(FolderElementData currentDatum : fragmentToSearch.dataAdapter.getAll()){
			GeneralFragment currentFragment = currentDatum.childFragment;
			if(currentFragment instanceof FolderFragment){
				Log.d("popupReferences", "folderfragment found, descending now");
				ArrayList<String> toAdd = getAllElementsToRef((FolderFragment) currentFragment);
				results.addAll(toAdd);
			}
			else if(currentFragment instanceof TableFragment){
				Log.d("popupReferences", "tableview found");
			}
			else if(currentFragment instanceof MatrixFragment){
				Log.d("popupReferences", "matrix found. Elements:" + (((MatrixFragment) currentFragment).itemsList).size());
				for(MatrixItem oneItem : ((MatrixFragment) currentFragment).itemsList){
					String oneName = oneItem.getItemName();
					results.add(oneName);
				}
			}
			else{
				Log.d("popupReferences", "unhandled element found!!!");
			}
		}
		return results;
	}
}
/*
 * Copyright 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.auto.value.AutoValue;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.Sets;
import com.google.template.soy.SoyFileSetParser.ParseResult;
import com.google.template.soy.base.internal.IdGenerator;
import com.google.template.soy.base.internal.IncrementingIdGenerator;
import com.google.template.soy.base.internal.SoyFileSupplier;
import com.google.template.soy.basetree.SyntaxVersion;
import com.google.template.soy.error.ErrorReporter;
import com.google.template.soy.parsepasses.CheckCallsVisitor;
import com.google.template.soy.parsepasses.CheckDelegatesVisitor;
import com.google.template.soy.parsepasses.InferRequiredSyntaxVersionVisitor;
import com.google.template.soy.parsepasses.ParsePasses;
import com.google.template.soy.shared.SoyAstCache;
import com.google.template.soy.shared.SoyAstCache.VersionedFile;
import com.google.template.soy.sharedpasses.CheckCallingParamTypesVisitor;
import com.google.template.soy.sharedpasses.CheckTemplateParamsVisitor;
import com.google.template.soy.sharedpasses.CheckTemplateVisibility;
import com.google.template.soy.sharedpasses.ReportSyntaxVersionErrorsVisitor;
import com.google.template.soy.soyparse.SoyFileParser;
import com.google.template.soy.soytree.SoyFileNode;
import com.google.template.soy.soytree.SoyFileSetNode;
import com.google.template.soy.soytree.TemplateRegistry;
import com.google.template.soy.types.SoyTypeRegistry;

import java.io.IOException;
import java.io.Reader;
import java.util.List;
import java.util.Set;

import javax.annotation.Nullable;

/**
 * Static functions for parsing a set of Soy files into a {@link SoyFileSetNode}.
 *
 * <p> Important: Do not use outside of Soy code (treat as superpackage-private).
 *
 */
public final class SoyFileSetParser {

  /** A simple tuple for the result of a parse operation. */
  @AutoValue
  public abstract static class ParseResult {
    static ParseResult create(SoyFileSetNode soyTree, TemplateRegistry registry) {
      return new AutoValue_SoyFileSetParser_ParseResult(soyTree, registry);
    }

    /** The parsed file-set AST. */
    public abstract SoyFileSetNode fileSet();

    /** The registry of templates found in {@link #fileSet()}. */
    public abstract TemplateRegistry registry();
  }

  /** The type registry to resolve type names. */
  private final SoyTypeRegistry typeRegistry;

  /** Optional file cache. */
  @Nullable private final SoyAstCache cache;

  /** User-declared syntax version. */
  private final SyntaxVersion declaredSyntaxVersion;

  /** The suppliers of the Soy files to parse. */
  private final List<? extends SoyFileSupplier> soyFileSuppliers;

  /** Parsing passes. null means that they are disabled.*/
  @Nullable private final ParsePasses parsingPasses;

  /** Whether to run checking passes. */
  private final boolean doRunCheckingPasses;

  /** For reporting parse errors. */
  private final ErrorReporter errorReporter;

  /**
   * Convenience constructor that enables all parsing and checking passes.
   *
   * @param typeRegistry The type registry to resolve type names.
   * @param astCache The AST cache to use, if any.
   * @param declaredSyntaxVersion User-declared syntax version.
   * @param soyFileSuppliers The suppliers for the Soy files. Each must have a unique file name.
   */
  public SoyFileSetParser(
      SoyTypeRegistry typeRegistry,
      @Nullable SoyAstCache astCache,
      SyntaxVersion declaredSyntaxVersion,
      List<? extends SoyFileSupplier> soyFileSuppliers,
      ParsePasses parsePasses,
      ErrorReporter errorReporter) {
    // By default, run all the parsing and checking passes.
    this(
        typeRegistry,
        astCache,
        declaredSyntaxVersion,
        soyFileSuppliers,
        errorReporter,
        checkNotNull(parsePasses),
        true);
  }

  /**
   * @param typeRegistry The type registry to resolve type names.
   * @param astCache The AST cache to use, if any.
   * @param declaredSyntaxVersion User-declared syntax version.
   * @param soyFileSuppliers The suppliers for the Soy files. Each must have a unique file name.
   * @param errorReporter For reporting errors during parsing.
   * @param parsingPasses The parsing passes to run.
   * @param doRunCheckingPasses Whether to run checking passes.
   */
  public SoyFileSetParser(
      SoyTypeRegistry typeRegistry,
      @Nullable SoyAstCache astCache,
      SyntaxVersion declaredSyntaxVersion,
      List<? extends SoyFileSupplier> soyFileSuppliers,
      ErrorReporter errorReporter,
      @Nullable ParsePasses parsingPasses,
      boolean doRunCheckingPasses) {
    // A cached AST must reflect fully-processed files, so caching is rejected
    // unless every pass is enabled.
    Preconditions.checkArgument(
        (astCache == null) || (parsingPasses != null && doRunCheckingPasses),
        "AST caching is only allowed when all parsing and checking passes are enabled, to avoid "
            + "caching inconsistent versions");
    this.typeRegistry = typeRegistry;
    this.cache = astCache;
    this.declaredSyntaxVersion = declaredSyntaxVersion;
    this.soyFileSuppliers = soyFileSuppliers;
    this.errorReporter = errorReporter;
    verifyUniquePaths(soyFileSuppliers);
    this.parsingPasses = parsingPasses;
    this.doRunCheckingPasses = doRunCheckingPasses;
  }

  /**
   * Parses a set of Soy files, returning a structure containing the parse tree and any errors.
   */
  public ParseResult parse() {
    try {
      return parseWithVersions();
    } catch (IOException e) {
      // parse has 9 callers in SoyFileSet, and those are public API methods,
      // whose signatures it is infeasible to change.
      // (Wraps the checked IOException in an unchecked exception.)
      throw Throwables.propagate(e);
    }
  }

  /**
   * Ensures all SoyFileSuppliers have unique paths.
   */
  private static void verifyUniquePaths(Iterable<? extends SoyFileSupplier> soyFileSuppliers) {
    Set<String> paths = Sets.newHashSet();
    for (SoyFileSupplier supplier : soyFileSuppliers) {
      Preconditions.checkArgument(
          !paths.contains(supplier.getFilePath()),
          "Two file suppliers have the same path: %s",
          supplier.getFilePath());
      paths.add(supplier.getFilePath());
    }
  }

  /**
   * Parses a set of Soy files, returning a structure containing the parse tree and template
   * registry.
   */
  private ParseResult parseWithVersions() throws IOException {
    Preconditions.checkState(
        (cache == null) || (parsingPasses != null && doRunCheckingPasses),
        "AST caching is only allowed when all parsing and checking passes are enabled, to avoid "
            + "caching inconsistent versions");
    // Share the cache's id generator so cached and freshly-parsed files use
    // consistent node ids.
    IdGenerator nodeIdGen =
        (cache != null) ? cache.getNodeIdGenerator() : new IncrementingIdGenerator();
    SoyFileSetNode soyTree = new SoyFileSetNode(nodeIdGen.genId(), nodeIdGen);
    boolean filesWereSkipped = false;

    for (SoyFileSupplier fileSupplier : soyFileSuppliers) {
      SoyFileSupplier.Version version = fileSupplier.getVersion();
      VersionedFile cachedFile =
          cache != null ? cache.get(fileSupplier.getFilePath(), version) : null;
      SoyFileNode node;
      if (cachedFile == null) {
        //noinspection SynchronizationOnLocalVariableOrMethodParameter IntelliJ
        synchronized (nodeIdGen) { // Avoid using the same ID generator in multiple threads.
          node = parseSoyFileHelper(fileSupplier, nodeIdGen, typeRegistry);
          // TODO(user): implement error recovery and keep on trucking in order to display
          // as many errors as possible. Currently, the later passes just spew NPEs if run on
          // a malformed parse tree.
          if (node == null) {
            filesWereSkipped = true;
            continue;
          }
          if (parsingPasses != null) {
            // Run passes that are considered part of initial parsing.
            parsingPasses.run(node, nodeIdGen);
          }
        }
        if (doRunCheckingPasses) {
          // Run passes that check the tree.
          runSingleFileCheckingPasses(node);
        }
        if (cache != null) {
          cache.put(fileSupplier.getFilePath(), VersionedFile.of(node, version));
        }
      } else {
        node = cachedFile.file();
      }
      soyTree.addChild(node);
    }

    TemplateRegistry registry = new TemplateRegistry(soyTree, errorReporter);
    // Run passes that check the tree iff we successfully parsed every file.
    if (!filesWereSkipped && doRunCheckingPasses) {
      runWholeFileSetCheckingPasses(registry, soyTree);
    }
    return ParseResult.create(soyTree, registry);
  }

  /**
   * Private helper for {@code parseWithVersions()} to parse one Soy file.
   *
   * @param soyFileSupplier Supplier of the Soy file content and path.
   * @param nodeIdGen The generator of node ids.
   * @return The resulting parse tree for one Soy file and the version from which it was parsed.
   *     May be {@code null} when parsing fails; the caller treats null as a skipped file.
   */
  private SoyFileNode parseSoyFileHelper(
      SoyFileSupplier soyFileSupplier, IdGenerator nodeIdGen, SoyTypeRegistry typeRegistry)
      throws IOException {
    try (Reader soyFileReader = soyFileSupplier.open()) {
      return new SoyFileParser(
              typeRegistry,
              nodeIdGen,
              soyFileReader,
              soyFileSupplier.getSoyFileKind(),
              soyFileSupplier.getFilePath(),
              errorReporter)
          .parseSoyFile();
    }
  }

  /**
   * Private helper for {@code parseWithVersion()} that operate on single files.
   */
  private void runSingleFileCheckingPasses(SoyFileNode fileNode) {
    new ReportSyntaxVersionErrorsVisitor(declaredSyntaxVersion, true, errorReporter)
        .exec(fileNode);
    // Check for errors based on inferred (as opposed to declared) required syntax version.
    SyntaxVersion inferredSyntaxVersion = new InferRequiredSyntaxVersionVisitor().exec(fileNode);
    if (inferredSyntaxVersion.num > declaredSyntaxVersion.num) {
      new ReportSyntaxVersionErrorsVisitor(inferredSyntaxVersion, false, errorReporter)
          .exec(fileNode);
    }
  }

  /**
   * Private helper for {@code parseWithVersions()} to run checking passes that require the whole
   * tree.
   */
  private void runWholeFileSetCheckingPasses(TemplateRegistry registry, SoyFileSetNode soyTree) {
    new CheckTemplateParamsVisitor(registry, declaredSyntaxVersion, errorReporter).exec(soyTree);
    new CheckDelegatesVisitor(registry, errorReporter).exec(soyTree);
    new CheckCallsVisitor(registry, errorReporter).exec(soyTree);
    new CheckCallingParamTypesVisitor(registry, errorReporter).exec(soyTree);
    new CheckTemplateVisibility(registry, errorReporter).exec(soyTree);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.extensions.sql.impl.rel; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Set; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.plan.RelOptCluster; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.plan.RelOptPlanner; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.plan.RelTraitSet; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rel.RelNode; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rel.core.Calc; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rel.logical.LogicalCalc; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rel.type.RelDataType; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexCall; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexDynamicParam; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexFieldAccess; import 
org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexInputRef; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexLiteral; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexLocalRef; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexNode; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexProgram; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexShuttle; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexUtil; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexVisitorImpl; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.tools.RelBuilder; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.util.Litmus; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.util.Util; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.util.graph.DefaultDirectedGraph; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.util.graph.DefaultEdge; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.util.graph.DirectedGraph; import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.util.graph.TopologicalOrderIterator; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.primitives.Ints; import org.checkerframework.checker.nullness.qual.Nullable; import org.slf4j.Logger; /** * CalcRelSplitter operates on a {@link Calc} with multiple {@link RexCall} sub-expressions that * cannot all be implemented by a single concrete {@link RelNode}. * * <p>This is a copy of {@link * org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rel.rules.CalcRelSplitter} modified to * work with Beam. TODO(CALCITE-4538) consider contributing these changes back upstream. * * <p>For example, the Java and Fennel calculator do not implement an identical set of operators. 
 * The Calc can be used to split a single Calc with mixed Java- and Fennel-only operators into a
 * tree of Calc objects that can each be individually implemented by either Java or Fennel, and
 * splits it into several Calc instances.
 *
 * <p>Currently the splitter is only capable of handling two "rel types". That is, it can deal with
 * Java vs. Fennel Calcs, but not Java vs. Fennel vs. some other type of Calc.
 *
 * <p>See {@link ProjectToWindowRule} for an example of how this class is used.
 */
@SuppressWarnings({"all", "OperatorPrecedence"})
public class CalcRelSplitter {

  // ~ Static fields/initializers ---------------------------------------------

  private static final Logger RULE_LOGGER = RelOptPlanner.LOGGER;

  // ~ Instance fields --------------------------------------------------------

  /** The program of the Calc being split. */
  protected final RexProgram program;
  private final RelDataTypeFactory typeFactory;
  /** Rel types to split into; distinct, validated by the constructor. */
  private final RelType[] relTypes;
  private final RelOptCluster cluster;
  private final RelTraitSet traits;
  /** Input of the Calc being split. */
  private final RelNode child;
  protected final RelBuilder relBuilder;

  // ~ Constructors -----------------------------------------------------------

  /**
   * Constructs a CalcRelSplitter.
   *
   * @param calc Calc to split
   * @param relTypes Array of rel types, e.g. {Java, Fennel}. Must be distinct.
   */
  public CalcRelSplitter(Calc calc, RelBuilder relBuilder, RelType[] relTypes) {
    this.relBuilder = relBuilder;
    // Enforce non-null, pairwise-distinct rel types (assertion-only check).
    for (int i = 0; i < relTypes.length; i++) {
      assert relTypes[i] != null;
      for (int j = 0; j < i; j++) {
        assert relTypes[i] != relTypes[j] : "Rel types must be distinct";
      }
    }
    this.program = calc.getProgram();
    this.cluster = calc.getCluster();
    this.traits = calc.getTraitSet();
    this.typeFactory = calc.getCluster().getTypeFactory();
    this.child = calc.getInput();
    this.relTypes = relTypes;
  }

  // ~ Methods ----------------------------------------------------------------

  /**
   * Splits the Calc's program into a stack of Calcs, one per "level", and
   * returns the root of the resulting tree of relational expressions.
   */
  public RelNode execute() {
    // Check that program is valid. In particular, this means that every
    // expression is trivial (either an atom, or a function applied to
    // references to atoms) and every expression depends only on
    // expressions to the left.
    assert program.isValid(Litmus.THROW, null);
    final List<RexNode> exprList = program.getExprList();
    final RexNode[] exprs = exprList.toArray(new RexNode[0]);
    assert !RexUtil.containComplexExprs(exprList);

    // Figure out what level each expression belongs to.
    int[] exprLevels = new int[exprs.length];

    // The type of a level is given by
    // relTypes[levelTypeOrdinals[level]].
    int[] levelTypeOrdinals = new int[exprs.length];

    int levelCount = chooseLevels(exprs, -1, exprLevels, levelTypeOrdinals);

    // For each expression, figure out which is the highest level where it
    // is used.
    int[] exprMaxUsingLevelOrdinals =
        new HighestUsageFinder(exprs, exprLevels).getMaxUsingLevelOrdinals();

    // If expressions are used as outputs, mark them as higher than that.
    final List<RexLocalRef> projectRefList = program.getProjectList();
    final RexLocalRef conditionRef = program.getCondition();
    for (RexLocalRef projectRef : projectRefList) {
      exprMaxUsingLevelOrdinals[projectRef.getIndex()] = levelCount;
    }
    if (conditionRef != null) {
      exprMaxUsingLevelOrdinals[conditionRef.getIndex()] = levelCount;
    }

    // Print out what we've got.
    if (RULE_LOGGER.isTraceEnabled()) {
      traceLevelExpressions(exprs, exprLevels, levelTypeOrdinals, levelCount);
    }

    // Now build the calcs. Each level becomes one Calc whose outputs feed
    // the next level; the last level produces the program's output row type.
    RelNode rel = child;
    final int inputFieldCount = program.getInputRowType().getFieldCount();
    int[] inputExprOrdinals = identityArray(inputFieldCount);
    boolean doneCondition = false;
    for (int level = 0; level < levelCount; level++) {
      final int[] projectExprOrdinals;
      final RelDataType outputRowType;
      if (level == (levelCount - 1)) {
        outputRowType = program.getOutputRowType();
        projectExprOrdinals = new int[projectRefList.size()];
        for (int i = 0; i < projectExprOrdinals.length; i++) {
          projectExprOrdinals[i] = projectRefList.get(i).getIndex();
        }
      } else {
        outputRowType = null;

        // Project the expressions which are computed at this level or
        // before, and will be used at later levels.
        List<Integer> projectExprOrdinalList = new ArrayList<>();
        for (int i = 0; i < exprs.length; i++) {
          RexNode expr = exprs[i];
          if (expr instanceof RexLiteral) {
            // Don't project literals. They are always created in
            // the level where they are used.
            exprLevels[i] = -1;
            continue;
          }
          if ((exprLevels[i] <= level) && (exprMaxUsingLevelOrdinals[i] > level)) {
            projectExprOrdinalList.add(i);
          }
        }
        projectExprOrdinals = Ints.toArray(projectExprOrdinalList);
      }

      final RelType relType = relTypes[levelTypeOrdinals[level]];

      // Can we do the condition this level?
      int conditionExprOrdinal = -1;
      if ((conditionRef != null) && !doneCondition) {
        conditionExprOrdinal = conditionRef.getIndex();
        if ((exprLevels[conditionExprOrdinal] > level) || !relType.supportsCondition()) {
          // stand down -- we're not ready to do the condition yet
          conditionExprOrdinal = -1;
        } else {
          doneCondition = true;
        }
      }

      RexProgram program1 =
          createProgramForLevel(
              level,
              levelCount,
              rel.getRowType(),
              exprs,
              exprLevels,
              inputExprOrdinals,
              projectExprOrdinals,
              conditionExprOrdinal,
              outputRowType);
      rel = relType.makeRel(cluster, traits, relBuilder, rel, program1);
      rel = handle(rel);

      // The outputs of this level will be the inputs to the next level.
      inputExprOrdinals = projectExprOrdinals;
    }

    Preconditions.checkArgument(doneCondition || (conditionRef == null), "unhandled condition");
    return rel;
  }

  /**
   * Opportunity to further refine the relational expression created for a given level. The default
   * implementation returns the relational expression unchanged.
   */
  protected RelNode handle(RelNode rel) {
    return rel;
  }

  /**
   * Figures out which expressions to calculate at which level.
   *
   * @param exprs Array of expressions
   * @param conditionOrdinal Ordinal of the condition expression, or -1 if no condition
   * @param exprLevels Level ordinal for each expression (output)
   * @param levelTypeOrdinals The type of each level (output)
   * @return Number of levels required
   */
  private int chooseLevels(
      final RexNode[] exprs, int conditionOrdinal, int[] exprLevels, int[] levelTypeOrdinals) {
    final int inputFieldCount = program.getInputRowType().getFieldCount();

    int levelCount = 0;
    final MaxInputFinder maxInputFinder = new MaxInputFinder(exprLevels);
    boolean[] relTypesPossibleForTopLevel = new boolean[relTypes.length];
    Arrays.fill(relTypesPossibleForTopLevel, true);

    // Compute the order in which to visit expressions.
    final List<Set<Integer>> cohorts = getCohorts();
    final List<Integer> permutation = computeTopologicalOrdering(exprs, cohorts);

    for (int i : permutation) {
      RexNode expr = exprs[i];
      final boolean condition = i == conditionOrdinal;

      if (i < inputFieldCount) {
        assert expr instanceof RexInputRef;
        exprLevels[i] = -1;
        continue;
      }

      // Deduce the minimum level of the expression. An expression must
      // be at a level greater than or equal to all of its inputs.
      int level = maxInputFinder.maxInputFor(expr);

      // If the expression is in a cohort, it can occur no lower than the
      // levels of other expressions in the same cohort.
      Set<Integer> cohort = findCohort(cohorts, i);
      if (cohort != null) {
        for (Integer exprOrdinal : cohort) {
          if (exprOrdinal == i) {
            // Already did this member of the cohort. It's a waste
            // of effort to repeat.
continue; } final RexNode cohortExpr = exprs[exprOrdinal]; int cohortLevel = maxInputFinder.maxInputFor(cohortExpr); if (cohortLevel > level) { level = cohortLevel; } } } // Try to implement this expression at this level. // If that is not possible, try to implement it at higher levels. levelLoop: for (; ; ++level) { if (level >= levelCount) { // This is a new level. We can use any type we like. for (int relTypeOrdinal = 0; relTypeOrdinal < relTypes.length; relTypeOrdinal++) { if (!relTypesPossibleForTopLevel[relTypeOrdinal]) { continue; } if (relTypes[relTypeOrdinal].canImplement(expr, condition)) { // Success. We have found a type where we can // implement this expression. exprLevels[i] = level; levelTypeOrdinals[level] = relTypeOrdinal; assert (level == 0) || (levelTypeOrdinals[level - 1] != levelTypeOrdinals[level]) : "successive levels of same type"; // Figure out which of the other reltypes are // still possible for this level. // Previous reltypes are not possible. for (int j = 0; j < relTypeOrdinal; ++j) { relTypesPossibleForTopLevel[j] = false; } // Successive reltypes may be possible. for (int j = relTypeOrdinal + 1; j < relTypes.length; ++j) { if (relTypesPossibleForTopLevel[j]) { relTypesPossibleForTopLevel[j] = relTypes[j].canImplement(expr, condition); } } // Move to next level. levelTypeOrdinals[levelCount] = firstSet(relTypesPossibleForTopLevel); ++levelCount; Arrays.fill(relTypesPossibleForTopLevel, true); break levelLoop; } } // None of the reltypes still active for this level could // implement expr. But maybe we could succeed with a new // level, with all options open? if (count(relTypesPossibleForTopLevel) >= relTypes.length) { // Cannot implement for any type. 
throw new AssertionError("cannot implement " + expr); } levelTypeOrdinals[levelCount] = firstSet(relTypesPossibleForTopLevel); ++levelCount; Arrays.fill(relTypesPossibleForTopLevel, true); } else { final int levelTypeOrdinal = levelTypeOrdinals[level]; if (!relTypes[levelTypeOrdinal].canImplement(expr, condition)) { // Cannot implement this expression in this type; // continue to next level. continue; } exprLevels[i] = level; break; } } } if (levelCount == 0) { // At least one level is always required. levelCount = 1; } return levelCount; } /** * Computes the order in which to visit expressions, so that we decide the level of an expression * only after the levels of lower expressions have been decided. * * <p>First, we need to ensure that an expression is visited after all of its inputs. * * <p>Further, if the expression is a member of a cohort, we need to visit it after the inputs of * all other expressions in that cohort. With this condition, expressions in the same cohort will * very likely end up in the same level. * * <p>Note that if there are no cohorts, the expressions from the {@link RexProgram} are already * in a suitable order. We perform the topological sort just to ensure that the code path is * well-trodden. 
* * @param exprs Expressions * @param cohorts List of cohorts, each of which is a set of expr ordinals * @return Expression ordinals in topological order */ private static List<Integer> computeTopologicalOrdering( RexNode[] exprs, List<Set<Integer>> cohorts) { final DirectedGraph<Integer, DefaultEdge> graph = DefaultDirectedGraph.create(); for (int i = 0; i < exprs.length; i++) { graph.addVertex(i); } for (int i = 0; i < exprs.length; i++) { final RexNode expr = exprs[i]; final Set<Integer> cohort = findCohort(cohorts, i); final Set<Integer> targets; if (cohort == null) { targets = Collections.singleton(i); } else { targets = cohort; } expr.accept( new RexVisitorImpl<Void>(true) { @Override public Void visitLocalRef(RexLocalRef localRef) { for (Integer target : targets) { graph.addEdge(localRef.getIndex(), target); } return null; } }); } TopologicalOrderIterator<Integer, DefaultEdge> iter = new TopologicalOrderIterator<>(graph); final List<Integer> permutation = new ArrayList<>(); while (iter.hasNext()) { permutation.add(iter.next()); } return permutation; } /** * Finds the cohort that contains the given integer, or returns null. * * @param cohorts List of cohorts, each a set of integers * @param ordinal Integer to search for * @return Cohort that contains the integer, or null if not found */ private static @Nullable Set<Integer> findCohort(List<Set<Integer>> cohorts, int ordinal) { for (Set<Integer> cohort : cohorts) { if (cohort.contains(ordinal)) { return cohort; } } return null; } private static int[] identityArray(int length) { final int[] ints = new int[length]; for (int i = 0; i < ints.length; i++) { ints[i] = i; } return ints; } /** * Creates a program containing the expressions for a given level. * * <p>The expression list of the program will consist of all entries in the expression list <code> * allExprs[i]</code> for which the corresponding level ordinal <code>exprLevels[i]</code> is * equal to <code>level</code>. 
Expressions are mapped according to <code>inputExprOrdinals</code> * . * * @param level Level ordinal * @param levelCount Number of levels * @param inputRowType Input row type * @param allExprs Array of all expressions * @param exprLevels Array of the level ordinal of each expression * @param inputExprOrdinals Ordinals in the expression list of input expressions. Input expression * <code>i</code> will be found at position <code>inputExprOrdinals[i]</code>. * @param projectExprOrdinals Ordinals of the expressions to be output this level. * @param conditionExprOrdinal Ordinal of the expression to form the condition for this level, or * -1 if there is no condition. * @param outputRowType Output row type * @return Relational expression */ private RexProgram createProgramForLevel( int level, int levelCount, RelDataType inputRowType, RexNode[] allExprs, int[] exprLevels, int[] inputExprOrdinals, final int[] projectExprOrdinals, int conditionExprOrdinal, @Nullable RelDataType outputRowType) { // Build a list of expressions to form the calc. List<RexNode> exprs = new ArrayList<>(); // exprInverseOrdinals describes where an expression in allExprs comes // from -- from an input, from a calculated expression, or -1 if not // available at this level. int[] exprInverseOrdinals = new int[allExprs.length]; Arrays.fill(exprInverseOrdinals, -1); int j = 0; // First populate the inputs. They were computed at some previous level // and are used here. for (int i = 0; i < inputExprOrdinals.length; i++) { final int inputExprOrdinal = inputExprOrdinals[i]; exprs.add(new RexInputRef(i, allExprs[inputExprOrdinal].getType())); exprInverseOrdinals[inputExprOrdinal] = j; ++j; } // Next populate the computed expressions. 
final RexShuttle shuttle = new InputToCommonExprConverter( exprInverseOrdinals, exprLevels, level, inputExprOrdinals, allExprs); for (int i = 0; i < allExprs.length; i++) { if (exprLevels[i] == level || exprLevels[i] == -1 && level == (levelCount - 1) && allExprs[i] instanceof RexLiteral) { RexNode expr = allExprs[i]; final RexNode translatedExpr = expr.accept(shuttle); exprs.add(translatedExpr); assert exprInverseOrdinals[i] == -1; exprInverseOrdinals[i] = j; ++j; } } // Form the projection and condition list. Project and condition // ordinals are offsets into allExprs, so we need to map them into // exprs. final List<RexLocalRef> projectRefs = new ArrayList<>(projectExprOrdinals.length); final List<String> fieldNames = new ArrayList<>(projectExprOrdinals.length); for (int i = 0; i < projectExprOrdinals.length; i++) { final int projectExprOrdinal = projectExprOrdinals[i]; final int index = exprInverseOrdinals[projectExprOrdinal]; assert index >= 0; RexNode expr = allExprs[projectExprOrdinal]; projectRefs.add(new RexLocalRef(index, expr.getType())); // Inherit meaningful field name if possible. fieldNames.add(deriveFieldName(expr, i)); } RexLocalRef conditionRef; if (conditionExprOrdinal >= 0) { final int index = exprInverseOrdinals[conditionExprOrdinal]; conditionRef = new RexLocalRef(index, allExprs[conditionExprOrdinal].getType()); } else { conditionRef = null; } if (outputRowType == null) { outputRowType = RexUtil.createStructType(typeFactory, projectRefs, fieldNames, null); } final RexProgram program = new RexProgram(inputRowType, exprs, projectRefs, conditionRef, outputRowType); // Program is NOT normalized here (e.g. can contain literals in // call operands), since literals should be inlined. 
return program; } private String deriveFieldName(RexNode expr, int ordinal) { if (expr instanceof RexInputRef) { int inputIndex = ((RexInputRef) expr).getIndex(); String fieldName = child.getRowType().getFieldList().get(inputIndex).getName(); // Don't inherit field names like '$3' from child: that's // confusing. if (!fieldName.startsWith("$") || fieldName.startsWith("$EXPR")) { return fieldName; } } return "$" + ordinal; } /** * Traces the given array of level expression lists at the finer level. * * @param exprs Array expressions * @param exprLevels For each expression, the ordinal of its level * @param levelTypeOrdinals For each level, the ordinal of its type in the {@link #relTypes} array * @param levelCount The number of levels */ private void traceLevelExpressions( RexNode[] exprs, int[] exprLevels, int[] levelTypeOrdinals, int levelCount) { StringWriter traceMsg = new StringWriter(); PrintWriter traceWriter = new PrintWriter(traceMsg); traceWriter.println("FarragoAutoCalcRule result expressions for: "); traceWriter.println(program.toString()); for (int level = 0; level < levelCount; level++) { traceWriter.println("Rel Level " + level + ", type " + relTypes[levelTypeOrdinals[level]]); for (int i = 0; i < exprs.length; i++) { RexNode expr = exprs[i]; assert (exprLevels[i] >= -1) && (exprLevels[i] < levelCount) : "expression's level is out of range"; if (exprLevels[i] == level) { traceWriter.println("\t" + i + ": " + expr); } } traceWriter.println(); } String msg = traceMsg.toString(); RULE_LOGGER.trace(msg); } /** Returns the number of bits set in an array. */ private static int count(boolean[] booleans) { int count = 0; for (boolean b : booleans) { if (b) { ++count; } } return count; } /** Returns the index of the first set bit in an array. 
*/ private static int firstSet(boolean[] booleans) { for (int i = 0; i < booleans.length; i++) { if (booleans[i]) { return i; } } return -1; } /** * Searches for a value in a map, and returns the position where it was found, or -1. * * @param value Value to search for * @param map Map to search in * @return Ordinal of value in map, or -1 if not found */ private static int indexOf(int value, int[] map) { for (int i = 0; i < map.length; i++) { if (value == map[i]) { return i; } } return -1; } /** * Returns whether a relational expression can be implemented solely in a given {@link RelType}. * * @param rel Calculation relational expression * @param relTypeName Name of a {@link RelType} * @return Whether relational expression can be implemented */ protected boolean canImplement(LogicalCalc rel, String relTypeName) { for (RelType relType : relTypes) { if (relType.name.equals(relTypeName)) { return relType.canImplement(rel.getProgram()); } } throw new AssertionError("unknown type " + relTypeName); } /** * Returns a list of sets of expressions that should be on the same level. * * <p>For example, if this method returns { {3, 5}, {4, 7} }, it means that expressions 3 and 5, * should be on the same level, and expressions 4 and 7 should be on the same level. The two * cohorts do not need to be on the same level. * * <p>The list is best effort. If it is not possible to arrange that the expressions in a cohort * are on the same level, the {@link #execute()} method will still succeed. * * <p>The default implementation of this method returns the empty list; expressions will be put on * the most suitable level. This is generally the lowest possible level, except for literals, * which are placed at the level where they are used. 
* * @return List of cohorts, that is sets of expressions, that the splitting algorithm should * attempt to place on the same level */ protected List<Set<Integer>> getCohorts() { return Collections.emptyList(); } // ~ Inner Classes ---------------------------------------------------------- /** Type of relational expression. Determines which kinds of expressions it can handle. */ public abstract static class RelType { private final String name; protected RelType(String name) { this.name = name; } @Override public String toString() { return name; } protected abstract boolean canImplement(RexFieldAccess field); protected abstract boolean canImplement(RexDynamicParam param); protected abstract boolean canImplement(RexLiteral literal); protected abstract boolean canImplement(RexCall call); protected boolean supportsCondition() { return true; } protected RelNode makeRel( RelOptCluster cluster, RelTraitSet traitSet, RelBuilder relBuilder, RelNode input, RexProgram program) { return LogicalCalc.create(input, program); } /** * Returns whether this <code>RelType</code> can implement a given expression. * * @param expr Expression * @param condition Whether expression is a condition * @return Whether this <code>RelType</code> can implement a given expression. */ public boolean canImplement(RexNode expr, boolean condition) { if (condition && !supportsCondition()) { return false; } try { expr.accept(new ImplementTester(this)); return true; } catch (CannotImplement e) { Util.swallow(e, null); return false; } } /** * Returns whether this tester's <code>RelType</code> can implement a given program. * * @param program Program * @return Whether this tester's <code>RelType</code> can implement a given program. 
*/ public boolean canImplement(RexProgram program) { if ((program.getCondition() != null) && !canImplement(program.getCondition(), true)) { return false; } for (RexNode expr : program.getExprList()) { if (!canImplement(expr, false)) { return false; } } return true; } } /** * Visitor which returns whether an expression can be implemented in a given type of relational * expression. */ private static class ImplementTester extends RexVisitorImpl<Void> { private final RelType relType; ImplementTester(RelType relType) { super(false); this.relType = relType; } @Override public Void visitCall(RexCall call) { if (!relType.canImplement(call)) { throw CannotImplement.INSTANCE; } return null; } @Override public Void visitDynamicParam(RexDynamicParam dynamicParam) { if (!relType.canImplement(dynamicParam)) { throw CannotImplement.INSTANCE; } return null; } @Override public Void visitFieldAccess(RexFieldAccess fieldAccess) { if (!relType.canImplement(fieldAccess)) { throw CannotImplement.INSTANCE; } return null; } @Override public Void visitLiteral(RexLiteral literal) { if (!relType.canImplement(literal)) { throw CannotImplement.INSTANCE; } return null; } } /** Control exception for {@link ImplementTester}. */ private static class CannotImplement extends RuntimeException { @SuppressWarnings("ThrowableInstanceNeverThrown") static final CannotImplement INSTANCE = new CannotImplement(); } /** * Shuttle which converts every reference to an input field in an expression to a reference to a * common sub-expression. 
*/ private static class InputToCommonExprConverter extends RexShuttle { private final int[] exprInverseOrdinals; private final int[] exprLevels; private final int level; private final int[] inputExprOrdinals; private final RexNode[] allExprs; InputToCommonExprConverter( int[] exprInverseOrdinals, int[] exprLevels, int level, int[] inputExprOrdinals, RexNode[] allExprs) { this.exprInverseOrdinals = exprInverseOrdinals; this.exprLevels = exprLevels; this.level = level; this.inputExprOrdinals = inputExprOrdinals; this.allExprs = allExprs; } @Override public RexNode visitInputRef(RexInputRef input) { final int index = exprInverseOrdinals[input.getIndex()]; assert index >= 0; return new RexLocalRef(index, input.getType()); } @Override public RexNode visitLocalRef(RexLocalRef local) { // A reference to a local variable becomes a reference to an input // if the local was computed at a previous level. final int localIndex = local.getIndex(); final int exprLevel = exprLevels[localIndex]; if (exprLevel < level) { if (allExprs[localIndex] instanceof RexLiteral) { // Expression is to be inlined. Use the original expression. return allExprs[localIndex]; } int inputIndex = indexOf(localIndex, inputExprOrdinals); assert inputIndex >= 0; return new RexLocalRef(inputIndex, local.getType()); } else { // It's a reference to what was a local expression at the // previous level, and was then projected. final int exprIndex = exprInverseOrdinals[localIndex]; return new RexLocalRef(exprIndex, local.getType()); } } } /** Finds the highest level used by any of the inputs of a given expression. 
*/ private static class MaxInputFinder extends RexVisitorImpl<Void> { int level; private final int[] exprLevels; MaxInputFinder(int[] exprLevels) { super(true); this.exprLevels = exprLevels; } @Override public Void visitLocalRef(RexLocalRef localRef) { int inputLevel = exprLevels[localRef.getIndex()]; level = Math.max(level, inputLevel); return null; } /** Returns the highest level of any of the inputs of an expression. */ public int maxInputFor(RexNode expr) { level = 0; expr.accept(this); return level; } } /** * Builds an array of the highest level which contains an expression which uses each expression as * an input. */ private static class HighestUsageFinder extends RexVisitorImpl<Void> { private final int[] maxUsingLevelOrdinals; private int currentLevel; HighestUsageFinder(RexNode[] exprs, int[] exprLevels) { super(true); this.maxUsingLevelOrdinals = new int[exprs.length]; Arrays.fill(maxUsingLevelOrdinals, -1); for (int i = 0; i < exprs.length; i++) { if (exprs[i] instanceof RexLiteral) { // Literals are always used directly. It never makes sense // to compute them at a lower level and project them to // where they are used. maxUsingLevelOrdinals[i] = -1; continue; } currentLevel = exprLevels[i]; @SuppressWarnings("argument.type.incompatible") final Void unused = exprs[i].accept(this); } } public int[] getMaxUsingLevelOrdinals() { return maxUsingLevelOrdinals; } @Override public Void visitLocalRef(RexLocalRef ref) { final int index = ref.getIndex(); maxUsingLevelOrdinals[index] = Math.max(maxUsingLevelOrdinals[index], currentLevel); return null; } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pulsar.broker.web;

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;

import org.apache.bookkeeper.test.PortManager;
import org.apache.pulsar.broker.MockedBookKeeperClientFactory;
import org.apache.pulsar.broker.PulsarService;
import org.apache.pulsar.broker.ServiceConfiguration;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.admin.PulsarAdminBuilder;
import org.apache.pulsar.client.admin.PulsarAdminException.ConflictException;
import org.apache.pulsar.client.impl.auth.AuthenticationTls;
import org.apache.pulsar.common.policies.data.ClusterData;
import org.apache.pulsar.common.util.SecurityUtility;
import org.apache.pulsar.zookeeper.MockedZooKeeperClientFactoryImpl;
import org.apache.zookeeper.CreateMode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Test;

import com.google.common.io.CharStreams;
import com.google.common.io.Closeables;

import io.netty.handler.ssl.util.InsecureTrustManagerFactory;

/**
 * Tests for the {@code WebService} class. Note that this test only covers the newly added ApiVersionFilter related
 * tests for now as this test class was added quite a bit after the class was written.
 *
 */
public class WebServiceTest {

    // Broker under test; recreated by setupEnv for each test, closed in teardown.
    private PulsarService pulsar;

    private final static int BROKER_WEBSERVICE_PORT = PortManager.nextFreePort();
    private final static int BROKER_WEBSERVICE_PORT_TLS = PortManager.nextFreePort();

    private static final String BROKER_URL_BASE = "http://localhost:" + BROKER_WEBSERVICE_PORT;
    private static final String BROKER_URL_BASE_TLS = "https://localhost:" + BROKER_WEBSERVICE_PORT_TLS;
    // Lookup endpoint used by makeHttpRequest; any web-service endpoint would do for these tests.
    private static final String BROKER_LOOKUP_URL = BROKER_URL_BASE
            + "/lookup/v2/destination/persistent/my-property/local/my-namespace/my-topic";
    private static final String BROKER_LOOKUP_URL_TLS = BROKER_URL_BASE_TLS
            + "/lookup/v2/destination/persistent/my-property/local/my-namespace/my-topic";

    private static final String TLS_SERVER_CERT_FILE_PATH = "./src/test/resources/certificate/server.crt";
    private static final String TLS_SERVER_KEY_FILE_PATH = "./src/test/resources/certificate/server.key";
    private static final String TLS_CLIENT_CERT_FILE_PATH = "./src/test/resources/certificate/client.crt";
    private static final String TLS_CLIENT_KEY_FILE_PATH = "./src/test/resources/certificate/client.key";

    /**
     * Test that the {@code WebService} class properly passes the allowUnversionedClients value. We do this by setting
     * allowUnversionedClients to true, then making a request with no version, which should go through.
     *
     */
    @Test
    public void testDefaultClientVersion() throws Exception {
        setupEnv(true, "1.0", true, false, false, false);

        try {
            // Make an HTTP request to lookup a namespace. The request should
            // succeed
            makeHttpRequest(false, false);
        } catch (Exception e) {
            Assert.fail("HTTP request to lookup a namespace shouldn't fail ", e);
        }
    }

    /**
     * Test that if enableTls option is enabled, WebService is available both on HTTP and HTTPS.
     *
     * @throws Exception
     */
    @Test
    public void testTlsEnabled() throws Exception {
        setupEnv(false, "1.0", false, true, false, false);

        // Make requests both HTTP and HTTPS. The requests should succeed
        try {
            makeHttpRequest(false, false);
        } catch (Exception e) {
            Assert.fail("HTTP request shouldn't fail ", e);
        }
        try {
            makeHttpRequest(true, false);
        } catch (Exception e) {
            Assert.fail("HTTPS request shouldn't fail ", e);
        }
    }

    /**
     * Test that if enableTls option is disabled, WebService is available only on HTTP.
     *
     * @throws Exception
     */
    @Test
    public void testTlsDisabled() throws Exception {
        setupEnv(false, "1.0", false, false, false, false);

        // Make requests both HTTP and HTTPS. Only the HTTP request should succeed
        try {
            makeHttpRequest(false, false);
        } catch (Exception e) {
            Assert.fail("HTTP request shouldn't fail ", e);
        }
        try {
            makeHttpRequest(true, false);
            Assert.fail("HTTPS request should fail ");
        } catch (Exception e) {
            // Nothing is listening on the TLS port when TLS is disabled.
            Assert.assertTrue(e.getMessage().contains("Connection refused"));
        }
    }

    /**
     * Test that if enableAuth option and allowInsecure option are enabled, WebService requires trusted/untrusted client
     * certificate.
     *
     * @throws Exception
     */
    @Test
    public void testTlsAuthAllowInsecure() throws Exception {
        setupEnv(false, "1.0", false, true, true, true);

        // Only the request with client certificate should succeed
        try {
            makeHttpRequest(true, false);
            Assert.fail("Request without client certficate should fail");
        } catch (Exception e) {
            Assert.assertTrue(e.getMessage().contains("HTTP response code: 401"));
        }
        try {
            makeHttpRequest(true, true);
        } catch (Exception e) {
            Assert.fail("Request with client certificate shouldn't fail", e);
        }
    }

    /**
     * Test that if enableAuth option is enabled, WebService requires trusted client certificate.
     *
     * @throws Exception
     */
    @Test
    public void testTlsAuthDisallowInsecure() throws Exception {
        setupEnv(false, "1.0", false, true, true, false);

        // Only the request with trusted client certificate should succeed
        try {
            makeHttpRequest(true, false);
            Assert.fail("Request without client certficate should fail");
        } catch (Exception e) {
            Assert.assertTrue(e.getMessage().contains("HTTP response code: 401"));
        }
        try {
            makeHttpRequest(true, true);
        } catch (Exception e) {
            Assert.fail("Request with client certificate shouldn't fail", e);
        }
    }

    @Test
    public void testSplitPath() {
        // splitPath returns the path component at the given (1-based) position.
        String result = PulsarWebResource.splitPath("prop/cluster/ns/topic1", 4);
        Assert.assertEquals(result, "topic1");
    }

    /**
     * Issues a GET to the broker's lookup URL and returns the response body.
     *
     * @param useTls whether to hit the HTTPS endpoint (with an insecure trust manager)
     * @param useAuth whether to present the client certificate/key for TLS client auth
     * @return the response body as a string
     * @throws Exception on connection failure or non-2xx responses (surfaced by URL.openStream)
     */
    private String makeHttpRequest(boolean useTls, boolean useAuth) throws Exception {
        InputStream response = null;
        try {
            if (useTls) {
                KeyManager[] keyManagers = null;
                if (useAuth) {
                    // Build an in-memory key store from the PEM client cert/key so the
                    // SSL context can present a client certificate.
                    Certificate[] tlsCert = SecurityUtility.loadCertificatesFromPemFile(TLS_CLIENT_CERT_FILE_PATH);
                    PrivateKey tlsKey = SecurityUtility.loadPrivateKeyFromPemFile(TLS_CLIENT_KEY_FILE_PATH);

                    KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
                    ks.load(null, null);
                    ks.setKeyEntry("private", tlsKey, "".toCharArray(), tlsCert);

                    KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
                    kmf.init(ks, "".toCharArray());
                    keyManagers = kmf.getKeyManagers();
                }
                // Trust-all manager: the test server uses a self-signed certificate.
                TrustManager[] trustManagers = InsecureTrustManagerFactory.INSTANCE.getTrustManagers();
                SSLContext sslCtx = SSLContext.getInstance("TLS");
                sslCtx.init(keyManagers, trustManagers, new SecureRandom());
                // NOTE(review): mutates process-wide default socket factory; fine for a
                // single-threaded test but would leak into other tests run in the same JVM.
                HttpsURLConnection.setDefaultSSLSocketFactory(sslCtx.getSocketFactory());
                response = new URL(BROKER_LOOKUP_URL_TLS).openStream();
            } else {
                response = new URL(BROKER_LOOKUP_URL).openStream();
            }
            // NOTE(review): InputStreamReader uses the platform default charset here —
            // confirm the endpoint always responds in ASCII/UTF-8.
            String resp = CharStreams.toString(new InputStreamReader(response));
            log.info("Response: {}", resp);
            return resp;
        } finally {
            Closeables.close(response, false);
        }
    }

    /**
     * Starts a mocked PulsarService with the requested web-service configuration and
     * registers the "local" cluster via the admin API.
     *
     * @param enableFilter enable the client-library version-check filter
     * @param minApiVersion minimum API version written to the /minApiVersion znode
     * @param allowUnversionedClients whether requests without a version header are accepted
     * @param enableTls expose the HTTPS port
     * @param enableAuth require TLS client-certificate authentication
     * @param allowInsecure accept untrusted client certificates
     */
    private void setupEnv(boolean enableFilter, String minApiVersion, boolean allowUnversionedClients,
            boolean enableTls, boolean enableAuth, boolean allowInsecure) throws Exception {
        Set<String> providers = new HashSet<>();
        providers.add("org.apache.pulsar.broker.authentication.AuthenticationProviderTls");

        Set<String> roles = new HashSet<>();
        roles.add("client");

        ServiceConfiguration config = new ServiceConfiguration();
        config.setAdvertisedAddress("localhost");
        config.setWebServicePort(BROKER_WEBSERVICE_PORT);
        config.setWebServicePortTls(BROKER_WEBSERVICE_PORT_TLS);
        config.setClientLibraryVersionCheckEnabled(enableFilter);
        config.setAuthenticationEnabled(enableAuth);
        config.setAuthenticationProviders(providers);
        config.setAuthorizationEnabled(false);
        config.setSuperUserRoles(roles);
        config.setTlsEnabled(enableTls);
        config.setTlsCertificateFilePath(TLS_SERVER_CERT_FILE_PATH);
        config.setTlsKeyFilePath(TLS_SERVER_KEY_FILE_PATH);
        config.setTlsAllowInsecureConnection(allowInsecure);
        // With allowInsecure there is no trust store; otherwise trust the client cert directly.
        config.setTlsTrustCertsFilePath(allowInsecure ? "" : TLS_CLIENT_CERT_FILE_PATH);
        config.setClusterName("local");
        config.setAdvertisedAddress("localhost"); // TLS certificate expects localhost
        config.setZookeeperServers("localhost:2181");
        pulsar = spy(new PulsarService(config));
        // Replace ZK and BK with in-memory mocks so no external services are needed.
        doReturn(new MockedZooKeeperClientFactoryImpl()).when(pulsar).getZooKeeperClientFactory();
        doReturn(new MockedBookKeeperClientFactory()).when(pulsar).newBookKeeperClientFactory();
        pulsar.start();

        try {
            pulsar.getZkClient().delete("/minApiVersion", -1);
        } catch (Exception ex) {
            // Best-effort cleanup: the znode may not exist on the first run.
        }
        // NOTE(review): passes a null ACL list — tolerated by the mocked ZK client;
        // a real ZooKeeper server would reject it. Also uses the platform default
        // charset via getBytes(); confirm ASCII-only version strings.
        pulsar.getZkClient().create("/minApiVersion", minApiVersion.getBytes(), null, CreateMode.PERSISTENT);

        String serviceUrl = BROKER_URL_BASE;
        PulsarAdminBuilder adminBuilder = PulsarAdmin.builder();
        if (enableTls && enableAuth) {
            // The plain HTTP port rejects unauthenticated requests, so the admin
            // client must use TLS with the client certificate.
            serviceUrl = BROKER_URL_BASE_TLS;
            Map<String, String> authParams = new HashMap<>();
            authParams.put("tlsCertFile", TLS_CLIENT_CERT_FILE_PATH);
            authParams.put("tlsKeyFile", TLS_CLIENT_KEY_FILE_PATH);
            adminBuilder.authentication(AuthenticationTls.class.getName(), authParams).allowTlsInsecureConnection(true);
        }
        PulsarAdmin pulsarAdmin = adminBuilder.serviceHttpUrl(serviceUrl).build();
        try {
            pulsarAdmin.clusters().createCluster(config.getClusterName(), new ClusterData(pulsar.getWebServiceAddress()));
        } catch (ConflictException ce) {
            // This is OK.
        } finally {
            pulsarAdmin.close();
        }
    }

    @AfterMethod(alwaysRun = true)
    void teardown() throws Exception {
        try {
            pulsar.close();
        } catch (Exception e) {
            Assert.fail("Got exception while closing the pulsar instance ", e);
        }
    }

    private static final Logger log = LoggerFactory.getLogger(WebServiceTest.class);
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 * Changes may cause incorrect behavior and will be lost if the code is
 * regenerated.
 */

package fixtures.azurespecials.implementation;

import retrofit2.Retrofit;
import fixtures.azurespecials.ApiVersionDefaults;
import com.google.common.reflect.TypeToken;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import fixtures.azurespecials.models.ErrorException;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in ApiVersionDefaults.
 *
 * NOTE: AutoRest-generated file — do not hand-edit logic; regenerate instead.
 * Each operation is exposed in four generated flavors: blocking, callback,
 * Observable&lt;Void&gt;, and Observable&lt;ServiceResponse&lt;Void&gt;&gt;.
 */
public class ApiVersionDefaultsImpl implements ApiVersionDefaults {
    /** The Retrofit service to perform REST calls. */
    private ApiVersionDefaultsService service;
    /** The service client containing this operation class. */
    private AutoRestAzureSpecialParametersTestClientImpl client;

    /**
     * Initializes an instance of ApiVersionDefaultsImpl.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public ApiVersionDefaultsImpl(Retrofit retrofit, AutoRestAzureSpecialParametersTestClientImpl client) {
        this.service = retrofit.create(ApiVersionDefaultsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for ApiVersionDefaults to be
     * used by Retrofit to perform actually REST calls.
     */
    interface ApiVersionDefaultsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azurespecials.ApiVersionDefaults getMethodGlobalValid" })
        @GET("azurespecials/apiVersion/method/string/none/query/global/2015-07-01-preview")
        Observable<Response<ResponseBody>> getMethodGlobalValid(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azurespecials.ApiVersionDefaults getMethodGlobalNotProvidedValid" })
        @GET("azurespecials/apiVersion/method/string/none/query/globalNotProvided/2015-07-01-preview")
        Observable<Response<ResponseBody>> getMethodGlobalNotProvidedValid(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azurespecials.ApiVersionDefaults getPathGlobalValid" })
        @GET("azurespecials/apiVersion/path/string/none/query/global/2015-07-01-preview")
        Observable<Response<ResponseBody>> getPathGlobalValid(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azurespecials.ApiVersionDefaults getSwaggerGlobalValid" })
        @GET("azurespecials/apiVersion/swagger/string/none/query/global/2015-07-01-preview")
        Observable<Response<ResponseBody>> getSwaggerGlobalValid(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    }

    /**
     * GET method with api-version modeled in global settings.
     * Blocking variant: runs the async pipeline and waits for its single result.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void getMethodGlobalValid() {
        getMethodGlobalValidWithServiceResponseAsync().toBlocking().single().body();
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> getMethodGlobalValidAsync(final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(getMethodGlobalValidWithServiceResponseAsync(), serviceCallback);
    }

    /**
     * GET method with api-version modeled in global settings.
     * Observable variant: unwraps the ServiceResponse envelope to its (Void) body.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> getMethodGlobalValidAsync() {
        return getMethodGlobalValidWithServiceResponseAsync().map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * GET method with api-version modeled in global settings.
     * Core variant: validates the client-level api-version, issues the request,
     * and converts the raw Retrofit response via the delegate.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> getMethodGlobalValidWithServiceResponseAsync() {
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.getMethodGlobalValid(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = getMethodGlobalValidDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to a successful (Void) ServiceResponse; any other status raises ErrorException.
    private ServiceResponse<Void> getMethodGlobalValidDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void getMethodGlobalNotProvidedValid() {
        getMethodGlobalNotProvidedValidWithServiceResponseAsync().toBlocking().single().body();
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> getMethodGlobalNotProvidedValidAsync(final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(getMethodGlobalNotProvidedValidWithServiceResponseAsync(), serviceCallback);
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> getMethodGlobalNotProvidedValidAsync() {
        return getMethodGlobalNotProvidedValidWithServiceResponseAsync().map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> getMethodGlobalNotProvidedValidWithServiceResponseAsync() {
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.getMethodGlobalNotProvidedValid(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = getMethodGlobalNotProvidedValidDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to a successful (Void) ServiceResponse; any other status raises ErrorException.
    private ServiceResponse<Void> getMethodGlobalNotProvidedValidDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void getPathGlobalValid() {
        getPathGlobalValidWithServiceResponseAsync().toBlocking().single().body();
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> getPathGlobalValidAsync(final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(getPathGlobalValidWithServiceResponseAsync(), serviceCallback);
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> getPathGlobalValidAsync() {
        return getPathGlobalValidWithServiceResponseAsync().map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> getPathGlobalValidWithServiceResponseAsync() {
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.getPathGlobalValid(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = getPathGlobalValidDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to a successful (Void) ServiceResponse; any other status raises ErrorException.
    private ServiceResponse<Void> getPathGlobalValidDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void getSwaggerGlobalValid() {
        getSwaggerGlobalValidWithServiceResponseAsync().toBlocking().single().body();
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> getSwaggerGlobalValidAsync(final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(getSwaggerGlobalValidWithServiceResponseAsync(), serviceCallback);
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> getSwaggerGlobalValidAsync() {
        return getSwaggerGlobalValidWithServiceResponseAsync().map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * GET method with api-version modeled in global settings.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> getSwaggerGlobalValidWithServiceResponseAsync() {
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.getSwaggerGlobalValid(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = getSwaggerGlobalValidDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to a successful (Void) ServiceResponse; any other status raises ErrorException.
    private ServiceResponse<Void> getSwaggerGlobalValidDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.server.rest; import java.text.DateFormat; import java.text.NumberFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Locale; import java.util.Map; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.apache.drill.exec.proto.UserBitShared.CoreOperatorType; import org.apache.drill.exec.proto.UserBitShared.MajorFragmentProfile; import org.apache.drill.exec.proto.UserBitShared.MinorFragmentProfile; import org.apache.drill.exec.proto.UserBitShared.OperatorProfile; import org.apache.drill.exec.proto.UserBitShared.QueryProfile; import org.apache.drill.exec.proto.UserBitShared.StreamProfile; import org.apache.drill.exec.proto.helper.QueryIdHelper; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.Collections2; import com.google.common.collect.Lists; import com.google.common.collect.Maps; public class ProfileWrapper { public QueryProfile profile; public String id; public 
ProfileWrapper(QueryProfile profile) { this.profile = profile; this.id = QueryIdHelper.getQueryId(profile.getId()); } public QueryProfile getProfile() { return profile; } public String getId() { return id; } public String getQueryId() { return QueryIdHelper.getQueryId(profile.getId()); } public List<OperatorWrapper> getOperatorProfiles() { List<OperatorWrapper> ows = Lists.newArrayList(); Map<ImmutablePair<Integer, Integer>, List<ImmutablePair<OperatorProfile, Integer>>> opmap = Maps.newHashMap(); List<MajorFragmentProfile> majors = new ArrayList<>(profile.getFragmentProfileList()); Collections.sort(majors, Comparators.majorIdCompare); for (MajorFragmentProfile major : majors) { List<MinorFragmentProfile> minors = new ArrayList<>(major.getMinorFragmentProfileList()); Collections.sort(minors, Comparators.minorIdCompare); for (MinorFragmentProfile minor : minors) { List<OperatorProfile> ops = new ArrayList<>(minor.getOperatorProfileList()); Collections.sort(ops, Comparators.operatorIdCompare); for (OperatorProfile op : ops) { ImmutablePair<Integer, Integer> ip = new ImmutablePair<>( major.getMajorFragmentId(), op.getOperatorId()); if (!opmap.containsKey(ip)) { List<ImmutablePair<OperatorProfile, Integer>> l = Lists.newArrayList(); opmap.put(ip, l); } opmap.get(ip).add(new ImmutablePair<>(op, minor.getMinorFragmentId())); } } } List<ImmutablePair<Integer, Integer>> keys = new ArrayList<>(opmap.keySet()); Collections.sort(keys); ImmutablePair<OperatorProfile, Integer> val; for (ImmutablePair<Integer, Integer> ip : keys) { ows.add(new OperatorWrapper(ip.getLeft(), opmap.get(ip))); } return ows; } public List<FragmentWrapper> getFragmentProfiles() { List<FragmentWrapper> fws = Lists.newArrayList(); List<MajorFragmentProfile> majors = new ArrayList<>(profile.getFragmentProfileList()); Collections.sort(majors, Comparators.majorIdCompare); for (MajorFragmentProfile major : majors) { fws.add(new FragmentWrapper(major)); } return fws; } public String getFragmentsOverview() { 
final String[] columns = {"Major Fragment", "Minor Fragments Reporting", "First Start", "Last Start", "First End", "Last End", "tmin", "tavg", "tmax"}; TableBuilder tb = new TableBuilder(columns); for (FragmentWrapper fw : getFragmentProfiles()) { fw.addSummary(tb); } return tb.toString(); } public String majorFragmentTimingProfile(MajorFragmentProfile major) { final String[] columns = {"Minor Fragment", "Start", "End", "Total Time", "Max Records", "Max Batches"}; TableBuilder builder = new TableBuilder(columns); ArrayList<MinorFragmentProfile> complete, incomplete; complete = new ArrayList<MinorFragmentProfile>(Collections2.filter( major.getMinorFragmentProfileList(), Filters.hasOperatorsAndTimes)); incomplete = new ArrayList<MinorFragmentProfile>(Collections2.filter( major.getMinorFragmentProfileList(), Filters.missingOperatorsOrTimes)); Collections.sort(complete, Comparators.minorIdCompare); for (MinorFragmentProfile minor : complete) { ArrayList<OperatorProfile> ops = new ArrayList<OperatorProfile>(minor.getOperatorProfileList()); long t0 = profile.getStart(); long biggestIncomingRecords = 0; long biggestBatches = 0; for (OperatorProfile op : ops) { long incomingRecords = 0; long batches = 0; for (StreamProfile sp : op.getInputProfileList()) { incomingRecords += sp.getRecords(); batches += sp.getBatches(); } biggestIncomingRecords = Math.max(biggestIncomingRecords, incomingRecords); biggestBatches = Math.max(biggestBatches, batches); } builder.appendCell(new OperatorPathBuilder().setMajor(major).setMinor(minor).build(), null); builder.appendMillis(minor.getStartTime() - t0, null); builder.appendMillis(minor.getEndTime() - t0, null); builder.appendMillis(minor.getEndTime() - minor.getStartTime(), null); builder.appendInteger(biggestIncomingRecords, null); builder.appendInteger(biggestBatches, null); } for (MinorFragmentProfile m : incomplete) { builder.appendCell( major.getMajorFragmentId() + "-" + m.getMinorFragmentId(), null); 
builder.appendRepeated(m.getState().toString(), null, 5); } return builder.toString(); } public String getOperatorsOverview() { final String [] columns = {"Operator", "Type", "Setup (min)", "Setup (avg)", "Setup (max)", "Process (min)", "Process (avg)", "Process (max)", "Wait (min)", "Wait (avg)", "Wait (max)"}; TableBuilder tb = new TableBuilder(columns); for (OperatorWrapper ow : getOperatorProfiles()) { ow.addSummary(tb); } return tb.toString(); } public String getOperatorsJSON() { StringBuilder sb = new StringBuilder("{"); String sep = ""; for (CoreOperatorType op : CoreOperatorType.values()) { sb.append(String.format("%s\"%d\" : \"%s\"", sep, op.ordinal(), op)); sep = ", "; } return sb.append("}").toString(); } private static class OperatorPathBuilder { private static final String OPERATOR_PATH_PATTERN = "%s-%s-%s"; private static final String DEFAULT = "xx"; private String major; private String minor; private String operator; public OperatorPathBuilder() { clear(); } public void clear() { major = DEFAULT; minor = DEFAULT; operator = DEFAULT; } // Utility to left pad strings protected String leftPad(String text) { return String.format("00%s", text).substring(text.length()); } public OperatorPathBuilder setMajor(MajorFragmentProfile major) { if (major!=null) { return setMajor(major.getMajorFragmentId()); } return this; } public OperatorPathBuilder setMajor(int newMajor) { major = leftPad(String.valueOf(newMajor)); return this; } public OperatorPathBuilder setMinor(MinorFragmentProfile minor) { if (minor!=null) { return setMinor(minor.getMinorFragmentId()); } return this; } public OperatorPathBuilder setMinor(int newMinor) { minor = leftPad(String.valueOf(newMinor)); return this; } public OperatorPathBuilder setOperator(OperatorProfile op) { if (op!=null) { return setOperator(op.getOperatorId()); } return this; } public OperatorPathBuilder setOperator(int newOp) { operator = leftPad(String.valueOf(newOp)); return this; } public String build() { StringBuffer sb = 
new StringBuffer(); return sb.append(major).append("-") .append(minor).append("-") .append(operator) .toString(); } } public class FragmentWrapper { private final MajorFragmentProfile major; public FragmentWrapper(MajorFragmentProfile major) { this.major = Preconditions.checkNotNull(major); } public String getDisplayName() { return String.format("Major Fragment: %s", new OperatorPathBuilder().setMajor(major).build()); } public String getId() { return String.format("fragment-%s", major.getMajorFragmentId()); } public void addSummary(TableBuilder tb) { final String fmt = " (%d)"; long t0 = profile.getStart(); ArrayList<MinorFragmentProfile> complete = new ArrayList<MinorFragmentProfile>( Collections2.filter(major.getMinorFragmentProfileList(), Filters.hasOperatorsAndTimes)); tb.appendCell(new OperatorPathBuilder().setMajor(major).build(), null); tb.appendCell(complete.size() + " / " + major.getMinorFragmentProfileCount(), null); if (complete.size() < 1) { tb.appendRepeated("", null, 7); return; } int li = complete.size() - 1; Collections.sort(complete, Comparators.startTimeCompare); tb.appendMillis(complete.get(0).getStartTime() - t0, String.format(fmt, complete.get(0).getMinorFragmentId())); tb.appendMillis(complete.get(li).getStartTime() - t0, String.format(fmt, complete.get(li).getMinorFragmentId())); Collections.sort(complete, Comparators.endTimeCompare); tb.appendMillis(complete.get(0).getEndTime() - t0, String.format(fmt, complete.get(0).getMinorFragmentId())); tb.appendMillis(complete.get(li).getEndTime() - t0, String.format(fmt, complete.get(li).getMinorFragmentId())); long total = 0; for (MinorFragmentProfile p : complete) { total += p.getEndTime() - p.getStartTime(); } Collections.sort(complete, Comparators.runTimeCompare); tb.appendMillis(complete.get(0).getEndTime() - complete.get(0).getStartTime(), String.format(fmt, complete.get(0).getMinorFragmentId())); tb.appendMillis((long) (total / complete.size()), null); 
tb.appendMillis(complete.get(li).getEndTime() - complete.get(li).getStartTime(), String.format(fmt, complete.get(li).getMinorFragmentId())); } public String getContent() { return majorFragmentTimingProfile(major); } } public class OperatorWrapper { private final int major; private List<ImmutablePair<OperatorProfile, Integer>> ops; public OperatorWrapper(int major, List<ImmutablePair<OperatorProfile, Integer>> ops) { assert ops.size() > 0; this.major = major; this.ops = ops; } public String getDisplayName() { OperatorProfile op = ops.get(0).getLeft(); String path = new OperatorPathBuilder().setMajor(major).setOperator(op).build(); CoreOperatorType operatorType = CoreOperatorType.valueOf(op.getOperatorType()); return String.format("%s - %s", path, operatorType == null ? "UKNOWN_OPERATOR" : operatorType.toString()); } public String getId() { return String.format("operator-%d-%d", major, ops.get(0).getLeft().getOperatorId()); } public String getContent() { final String [] columns = {"Minor Fragment", "Setup", "Process", "Wait", "Max Batches", "Max Records"}; TableBuilder builder = new TableBuilder(columns); for (ImmutablePair<OperatorProfile, Integer> ip : ops) { int minor = ip.getRight(); OperatorProfile op = ip.getLeft(); String path = new OperatorPathBuilder().setMajor(major).setMinor(minor).setOperator(op).build(); builder.appendCell(path, null); builder.appendNanos(op.getSetupNanos(), null); builder.appendNanos(op.getProcessNanos(), null); builder.appendNanos(op.getWaitNanos(), null); long maxBatches = Long.MIN_VALUE; long maxRecords = Long.MIN_VALUE; for (StreamProfile sp : op.getInputProfileList()) { maxBatches = Math.max(sp.getBatches(), maxBatches); maxRecords = Math.max(sp.getRecords(), maxRecords); } builder.appendInteger(maxBatches, null); builder.appendInteger(maxRecords, null); } return builder.toString(); } public void addSummary(TableBuilder tb) { OperatorProfile op = ops.get(0).getLeft(); String path = new 
OperatorPathBuilder().setMajor(major).setOperator(op).build(); tb.appendCell(path, null); CoreOperatorType operatorType = CoreOperatorType.valueOf(ops.get(0).getLeft().getOperatorType()); tb.appendCell(operatorType == null ? "UNKNOWN_OPERATOR" : operatorType.toString(), null); int li = ops.size() - 1; String fmt = " (%s)"; double setupSum = 0.0; double processSum = 0.0; double waitSum = 0.0; for (ImmutablePair<OperatorProfile, Integer> ip : ops) { setupSum += ip.getLeft().getSetupNanos(); processSum += ip.getLeft().getProcessNanos(); waitSum += ip.getLeft().getWaitNanos(); } Collections.sort(ops, Comparators.setupTimeSort); tb.appendNanos(ops.get(0).getLeft().getSetupNanos(), String.format(fmt, ops.get(0).getRight())); tb.appendNanos((long) (setupSum / ops.size()), null); tb.appendNanos(ops.get(li).getLeft().getSetupNanos(), String.format(fmt, ops.get(li).getRight())); Collections.sort(ops, Comparators.processTimeSort); tb.appendNanos(ops.get(0).getLeft().getProcessNanos(), String.format(fmt, ops.get(0).getRight())); tb.appendNanos((long) (processSum / ops.size()), null); tb.appendNanos(ops.get(li).getLeft().getProcessNanos(), String.format(fmt, ops.get(li).getRight())); Collections.sort(ops, Comparators.waitTimeSort); tb.appendNanos(ops.get(0).getLeft().getWaitNanos(), String.format(fmt, ops.get(0).getRight())); tb.appendNanos((long) (waitSum / ops.size()), null); tb.appendNanos(ops.get(li).getLeft().getWaitNanos(), String.format(fmt, ops.get(li).getRight())); } } static class Comparators { final static Comparator<MajorFragmentProfile> majorIdCompare = new Comparator<MajorFragmentProfile>() { public int compare(MajorFragmentProfile o1, MajorFragmentProfile o2) { return Long.compare(o1.getMajorFragmentId(), o2.getMajorFragmentId()); } }; final static Comparator<MinorFragmentProfile> minorIdCompare = new Comparator<MinorFragmentProfile>() { public int compare(MinorFragmentProfile o1, MinorFragmentProfile o2) { return Long.compare(o1.getMinorFragmentId(), 
o2.getMinorFragmentId()); } }; final static Comparator<MinorFragmentProfile> startTimeCompare = new Comparator<MinorFragmentProfile>() { public int compare(MinorFragmentProfile o1, MinorFragmentProfile o2) { return Long.compare(o1.getStartTime(), o2.getStartTime()); } }; final static Comparator<MinorFragmentProfile> endTimeCompare = new Comparator<MinorFragmentProfile>() { public int compare(MinorFragmentProfile o1, MinorFragmentProfile o2) { return Long.compare(o1.getEndTime(), o2.getEndTime()); } }; final static Comparator<MinorFragmentProfile> runTimeCompare = new Comparator<MinorFragmentProfile>() { public int compare(MinorFragmentProfile o1, MinorFragmentProfile o2) { return Long.compare(o1.getEndTime() - o1.getStartTime(), o2.getEndTime() - o2.getStartTime()); } }; final static Comparator<OperatorProfile> operatorIdCompare = new Comparator<OperatorProfile>() { public int compare(OperatorProfile o1, OperatorProfile o2) { return Long.compare(o1.getOperatorId(), o2.getOperatorId()); } }; final static Comparator<Pair<OperatorProfile, Integer>> setupTimeSort = new Comparator<Pair<OperatorProfile, Integer>>() { public int compare(Pair<OperatorProfile, Integer> o1, Pair<OperatorProfile, Integer> o2) { return Long.compare(o1.getLeft().getSetupNanos(), o2.getLeft().getSetupNanos()); } }; final static Comparator<Pair<OperatorProfile, Integer>> processTimeSort = new Comparator<Pair<OperatorProfile, Integer>>() { public int compare(Pair<OperatorProfile, Integer> o1, Pair<OperatorProfile, Integer> o2) { return Long.compare(o1.getLeft().getProcessNanos(), o2.getLeft().getProcessNanos()); } }; final static Comparator<Pair<OperatorProfile, Integer>> waitTimeSort = new Comparator<Pair<OperatorProfile, Integer>>() { public int compare(Pair<OperatorProfile, Integer> o1, Pair<OperatorProfile, Integer> o2) { return Long.compare(o1.getLeft().getWaitNanos(), o2.getLeft().getWaitNanos()); } }; } private static class Filters { final static Predicate<MinorFragmentProfile> hasOperators 
= new Predicate<MinorFragmentProfile>() { public boolean apply(MinorFragmentProfile arg0) { return arg0.getOperatorProfileCount() != 0; } }; final static Predicate<MinorFragmentProfile> hasTimes = new Predicate<MinorFragmentProfile>() { public boolean apply(MinorFragmentProfile arg0) { return arg0.hasStartTime() && arg0.hasEndTime(); } }; final static Predicate<MinorFragmentProfile> hasOperatorsAndTimes = Predicates.and(Filters.hasOperators, Filters.hasTimes); final static Predicate<MinorFragmentProfile> missingOperatorsOrTimes = Predicates.not(hasOperatorsAndTimes); } class TableBuilder { NumberFormat format = NumberFormat.getInstance(Locale.US); DateFormat dateFormat = new SimpleDateFormat("HH:mm:ss.SSS"); StringBuilder sb; int w = 0; int width; public TableBuilder(String[] columns) { sb = new StringBuilder(); width = columns.length; format.setMaximumFractionDigits(3); format.setMinimumFractionDigits(3); sb.append("<table class=\"table table-bordered text-right\">\n<tr>"); for (String cn : columns) { sb.append("<th>" + cn + "</th>"); } sb.append("</tr>\n"); } public void appendCell(String s, String link) { if (w == 0) { sb.append("<tr>"); } sb.append(String.format("<td>%s%s</td>", s, link != null ? link : "")); if (++w >= width) { sb.append("</tr>\n"); w = 0; } } public void appendRepeated(String s, String link, int n) { for (int i = 0; i < n; i++) { appendCell(s, link); } } public void appendTime(long d, String link) { appendCell(dateFormat.format(d), link); } public void appendMillis(long p, String link) { appendCell(format.format(p / 1000.0), link); } public void appendNanos(long p, String link) { appendMillis((long) (p / 1000.0 / 1000.0), link); } public void appendFormattedNumber(Number n, String link) { appendCell(format.format(n), link); } public void appendInteger(long l, String link) { appendCell(Long.toString(l), link); } @Override public String toString() { String rv; rv = sb.append("\n</table>").toString(); sb = null; return rv; } } }
package io.itwapp.rest;

import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import com.google.gson.JsonSyntaxException;
import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.Response;
import io.itwapp.Itwapp;
import io.itwapp.exception.InvalidRequestError;
import io.itwapp.exception.ResourceNotFoundException;
import io.itwapp.exception.UnauthorizedException;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import static org.junit.Assert.*;

/**
 * Integration tests for {@code ApiRequest}.
 *
 * NOTE(review): these tests hit the live itwapp API (credentials come from the
 * {@code itwappApiKey}/{@code itwappApiSecret} environment variables) and use
 * reflection to reach the private {@code sign}/{@code parseResult} helpers.
 * Method order matters: the interview created in {@code f_testPostRequest} is
 * reused by {@code g_testPutRequest} and deleted in {@code h_testDeleteRequest},
 * hence {@link FixMethodOrder} with name-ascending sorting.
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class ApiRequestTest {

    // Shared across test methods: id of the interview created by f_testPostRequest.
    private static String interviewId = null;

    @BeforeClass
    public static void setUpBeforeClass() {
        Itwapp.apiKey = System.getenv("itwappApiKey");
        Itwapp.secretKey = System.getenv("itwappApiSecret");
    }

    /** sign() must append apiKey, timestamp and signature as the only query params. */
    @Test
    public void a_testSignRequestWithoutQueryStringParam() {
        Method m = null;
        try {
            m = ApiRequest.class.getDeclaredMethod("sign", String.class, String.class);
        } catch (NoSuchMethodException e) {
            fail();
        }
        m.setAccessible(true);

        String result = null;
        try {
            result = (String) m.invoke(null, "GET", "/api/v1/test/");
        } catch (IllegalAccessException | InvocationTargetException e) {
            fail();
        }
        assertTrue(result.matches("/api/v1/test/\\?apiKey=.*&timestamp=.*&signature=.*"));
    }

    /** sign() must preserve existing query params and append its own with '&'. */
    @Test
    public void b_testSignRequestWithQueryStringParam() {
        Method m = null;
        try {
            m = ApiRequest.class.getDeclaredMethod("sign", String.class, String.class);
        } catch (NoSuchMethodException e) {
            fail();
        }
        m.setAccessible(true);

        String result = null;
        try {
            result = (String) m.invoke(null, "GET", "/api/v1/test/?foo=bar");
        } catch (IllegalAccessException | InvocationTargetException e) {
            fail();
        }
        assertTrue(result.matches("/api/v1/test/\\?foo=bar&apiKey=.*&timestamp=.*&signature=.*"));
    }

    /** An unsigned request yields 401; parseResult must map it to UnauthorizedException. */
    @Test(expected = UnauthorizedException.class)
    public void c_testParseResultWithUnauthorizedException() throws Throwable {
        Method m = null;
        try {
            m = ApiRequest.class.getDeclaredMethod("parseResult", Response.class);
        } catch (NoSuchMethodException e) {
            fail();
        }
        m.setAccessible(true);

        AsyncHttpClient asyncHttpClient = new AsyncHttpClient();
        Response r = null;
        try {
            Future<Response> f = asyncHttpClient.prepareGet(Itwapp.getApiBase() + "/api/v1/applicant/12").execute();
            r = f.get();
        } catch (InterruptedException | ExecutionException e) {
            fail();
        }

        // Service should respond not authorized
        assertEquals(401, r.getStatusCode());
        invokeAndRaiseExceptionOrFail(m, r);
    }

    /** A missing resource yields 404; parseResult must map it to ResourceNotFoundException. */
    @Test(expected = ResourceNotFoundException.class)
    public void c_testParseResultWithNotFoundException() throws Throwable {
        Method m = null;
        try {
            m = ApiRequest.class.getDeclaredMethod("parseResult", Response.class);
        } catch (NoSuchMethodException e) {
            fail();
        }
        m.setAccessible(true);

        AsyncHttpClient asyncHttpClient = new AsyncHttpClient();
        Response r = null;
        try {
            Future<Response> f = asyncHttpClient.prepareGet(Itwapp.getApiBase() + "/api/v1/not_found_page").execute();
            r = f.get();
        } catch (InterruptedException | ExecutionException e) {
            fail();
        }

        // Service should respond not found
        assertEquals(404, r.getStatusCode());
        invokeAndRaiseExceptionOrFail(m, r);
    }

    /** A signed POST with an empty body yields 400; parseResult must map it to InvalidRequestError. */
    @Test(expected = InvalidRequestError.class)
    public void c_testParseResultWithBadRequestException() throws Throwable {
        Method m = null;
        try {
            m = ApiRequest.class.getDeclaredMethod("sign", String.class, String.class);
        } catch (NoSuchMethodException e) {
            fail();
        }
        m.setAccessible(true);

        String result = null;
        try {
            result = (String) m.invoke(null, "POST", "/api/v1/applicant/");
        } catch (IllegalAccessException | InvocationTargetException e) {
            fail();
        }
        assertTrue(result.matches("/api/v1/applicant/\\?apiKey=.*&timestamp=.*&signature=.*"));

        m = null;
        try {
            m = ApiRequest.class.getDeclaredMethod("parseResult", Response.class);
        } catch (NoSuchMethodException e) {
            fail();
        }
        m.setAccessible(true);

        AsyncHttpClient asyncHttpClient = new AsyncHttpClient();
        Response r = null;
        try {
            Future<Response> f = asyncHttpClient.preparePost(Itwapp.getApiBase() + result)
                    .setHeader("Content-Type", "application/json")
                    .execute();
            r = f.get();
        } catch (InterruptedException | ExecutionException e) {
            fail();
        }

        // Service should respond bad request
        assertEquals(400, r.getStatusCode());
        invokeAndRaiseExceptionOrFail(m, r);
    }

    /** On 200 OK, parseResult must return the raw JSON body unchanged. */
    @Test
    public void d_testParseResultWithNormalResult() {
        Method m = null;
        try {
            m = ApiRequest.class.getDeclaredMethod("sign", String.class, String.class);
        } catch (NoSuchMethodException e) {
            fail();
        }
        m.setAccessible(true);

        String result = null;
        try {
            result = (String) m.invoke(null, "GET", "/api/v1/interview/");
        } catch (IllegalAccessException | InvocationTargetException e) {
            fail();
        }
        assertTrue(result.matches("/api/v1/interview/\\?apiKey=.*&timestamp=.*&signature=.*"));

        m = null;
        try {
            m = ApiRequest.class.getDeclaredMethod("parseResult", Response.class);
        } catch (NoSuchMethodException e) {
            fail();
        }
        m.setAccessible(true);

        AsyncHttpClient asyncHttpClient = new AsyncHttpClient();
        Response r = null;
        try {
            Future<Response> f = asyncHttpClient.prepareGet(Itwapp.getApiBase() + result).execute();
            r = f.get();
        } catch (InterruptedException | ExecutionException e) {
            fail();
        }

        // Service should respond OK
        assertEquals(200, r.getStatusCode());
        try {
            String json = (String) m.invoke(null, r);
            JsonElement element = new JsonParser().parse(json);
            assertTrue(element.isJsonArray());
        } catch (IllegalAccessException | InvocationTargetException | JsonSyntaxException e) {
            fail();
        }
    }

    /** End-to-end GET through the public API surface. */
    @Test
    public void e_testGetRequest() {
        String res = ApiRequest.get("/api/v1/interview/");
        JsonElement element = new JsonParser().parse(res);
        assertTrue(element.isJsonArray());
    }

    /** End-to-end POST; stores the created interview id for the g_/h_ tests. */
    @Test
    public void f_testPostRequest() {
        Map<String, Object> param = new HashMap<>();
        param.put("name", "interview 1");
        param.put("video", "");
        param.put("text", "");

        Map<String, Object> question = new HashMap<>();
        question.put("content", "question 1");
        question.put("readingTime", 60);
        question.put("answerTime", 60);
        question.put("number", 1);

        List<Map<String, Object>> questions = new ArrayList<>();
        questions.add(question);
        param.put("questions", questions);

        String res = ApiRequest.post("/api/v1/interview/", param);
        JsonElement element = new JsonParser().parse(res);
        assertTrue(element.isJsonObject());
        assertTrue(element.getAsJsonObject().has("_id"));
        ApiRequestTest.interviewId = element.getAsJsonObject().get("_id").getAsString();
    }

    /** End-to-end PUT against the interview created in f_testPostRequest. */
    @Test
    public void g_testPutRequest() {
        assertNotNull(ApiRequestTest.interviewId);

        Map<String, Object> param = new HashMap<>();
        param.put("name", "interview 1");
        param.put("video", "");
        param.put("text", "");

        Map<String, Object> question = new HashMap<>();
        question.put("content", "question 1 - Updated");
        question.put("readingTime", 60);
        question.put("answerTime", 60);
        question.put("number", 1);

        List<Map<String, Object>> questions = new ArrayList<>();
        questions.add(question);
        param.put("questions", questions);

        String res = ApiRequest.put("/api/v1/interview/" + ApiRequestTest.interviewId, param);
        JsonElement element = new JsonParser().parse(res);
        assertTrue(element.isJsonObject());
    }

    /** End-to-end DELETE; also cleans up the interview created by f_testPostRequest. */
    @Test
    public void h_testDeleteRequest() {
        assertNotNull(ApiRequestTest.interviewId);
        String res = ApiRequest.delete("/api/v1/interview/" + ApiRequestTest.interviewId);
        assertEquals("", res);
    }

    /**
     * Invokes parseResult reflectively and rethrows the underlying cause so the
     * {@code @Test(expected = ...)} annotations can match the real exception type
     * instead of the reflective InvocationTargetException wrapper.
     */
    private void invokeAndRaiseExceptionOrFail(Method m, Response r) throws Throwable {
        try {
            m.invoke(null, r);
            fail();
        } catch (IllegalAccessException e) {
            fail();
        } catch (InvocationTargetException e) {
            throw e.getCause();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 *
 */
package org.apache.geode.redis.internal.netty;

import java.util.ArrayList;
import java.util.List;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;

import org.apache.geode.redis.internal.statistics.RedisStats;

/**
 * This is the first part of the channel pipeline for Netty. Here incoming bytes are read and a
 * created {@link Command} is sent down the pipeline. It is unfortunate that this class is not
 * {@link io.netty.channel.ChannelHandler.Sharable} because no state is kept in this class. State is
 * kept by {@link ByteToMessageDecoder}, it may be worthwhile to look at a different decoder setup
 * as to avoid allocating a decoder for every new connection.
 * <p>
 * The code flow of the protocol parsing may not be exactly Java like, but this is done very
 * intentionally. It was found that in cases where large Redis requests are sent that end up being
 * fragmented, throwing exceptions when the command could not be fully parsed took up an enormous
 * amount of cpu time. The simplicity of the Redis protocol allows us to just back out and wait for
 * more data, while exceptions are left to malformed requests which should never happen if using a
 * proper Redis client.
 */
public class ByteToCommandDecoder extends ByteToMessageDecoder {

  /**
   * Important note
   * <p>
   * Do not use '' <-- java primitive chars. Redis uses {@link Coder#CHARSET} encoding so we should
   * not risk java handling char to byte conversions, rather just hard code {@link Coder#CHARSET}
   * chars as bytes
   */
  private static final byte rID = 13; // '\r';
  private static final byte nID = 10; // '\n';
  private static final byte bulkStringID = 36; // '$';
  private static final byte arrayID = 42; // '*';
  private static final int MAX_BULK_STRING_LENGTH = 512 * 1024 * 1024; // 512 MB

  private final RedisStats redisStats;

  public ByteToCommandDecoder(RedisStats redisStats) {
    this.redisStats = redisStats;
  }

  /**
   * Parses as many complete commands as the buffer currently contains. On an incomplete command,
   * parse() returns null and the reader index is rewound to the command's start so the partial
   * bytes are retried once more data arrives (the "back out and wait" strategy described on the
   * class javadoc).
   */
  @Override
  protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
    Command c;
    long bytesRead = 0;
    do {
      int startReadIndex = in.readerIndex();
      c = parse(in);
      if (c == null) {
        // Incomplete command: restore the reader index and wait for more bytes.
        in.readerIndex(startReadIndex);
        break;
      }
      // Only count bytes belonging to fully parsed commands.
      bytesRead += in.readerIndex() - startReadIndex;
      out.add(c);
    } while (in.isReadable()); // Try to take advantage of pipelining if it is being used
    redisStats.incNetworkBytesRead(bytesRead);
  }

  /**
   * Parses one RESP command. Every request must be a RESP array ('*'); anything else is a protocol
   * error. Returns null when the buffer does not yet hold a complete command.
   */
  private Command parse(ByteBuf buffer) throws RedisCommandParserException {
    if (buffer == null) {
      throw new NullPointerException();
    }
    if (!buffer.isReadable()) {
      return null;
    }

    byte firstB = buffer.readByte();
    if (firstB != arrayID) {
      throw new RedisCommandParserException(
          "Expected: " + (char) arrayID + " Actual: " + (char) firstB);
    }
    List<byte[]> commandElems = parseArray(buffer);
    if (commandElems == null) {
      return null;
    }
    return new Command(commandElems);
  }

  /**
   * Parses the RESP array following a '*' marker: a decimal length, CRLF, then that many bulk
   * strings. Returns null when more data is needed; throws on malformed input. The upper bound of
   * 1000000000 mirrors Redis's "invalid multibulk length" check.
   */
  private List<byte[]> parseArray(ByteBuf buffer) throws RedisCommandParserException {
    byte currentChar;
    int arrayLength = parseCurrentNumber(buffer);
    if (arrayLength == Integer.MIN_VALUE || !parseRN(buffer)) {
      return null;
    }
    if (arrayLength < 0 || arrayLength > 1000000000) {
      throw new RedisCommandParserException("invalid multibulk length");
    }

    List<byte[]> commandElems = new ArrayList<>(arrayLength);

    for (int i = 0; i < arrayLength; i++) {
      if (!buffer.isReadable()) {
        return null;
      }
      currentChar = buffer.readByte();
      if (currentChar == bulkStringID) {
        byte[] newBulkString = parseBulkString(buffer);
        if (newBulkString == null) {
          return null;
        }
        commandElems.add(newBulkString);
      } else {
        throw new RedisCommandParserException(
            "expected: \'$\', got \'" + (char) currentChar + "\'");
      }
    }
    return commandElems;
  }

  /**
   * Helper method to parse a bulk string when one is seen
   *
   * @param buffer Buffer to read from
   * @return byte[] representation of the Bulk String read, or null when the buffer does not yet
   *         contain the full payload plus its trailing CRLF
   * @throws RedisCommandParserException Thrown when there is illegal syntax
   */
  private byte[] parseBulkString(ByteBuf buffer) throws RedisCommandParserException {
    int bulkStringLength = parseCurrentNumber(buffer);
    if (bulkStringLength == Integer.MIN_VALUE) {
      return null;
    }
    if (bulkStringLength > MAX_BULK_STRING_LENGTH) {
      throw new RedisCommandParserException(
          "invalid bulk length, cannot exceed max length of " + MAX_BULK_STRING_LENGTH);
    }
    if (!parseRN(buffer)) {
      return null;
    }

    if (!buffer.isReadable(bulkStringLength)) {
      return null;
    }
    byte[] bulkString = new byte[bulkStringLength];
    buffer.readBytes(bulkString);

    if (!parseRN(buffer)) {
      return null;
    }

    return bulkString;
  }

  /**
   * Helper method to parse the number at the beginning of the buffer
   *
   * @param buffer Buffer to read
   * @return The number found at the beginning of the buffer, or Integer.MIN_VALUE as a sentinel
   *         when the buffer ran out before a non-digit byte was seen (more data needed)
   */
  private int parseCurrentNumber(ByteBuf buffer) {
    // NOTE(review): no overflow guard here — a hostile length with >10 digits could wrap
    // 'number'; callers bound the result afterwards (array length / bulk string length checks),
    // but a wrapped value could slip past as a small positive. TODO confirm upstream handling.
    int number = 0;
    int readerIndex = buffer.readerIndex();
    byte b = 0;
    while (true) {
      if (!buffer.isReadable()) {
        return Integer.MIN_VALUE;
      }
      b = buffer.readByte();
      if (Character.isDigit(b)) {
        number = number * 10 + (int) (b - '0');
        readerIndex++;
      } else {
        // Rewind to the first non-digit so the caller (parseRN) re-reads it.
        buffer.readerIndex(readerIndex);
        break;
      }
    }
    return number;
  }

  /**
   * Helper method that is called when the next characters are supposed to be "\r\n"
   *
   * @param buffer Buffer to read from
   * @return false when fewer than two bytes are readable (more data needed), true on a CRLF match
   * @throws RedisCommandParserException Thrown when the next two characters are not "\r\n"
   */
  private boolean parseRN(ByteBuf buffer) throws RedisCommandParserException {
    if (!buffer.isReadable(2)) {
      return false;
    }
    byte b = buffer.readByte();
    if (b != rID) {
      throw new RedisCommandParserException(
          "expected \'" + (char) rID + "\', got \'" + (char) b + "\'");
    }
    b = buffer.readByte();
    if (b != nID) {
      throw new RedisCommandParserException(
          "expected: \'" + (char) nID + "\', got \'" + (char) b + "\'");
    }
    return true;
  }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package br.com.minaciolog.gerenciador.dao;

import br.com.minaciolog.gerenciador.beans.Cliente;
import br.com.minaciolog.gerenciador.beans.EnderecoCliente;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;

/**
 * DAO for customer addresses (table {@code endereco_cliente}).
 * <p>
 * Every method opens a connection via {@code BancoDados}, runs one statement and
 * disconnects, re-throwing any {@link SQLException} after closing the connection.
 */
public class EnderecoClienteDAO implements DAO<EnderecoCliente> {

    BancoDados bd = new BancoDados();

    /**
     * Inserts a new address row.
     *
     * @param obj address to persist (its generated ENDE_ID is not read back)
     * @throws SQLException on any database error
     */
    @Override
    public void Incluir(EnderecoCliente obj) throws SQLException {
        try {
            bd.conectar();
            String strSql = "INSERT INTO endereco_cliente (ENDE_COMPLEMENTO, ENDE_BAIRRO, "
                    + "ENDE_CEP, ENDE_NUMERO, ENDE_LOGRADOURO, CLIENTE_CLIE_ID, TIPO_ENDERECO_TIEN_ID,"
                    + "CIDADE_CIDA_ID) VALUES (?,?,?,?,?,?,?,?)";
            PreparedStatement p = bd.connection.prepareStatement(strSql);
            p.setString(1, obj.getComplemento());
            p.setString(2, obj.getBairro());
            p.setString(3, obj.getCep());
            p.setString(4, obj.getNumero());
            p.setString(5, obj.getLogradouro());
            p.setInt(6, obj.getCodigoCliente());
            p.setInt(7, obj.getCodigoTipo());
            p.setInt(8, obj.getCodigoCidade());
            p.execute();
            p.close();
            bd.desconectar();
        } catch (SQLException ex) {
            bd.desconectar();
            throw ex;
        }
    }

    /**
     * Deletes a single address by its primary key.
     *
     * @param codigo ENDE_ID of the row to delete
     * @throws SQLException on any database error
     */
    @Override
    public void Excluir(int codigo) throws SQLException {
        try {
            bd.conectar();
            String strSql = "DELETE FROM endereco_cliente WHERE ENDE_ID = ?";
            PreparedStatement p = bd.connection.prepareStatement(strSql);
            p.setInt(1, codigo);
            p.execute();
            p.close();
            bd.desconectar();
        } catch (SQLException ex) {
            bd.desconectar();
            throw ex;
        }
    }

    /**
     * Updates every column of the address identified by {@code obj.getCodigo()}.
     *
     * @param obj address carrying the new values and the target ENDE_ID
     * @throws SQLException on any database error
     */
    @Override
    public void Alterar(EnderecoCliente obj) throws SQLException {
        try {
            bd.conectar();
            String strSql = "UPDATE endereco_cliente SET ENDE_COMPLEMENTO = ?, ENDE_BAIRRO = ?, "
                    + "ENDE_CEP = ?, ENDE_NUMERO = ?, ENDE_LOGRADOURO = ?, CLIENTE_CLIE_ID = ?, TIPO_ENDERECO_TIEN_ID = ?,"
                    + "CIDADE_CIDA_ID = ? WHERE ENDE_ID = ?";
            PreparedStatement p = bd.connection.prepareStatement(strSql);
            p.setString(1, obj.getComplemento());
            p.setString(2, obj.getBairro());
            p.setString(3, obj.getCep());
            p.setString(4, obj.getNumero());
            p.setString(5, obj.getLogradouro());
            p.setInt(6, obj.getCodigoCliente());
            p.setInt(7, obj.getCodigoTipo());
            p.setInt(8, obj.getCodigoCidade());
            p.setInt(9, obj.getCodigo());
            p.execute();
            p.close();
            bd.desconectar();
        } catch (SQLException ex) {
            bd.desconectar();
            throw ex;
        }
    }

    /**
     * Lists every address row.
     * <p>
     * FIX: the original query read {@code "SELECT (ENDE_COMPLEMENTO, ..."} — the stray
     * opening parenthesis after SELECT made the statement invalid SQL.
     *
     * @return all addresses (possibly empty, never null)
     * @throws SQLException on any database error
     */
    @Override
    public ArrayList<EnderecoCliente> Consultar() throws SQLException {
        try {
            ArrayList<EnderecoCliente> lista = new ArrayList<>();
            bd.conectar();
            Statement comando = bd.connection.createStatement();
            ResultSet rs = comando.executeQuery("SELECT ENDE_COMPLEMENTO, ENDE_BAIRRO, "
                    + "ENDE_CEP, ENDE_NUMERO, ENDE_LOGRADOURO, CLIENTE_CLIE_ID, TIPO_ENDERECO_TIEN_ID, "
                    + "CIDADE_CIDA_ID, ENDE_ID FROM endereco_cliente");
            while (rs.next()) {
                lista.add(mapRow(rs));
            }
            rs.close();
            comando.close();
            bd.desconectar();
            return lista;
        } catch (SQLException ex) {
            bd.desconectar();
            throw ex;
        }
    }

    /**
     * Fetches one address by primary key.
     *
     * @param codigo ENDE_ID to look up
     * @return the address, or null when no row matches
     * @throws SQLException on any database error
     */
    @Override
    public EnderecoCliente Consultar(int codigo) throws SQLException {
        try {
            EnderecoCliente obj = null;
            bd.conectar();
            String strSQL = "SELECT ENDE_COMPLEMENTO, ENDE_BAIRRO, "
                    + "ENDE_CEP, ENDE_NUMERO, ENDE_LOGRADOURO, CLIENTE_CLIE_ID, TIPO_ENDERECO_TIEN_ID, "
                    + "CIDADE_CIDA_ID, ENDE_ID FROM endereco_cliente WHERE ENDE_ID = ?";
            PreparedStatement p = bd.connection.prepareStatement(strSQL);
            p.setInt(1, codigo);
            ResultSet rs = p.executeQuery();
            if (rs.next()) {
                obj = mapRow(rs);
            }
            rs.close();
            p.close();
            bd.desconectar();
            return obj;
        } catch (SQLException ex) {
            bd.desconectar();
            throw ex;
        }
    }

    /**
     * Lists all addresses of one customer, joined with the city and address-type
     * descriptions.
     * <p>
     * FIX: the original concatenated the SQL without the separating commas and spaces
     * (e.g. {@code "A.ENDE_ID " + "B.CIDA_DESC" + "C.TIEN_DESC" + "FROM ..."} produced
     * {@code "...ENDE_ID B.CIDA_DESCC.TIEN_DESCFROM endereco_cliente AJOIN..."}),
     * which could never execute.
     *
     * @param codigo CLIENTE_CLIE_ID of the customer
     * @return the customer's addresses (possibly empty, never null)
     * @throws SQLException on any database error
     */
    public ArrayList<EnderecoCliente> ConsultarCliente(int codigo) throws SQLException {
        try {
            ArrayList<EnderecoCliente> lista = new ArrayList<>();
            bd.conectar();
            String strSQL = "SELECT "
                    + "A.ENDE_COMPLEMENTO, "
                    + "A.ENDE_BAIRRO, "
                    + "A.ENDE_CEP, "
                    + "A.ENDE_NUMERO, "
                    + "A.ENDE_LOGRADOURO, "
                    + "A.CLIENTE_CLIE_ID, "
                    + "A.TIPO_ENDERECO_TIEN_ID, "
                    + "A.CIDADE_CIDA_ID, "
                    + "A.ENDE_ID, "
                    + "B.CIDA_DESC, "
                    + "C.TIEN_DESC "
                    + "FROM endereco_cliente A "
                    + "JOIN cidade B "
                    + "ON A.CIDADE_CIDA_ID = B.CIDA_ID "
                    + "JOIN tipo_endereco C "
                    + "ON A.TIPO_ENDERECO_TIEN_ID = C.TIEN_ID "
                    + "WHERE A.CLIENTE_CLIE_ID = ?";
            PreparedStatement p = bd.connection.prepareStatement(strSQL);
            p.setInt(1, codigo);
            ResultSet rs = p.executeQuery();
            while (rs.next()) {
                EnderecoCliente obj = mapRow(rs);
                // Extra columns only present in this joined query.
                obj.setTipo(rs.getString("TIEN_DESC"));
                obj.setCidade(rs.getString("CIDA_DESC"));
                lista.add(obj);
            }
            rs.close();
            p.close();
            bd.desconectar();
            return lista;
        } catch (SQLException ex) {
            bd.desconectar();
            throw ex;
        }
    }

    /**
     * Deletes every address belonging to one customer.
     *
     * @param codigo CLIENTE_CLIE_ID whose addresses are removed
     * @throws SQLException on any database error
     */
    public void ExcluirCliente(int codigo) throws SQLException {
        try {
            bd.conectar();
            String strSql = "DELETE FROM endereco_cliente WHERE CLIENTE_CLIE_ID = ?";
            PreparedStatement p = bd.connection.prepareStatement(strSql);
            p.setInt(1, codigo);
            p.execute();
            p.close();
            bd.desconectar();
        } catch (SQLException ex) {
            bd.desconectar();
            throw ex;
        }
    }

    /**
     * Maps the endereco_cliente columns of the current {@link ResultSet} row to a bean.
     * Shared by all query methods to avoid the triplicated mapping code.
     */
    private EnderecoCliente mapRow(ResultSet rs) throws SQLException {
        EnderecoCliente obj = new EnderecoCliente();
        obj.setCodigo(rs.getInt("ENDE_ID"));
        obj.setBairro(rs.getString("ENDE_BAIRRO"));
        obj.setCep(rs.getString("ENDE_CEP"));
        obj.setCodigoCidade(rs.getInt("CIDADE_CIDA_ID"));
        obj.setCodigoCliente(rs.getInt("CLIENTE_CLIE_ID"));
        obj.setCodigoTipo(rs.getInt("TIPO_ENDERECO_TIEN_ID"));
        obj.setComplemento(rs.getString("ENDE_COMPLEMENTO"));
        obj.setLogradouro(rs.getString("ENDE_LOGRADOURO"));
        obj.setNumero(rs.getString("ENDE_NUMERO"));
        return obj;
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.monitor.query.implementation.metricsdefinitions.models;

import com.azure.core.annotation.Fluent;
import com.azure.monitor.query.models.AggregationType;
import com.azure.monitor.query.models.MetricClass;
import com.azure.monitor.query.models.MetricUnit;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;

/**
 * Describes the metadata of a single metric: identity, naming, unit,
 * aggregation behavior, availability intervals and dimensions.
 * All setters return {@code this} for fluent chaining.
 */
@Fluent
public final class MetricDefinition {
    /* Whether a dimension must be supplied when querying this metric. */
    @JsonProperty(value = "isDimensionRequired")
    private Boolean isDimensionRequired;

    /* Resource identifier of the resource that emitted the metric. */
    @JsonProperty(value = "resourceId")
    private String resourceId;

    /* Namespace the metric belongs to. */
    @JsonProperty(value = "namespace")
    private String namespace;

    /* Localizable name / display name of the metric. */
    @JsonProperty(value = "name")
    private LocalizableString name;

    /* Detailed description of this metric. */
    @JsonProperty(value = "displayDescription")
    private String displayDescription;

    /* Custom category name for this metric. */
    @JsonProperty(value = "category")
    private String category;

    /* The class of the metric. */
    @JsonProperty(value = "metricClass")
    private MetricClass metricClass;

    /* The unit of the metric. */
    @JsonProperty(value = "unit")
    private MetricUnit unit;

    /* Primary aggregation type defining how values are displayed. */
    @JsonProperty(value = "primaryAggregationType")
    private AggregationType primaryAggregationType;

    /* Aggregation types supported by this metric. */
    @JsonProperty(value = "supportedAggregationTypes")
    private List<AggregationType> supportedAggregationTypes;

    /* Aggregation intervals available for querying. */
    @JsonProperty(value = "metricAvailabilities")
    private List<MetricAvailability> metricAvailabilities;

    /* Resource identifier of the metric definition itself. */
    @JsonProperty(value = "id")
    private String id;

    /* Localizable names of the metric's dimensions. */
    @JsonProperty(value = "dimensions")
    private List<LocalizableString> dimensions;

    /**
     * Whether a dimension is required when querying this metric.
     *
     * @return the isDimensionRequired value.
     */
    public Boolean isDimensionRequired() {
        return isDimensionRequired;
    }

    /**
     * Sets whether a dimension is required when querying this metric.
     *
     * @param isDimensionRequired the isDimensionRequired value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setIsDimensionRequired(Boolean isDimensionRequired) {
        this.isDimensionRequired = isDimensionRequired;
        return this;
    }

    /**
     * Identifier of the resource that emitted the metric.
     *
     * @return the resourceId value.
     */
    public String getResourceId() {
        return resourceId;
    }

    /**
     * Sets the identifier of the resource that emitted the metric.
     *
     * @param resourceId the resourceId value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setResourceId(String resourceId) {
        this.resourceId = resourceId;
        return this;
    }

    /**
     * Namespace the metric belongs to.
     *
     * @return the namespace value.
     */
    public String getNamespace() {
        return namespace;
    }

    /**
     * Sets the namespace the metric belongs to.
     *
     * @param namespace the namespace value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setNamespace(String namespace) {
        this.namespace = namespace;
        return this;
    }

    /**
     * Localizable name / display name of the metric.
     *
     * @return the name value.
     */
    public LocalizableString getName() {
        return name;
    }

    /**
     * Sets the localizable name / display name of the metric.
     *
     * @param name the name value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setName(LocalizableString name) {
        this.name = name;
        return this;
    }

    /**
     * Detailed description of this metric.
     *
     * @return the displayDescription value.
     */
    public String getDisplayDescription() {
        return displayDescription;
    }

    /**
     * Sets the detailed description of this metric.
     *
     * @param displayDescription the displayDescription value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setDisplayDescription(String displayDescription) {
        this.displayDescription = displayDescription;
        return this;
    }

    /**
     * Custom category name for this metric.
     *
     * @return the category value.
     */
    public String getCategory() {
        return category;
    }

    /**
     * Sets the custom category name for this metric.
     *
     * @param category the category value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setCategory(String category) {
        this.category = category;
        return this;
    }

    /**
     * The class of the metric.
     *
     * @return the metricClass value.
     */
    public MetricClass getMetricClass() {
        return metricClass;
    }

    /**
     * Sets the class of the metric.
     *
     * @param metricClass the metricClass value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setMetricClass(MetricClass metricClass) {
        this.metricClass = metricClass;
        return this;
    }

    /**
     * The unit of the metric.
     *
     * @return the unit value.
     */
    public MetricUnit getUnit() {
        return unit;
    }

    /**
     * Sets the unit of the metric.
     *
     * @param unit the unit value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setUnit(MetricUnit unit) {
        this.unit = unit;
        return this;
    }

    /**
     * Primary aggregation type defining how values are displayed.
     *
     * @return the primaryAggregationType value.
     */
    public AggregationType getPrimaryAggregationType() {
        return primaryAggregationType;
    }

    /**
     * Sets the primary aggregation type defining how values are displayed.
     *
     * @param primaryAggregationType the primaryAggregationType value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setPrimaryAggregationType(AggregationType primaryAggregationType) {
        this.primaryAggregationType = primaryAggregationType;
        return this;
    }

    /**
     * Aggregation types supported by this metric.
     *
     * @return the supportedAggregationTypes value.
     */
    public List<AggregationType> getSupportedAggregationTypes() {
        return supportedAggregationTypes;
    }

    /**
     * Sets the aggregation types supported by this metric.
     *
     * @param supportedAggregationTypes the supportedAggregationTypes value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setSupportedAggregationTypes(List<AggregationType> supportedAggregationTypes) {
        this.supportedAggregationTypes = supportedAggregationTypes;
        return this;
    }

    /**
     * Aggregation intervals available for querying.
     *
     * @return the metricAvailabilities value.
     */
    public List<MetricAvailability> getMetricAvailabilities() {
        return metricAvailabilities;
    }

    /**
     * Sets the aggregation intervals available for querying.
     *
     * @param metricAvailabilities the metricAvailabilities value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setMetricAvailabilities(List<MetricAvailability> metricAvailabilities) {
        this.metricAvailabilities = metricAvailabilities;
        return this;
    }

    /**
     * Resource identifier of the metric definition.
     *
     * @return the id value.
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the resource identifier of the metric definition.
     *
     * @param id the id value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setId(String id) {
        this.id = id;
        return this;
    }

    /**
     * Localizable names of the metric's dimensions.
     *
     * @return the dimensions value.
     */
    public List<LocalizableString> getDimensions() {
        return dimensions;
    }

    /**
     * Sets the localizable names of the metric's dimensions.
     *
     * @param dimensions the dimensions value to set.
     * @return this {@link MetricDefinition} for chaining.
     */
    public MetricDefinition setDimensions(List<LocalizableString> dimensions) {
        this.dimensions = dimensions;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (getName() != null) {
            getName().validate();
        }
        if (getMetricAvailabilities() != null) {
            getMetricAvailabilities().forEach(MetricAvailability::validate);
        }
        if (getDimensions() != null) {
            getDimensions().forEach(LocalizableString::validate);
        }
    }
}
package com.github.cnenning.artiscm;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.io.IOUtils;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.cnenning.artiscm.ArtifactoryClient.Revision;
import com.thoughtworks.go.plugin.api.GoPlugin;
import com.thoughtworks.go.plugin.api.GoPluginIdentifier;
import com.thoughtworks.go.plugin.api.annotation.Extension;
import com.thoughtworks.go.plugin.api.response.GoPluginApiResponse;

/**
 * GoCD SCM-extension plugin that treats a directory listing on an Artifactory
 * server as a source of "revisions". Request dispatching happens in
 * {@link #handleApiRequest(String, String)}; the individual {@code handle*}
 * methods build the JSON-serializable maps the Go plugin API expects.
 */
@Extension
public class ArtifactoryScmPlugin extends AbstractArtifactoryPlugin implements GoPlugin {

    public static final String EXTENSION_NAME = "scm";
    private static final List<String> GO_SUPPORTED_VERSIONS = Arrays.asList("1.0");
    private static final GoPluginIdentifier GO_PLUGIN_ID = new GoPluginIdentifier(EXTENSION_NAME, GO_SUPPORTED_VERSIONS);

    // Request names defined by the GoCD SCM extension point.
    public static final String REQUEST_SCM_CONFIGURATION = "scm-configuration";
    public static final String REQUEST_SCM_VIEW = "scm-view";
    public static final String REQUEST_SCM_VALIDATE_CONFIGURATION = "validate-scm-configuration";
    public static final String REQUEST_SCM_CHECK_CONNECTION = "check-scm-connection";
    public static final String REQUEST_SCM_LATEST_REVISION = "latest-revision";
    public static final String REQUEST_SCM_LATEST_REVISIONS_SINCE = "latest-revisions-since";
    public static final String REQUEST_SCM_CHECKOUT = "checkout";

    public ArtifactoryScmPlugin() {
        // logger is inherited from AbstractArtifactoryPlugin
        logger.debug("extension instance created");
    }

    @Override
    public GoPluginIdentifier pluginIdentifier() {
        return GO_PLUGIN_ID;
    }

    /**
     * Dispatches a Go API request to the matching {@code handle*} method and
     * wraps its result as JSON.
     *
     * @param name request name (one of the {@code REQUEST_SCM_*} constants)
     * @param body raw JSON request body
     * @return JSON response, or {@code null} for unknown request names
     * @throws IOException    on JSON (de)serialization problems
     * @throws ParseException on date parsing problems
     */
    @Override
    protected GoPluginApiResponse handleApiRequest(String name, String body) throws IOException, ParseException {
        if (REQUEST_SCM_CONFIGURATION.equals(name)) {
            return toJson(handleScmConfig());
        } else if (REQUEST_SCM_VIEW.equals(name)) {
            return toJson(handleScmView());
        } else if (REQUEST_SCM_VALIDATE_CONFIGURATION.equals(name)) {
            return toJson(handleScmValidation(body));
        } else if (REQUEST_SCM_CHECK_CONNECTION.equals(name)) {
            return toJson(handleCheckScmConnection(body));
        } else if (REQUEST_SCM_LATEST_REVISION.equals(name)) {
            return toJson(handleLatestRevision(body));
        } else if (REQUEST_SCM_LATEST_REVISIONS_SINCE.equals(name)) {
            return toJson(handleLatestRevisionsSince(body));
        } else if (REQUEST_SCM_CHECKOUT.equals(name)) {
            return toJson(handleCheckout(body));
        }
        return null;
    }

    /**
     * Describes the SCM configuration fields (url, filename regex, version regex,
     * credentials, …) in the map format required by the Go plugin API. Fields
     * flagged {@code part-of-identity} contribute to the material fingerprint.
     */
    private Map<String, Object> handleScmConfig() {
        // url
        Map<String, Object> map = new HashMap<>();
        map.put("display-name", "url");
        map.put("default-value", "http://artifactory.company.com/repository/path-to/dir-with-versions");
        map.put("part-of-identity", Boolean.TRUE);
        Map<String, Object> wrapper = new HashMap<>();
        wrapper.put("url", map);
        // filename pattern
        map = new HashMap<>();
        map.put("display-name", "filename regex");
        map.put("default-value", "");
        map.put("part-of-identity", Boolean.TRUE);
        wrapper.put("pattern", map);
        // version pattern
        map = new HashMap<>();
        map.put("display-name", "version regex");
        map.put("default-value", "");
        map.put("part-of-identity", Boolean.TRUE);
        wrapper.put("version_regex", map);
        // username
        map = new HashMap<>();
        map.put("display-name", "username");
        map.put("default-value", "");
        map.put("part-of-identity", Boolean.FALSE);
        wrapper.put("username", map);
        // password
        map = new HashMap<>();
        map.put("display-name", "password");
        map.put("default-value", "");
        map.put("part-of-identity", Boolean.FALSE);
        map.put("secure", Boolean.TRUE);
        wrapper.put("password", map);
        // dummy id
        map = new HashMap<>();
        map.put("display-name", "dummy id");
        map.put("default-value", "");
        map.put("part-of-identity", 
Boolean.TRUE); wrapper.put("dummy_id", map); // version only map = new HashMap<>(); map.put("display-name", "version.txt only"); map.put("default-value", "false"); map.put("part-of-identity", Boolean.FALSE); wrapper.put("version_only", map); return wrapper; } private Map<String, String> handleScmView() throws IOException { InputStream inputStream = getClass().getResourceAsStream("/scm-config.html"); ByteArrayOutputStream baos = new ByteArrayOutputStream(); IOUtils.copy(inputStream, baos); String view = baos.toString(); Map<String, String> map = new HashMap<>(); map.put("displayValue", "Artifactory SCM"); map.put("template", view); return map; } private List<Object> handleScmValidation(String inputJson) throws JsonParseException, JsonMappingException, IOException { Map config = new ObjectMapper().readValue(inputJson, Map.class); List<Object> valiErrors = new ArrayList<>(); String url = configValue(config, "url"); String pattern = configValue(config, "pattern"); logger.debug("validating url: " + url); logger.debug("validating pattern: " + pattern); List<String> validationMessagesUrl = validateUrl(url); List<String> validationMessagesPattern = validatePattern(pattern, false); addValidationErrors(valiErrors, "url", validationMessagesUrl); addValidationErrors(valiErrors, "pattern", validationMessagesPattern); return valiErrors; } private Map<String, Object> handleCheckScmConnection(String inputJson) throws JsonParseException, JsonMappingException, IOException { Map config = new ObjectMapper().readValue(inputJson, Map.class); String url = configValue(config, "url"); String versionRegex = configValue(config, "version_regex"); return checkConnection(url, versionRegex, userPw(config)); } private Map<String, Object> handleLatestRevision(String inputJson) throws JsonParseException, JsonMappingException, IOException { Map config = new ObjectMapper().readValue(inputJson, Map.class); String url = configValue(config, "url"); String versionRegex = configValue(config, 
"version_regex");
        logger.debug("obtaining latest revision of: " + url + ", with regex: " + versionRegex);
        Revision revision = new ArtifactoryClient().latestRevision(url, versionRegex, httpClient, userPw(config));
        Map<String, Object> revisionJson = buildRevisionJson(revision);
        Map<String, Object> map = new HashMap<>();
        map.put("revision", revisionJson);
        return map;
    }

    /**
     * Lists all revisions newer than the timestamp supplied by the Go server.
     *
     * @param inputJson raw JSON request body with configuration and the "since" timestamp
     * @return map with a {@code revisions} list (possibly empty)
     */
    private Map<String, Object> handleLatestRevisionsSince(String inputJson) throws JsonParseException, JsonMappingException, IOException {
        // NOTE(review): config maps are handled as raw java.util.Map throughout this
        // class because the JSON schema is dynamic; see configValue(..) for access.
        Map apiInput = new ObjectMapper().readValue(inputJson, Map.class);
        String url = configValue(apiInput, "url");
        String versionRegex = configValue(apiInput, "version_regex");
        Date since = dateFromApiInput(apiInput);
        logger.debug("obtaining latest revisions since '" + since + "' of: " + url + ", with regex: " + versionRegex);
        List<Revision> revisions = new ArtifactoryClient().latestRevisionsSince(url, versionRegex, httpClient, userPw(apiInput), since);
        List<Map<String, Object>> revJsonList = new ArrayList<>(revisions.size());
        for (Revision revision : revisions) {
            Map<String, Object> revisionJson = buildRevisionJson(revision);
            revJsonList.add(revisionJson);
        }
        Map<String, Object> map = new HashMap<>();
        map.put("revisions", revJsonList);
        return map;
    }

    /**
     * Performs the "checkout": either downloads the files of the requested
     * revision into the target directory, or (in "version only" mode) just
     * writes the revision string into {@code version.txt}. Failures are
     * reported in the returned status map rather than thrown, as the Go API
     * expects a status/messages response.
     *
     * @param inputJson raw JSON request body with configuration, target dir and revision
     * @return map with {@code status} ("success"/"failure") and {@code messages}
     */
    private Map<String, Object> handleCheckout(String inputJson) {
        String status;
        String msg;
        try {
            Map apiInput = new ObjectMapper().readValue(inputJson, Map.class);
            String url = configValue(apiInput, "url");
            String pattern = configValue(apiInput, "pattern");
            String targetDirPath = targetDirFromApiInput(apiInput);
            String rev = revisonFromApiInput(apiInput);
            boolean versionOnly = versionOnly(apiInput);
            // create target dir
            File targetDir = new File(targetDirPath);
            if (!targetDir.exists()) {
                logger.info("creating target dir: " + targetDirPath);
                targetDir.mkdirs();
            }
            // do checkout
            if (!versionOnly) {
                logger.debug("checking out, rev: '" + rev + "' from: " + url + ", pattern: " + pattern);
                // revision name is appended to the base URL to address the versioned directory
                url = url + rev;
                new ArtifactoryClient().downloadFiles(url, httpClient, userPw(apiInput), targetDir, pattern);
            } else {
                logger.debug("creating version file, rev: '" + rev + "' in: " + targetDir);
                File versionFile = new File(targetDir, "version.txt");
                try(PrintWriter writer = new PrintWriter(new FileOutputStream(versionFile))) {
                    writer.println(rev);
                }
            }
            status = "success";
            msg = "Successfully checked out";
        } catch (Exception e) {
            // deliberate catch-all: any failure must be turned into a "failure" status response
            status = "failure";
            msg = e.toString();
            logger.error("could not checkout", e);
        }
        Map<String, Object> map = new HashMap<>();
        map.put("status", status);
        map.put("messages", Arrays.asList(msg));
        return map;
    }

    /** Reads a value from the "scm-configuration" section of a request map. */
    protected String configValue(Map config, String key) {
        return configValue(config, "scm-configuration", key);
    }

    /** Whether "version only" mode is enabled in the given configuration. */
    protected boolean versionOnly(Map config) {
        String str = configValue(config, "version_only");
        return isTrue(str);
    }

    /** Extracts username/password credentials from the given configuration. */
    protected UserPw userPw(Map config) {
        return new UserPw(configValue(config, "username"), configValue(config, "password"));
    }
}
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.facebook.react.modules.websocket;

import android.util.Base64;

import java.io.IOException;
import java.lang.IllegalStateException;

import javax.annotation.Nullable;

import com.facebook.common.logging.FLog;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableMapKeySetIterator;
import com.facebook.react.bridge.ReadableType;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.common.ReactConstants;
import com.facebook.react.module.annotations.ReactModule;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import com.facebook.react.modules.network.ForwardingCookieHandler;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okhttp3.ResponseBody;
import okhttp3.ws.WebSocket;
import okhttp3.ws.WebSocketCall;
import okhttp3.ws.WebSocketListener;

import java.net.URISyntaxException;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
import java.util.concurrent.TimeUnit;

import okio.Buffer;
import okio.ByteString;

/**
 * Native module backing the JS WebSocket API. Each JS-side socket is tracked by
 * an integer id in {@link #mWebSocketConnections}; connection lifecycle and
 * message events are forwarded to JS as device events ("websocketOpen",
 * "websocketClosed", "websocketMessage", "websocketFailed").
 */
@ReactModule(name = "WebSocketModule", hasConstants = false)
public class WebSocketModule extends ReactContextBaseJavaModule {

  // Maps socket id (assigned on the JS side) -> open OkHttp WebSocket.
  private final Map<Integer, WebSocket> mWebSocketConnections = new HashMap<>();

  private ReactContext mReactContext;
  private ForwardingCookieHandler mCookieHandler;

  public WebSocketModule(ReactApplicationContext context) {
    super(context);
    mReactContext = context;
    mCookieHandler = new ForwardingCookieHandler(context);
  }

  /** Emits a device event to JS via the RCTDeviceEventEmitter bridge module. */
  private void sendEvent(String eventName, WritableMap params) {
    mReactContext
      .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
      .emit(eventName, params);
  }

  @Override
  public String getName() {
    return "WebSocketModule";
  }

  /**
   * Opens a WebSocket connection and registers it under {@code id}.
   * Cookies for the target origin and an "origin" header are added
   * automatically; user-supplied headers must be strings. Protocols containing
   * a comma are silently dropped (they would corrupt the
   * Sec-WebSocket-Protocol header).
   *
   * @param url       ws:// or wss:// endpoint
   * @param protocols optional sub-protocol names for Sec-WebSocket-Protocol
   * @param headers   optional extra request headers (string values only)
   * @param id        JS-assigned socket id used for all subsequent calls/events
   */
  @ReactMethod
  public void connect(
    final String url,
    @Nullable final ReadableArray protocols,
    @Nullable final ReadableMap headers,
    final int id) {
    OkHttpClient client = new OkHttpClient.Builder()
      .connectTimeout(10, TimeUnit.SECONDS)
      .writeTimeout(10, TimeUnit.SECONDS)
      .readTimeout(0, TimeUnit.MINUTES) // Disable timeouts for read
      .build();

    Request.Builder builder = new Request.Builder()
      .tag(id)
      .url(url);

    String cookie = getCookie(url);
    if (cookie != null) {
      builder.addHeader("Cookie", cookie);
    }

    if (headers != null) {
      ReadableMapKeySetIterator iterator = headers.keySetIterator();

      if (!headers.hasKey("origin")) {
        builder.addHeader("origin", getDefaultOrigin(url));
      }

      while (iterator.hasNextKey()) {
        String key = iterator.nextKey();
        if (ReadableType.String.equals(headers.getType(key))) {
          builder.addHeader(key, headers.getString(key));
        } else {
          FLog.w(
            ReactConstants.TAG,
            "Ignoring: requested " + key + ", value not a string");
        }
      }
    } else {
      builder.addHeader("origin", getDefaultOrigin(url));
    }

    if (protocols != null && protocols.size() > 0) {
      StringBuilder protocolsValue = new StringBuilder("");
      for (int i = 0; i < protocols.size(); i++) {
        String v = protocols.getString(i).trim();
        if (!v.isEmpty() && !v.contains(",")) {
          protocolsValue.append(v);
          protocolsValue.append(",");
        }
      }
      if (protocolsValue.length() > 0) {
        // drop the trailing comma appended by the loop above
        protocolsValue.replace(protocolsValue.length() - 1, protocolsValue.length(), "");
        builder.addHeader("Sec-WebSocket-Protocol", protocolsValue.toString());
      }
    }

    WebSocketCall.create(client, builder.build()).enqueue(new WebSocketListener() {

      @Override
      public void onOpen(WebSocket webSocket, Response response) {
        mWebSocketConnections.put(id, webSocket);
        WritableMap params = Arguments.createMap();
        params.putInt("id", id);
        sendEvent("websocketOpen", params);
      }

      @Override
      public void onClose(int code, String reason) {
        WritableMap params = Arguments.createMap();
        params.putInt("id", id);
        params.putInt("code", code);
        params.putString("reason", reason);
        sendEvent("websocketClosed", params);
      }

      @Override
      public void onFailure(IOException e, Response response) {
        notifyWebSocketFailed(id, e.getMessage());
      }

      @Override
      public void onPong(Buffer buffer) {
      }

      @Override
      public void onMessage(ResponseBody response) throws IOException {
        String message;
        try {
          // Binary frames are delivered to JS as base64 text; text frames as UTF-8.
          if (response.contentType() == WebSocket.BINARY) {
            message = Base64.encodeToString(response.source().readByteArray(), Base64.NO_WRAP);
          } else {
            message = response.source().readUtf8();
          }
        } catch (IOException e) {
          notifyWebSocketFailed(id, e.getMessage());
          return;
        }
        try {
          response.source().close();
        } catch (IOException e) {
          FLog.e(
            ReactConstants.TAG,
            "Could not close BufferedSource for WebSocket id " + id,
            e);
        }

        WritableMap params = Arguments.createMap();
        params.putInt("id", id);
        params.putString("data", message);
        params.putString("type", response.contentType() == WebSocket.BINARY ? "binary" : "text");
        sendEvent("websocketMessage", params);
      }
    });

    // Trigger shutdown of the dispatcher's executor so this process can exit cleanly
    client.dispatcher().executorService().shutdown();
  }

  /**
   * Closes the socket registered under {@code id}. A missing id is ignored to
   * mirror browser WebSocket semantics (closing an already-closed socket is a
   * no-op).
   */
  @ReactMethod
  public void close(int code, String reason, int id) {
    WebSocket client = mWebSocketConnections.get(id);
    if (client == null) {
      // WebSocket is already closed
      // Don't do anything, mirror the behaviour on web
      return;
    }
    try {
      client.close(code, reason);
      mWebSocketConnections.remove(id);
    } catch (Exception e) {
      FLog.e(
        ReactConstants.TAG,
        "Could not close WebSocket connection for id " + id,
        e);
    }
  }

  /** Sends a text frame on the socket registered under {@code id}. */
  @ReactMethod
  public void send(String message, int id) {
    WebSocket client = mWebSocketConnections.get(id);
    if (client == null) {
      // This is a programmer error
      throw new RuntimeException("Cannot send a message. Unknown WebSocket id " + id);
    }
    try {
      client.sendMessage(RequestBody.create(WebSocket.TEXT, message));
    } catch (IOException | IllegalStateException e) {
      notifyWebSocketFailed(id, e.getMessage());
    }
  }

  /** Sends a binary frame; {@code base64String} is decoded before sending. */
  @ReactMethod
  public void sendBinary(String base64String, int id) {
    WebSocket client = mWebSocketConnections.get(id);
    if (client == null) {
      // This is a programmer error
      throw new RuntimeException("Cannot send a message. Unknown WebSocket id " + id);
    }
    try {
      client.sendMessage(
        RequestBody.create(WebSocket.BINARY, ByteString.decodeBase64(base64String)));
    } catch (IOException | IllegalStateException e) {
      notifyWebSocketFailed(id, e.getMessage());
    }
  }

  /** Sends a ping frame (empty payload) on the socket registered under {@code id}. */
  @ReactMethod
  public void ping(int id) {
    WebSocket client = mWebSocketConnections.get(id);
    if (client == null) {
      // This is a programmer error
      throw new RuntimeException("Cannot send a message. Unknown WebSocket id " + id);
    }
    try {
      Buffer buffer = new Buffer();
      client.sendPing(buffer);
    } catch (IOException | IllegalStateException e) {
      notifyWebSocketFailed(id, e.getMessage());
    }
  }

  /** Emits a "websocketFailed" device event carrying the failure message. */
  private void notifyWebSocketFailed(int id, String message) {
    WritableMap params = Arguments.createMap();
    params.putInt("id", id);
    params.putString("message", message);
    sendEvent("websocketFailed", params);
  }

  /**
   * Get the default HTTP(S) origin for a specific WebSocket URI
   *
   * NOTE(review): the no-port branch appends a trailing "/" while the
   * with-port branch does not — looks inconsistent for an Origin header;
   * confirm against the JS consumer before changing.
   *
   * @param String uri
   * @return A string of the endpoint converted to HTTP protocol (http[s]://host[:port])
   */
  private static String getDefaultOrigin(String uri) {
    try {
      String defaultOrigin;
      String scheme = "";

      URI requestURI = new URI(uri);
      if (requestURI.getScheme().equals("wss")) {
        scheme += "https";
      } else if (requestURI.getScheme().equals("ws")) {
        scheme += "http";
      } else if (requestURI.getScheme().equals("http") || requestURI.getScheme().equals("https")) {
        scheme += requestURI.getScheme();
      }

      if (requestURI.getPort() != -1) {
        defaultOrigin = String.format(
          "%s://%s:%s",
          scheme,
          requestURI.getHost(),
          requestURI.getPort());
      } else {
        defaultOrigin = String.format("%s://%s/", scheme, requestURI.getHost());
      }

      return defaultOrigin;
    } catch (URISyntaxException e) {
      throw new IllegalArgumentException("Unable to set " + uri + " as default origin header");
    }
  }

  /**
   * Get the cookie for a specific domain
   *
   * @param String uri
   * @return The cookie header or null if none is set
   */
  private String getCookie(String uri) {
    try {
      URI origin = new URI(getDefaultOrigin(uri));
      // NOTE(review): raw HashMap matches the CookieHandler.get(URI, Map) signature here
      Map<String, List<String>> cookieMap = mCookieHandler.get(origin, new HashMap());
      List<String> cookieList = cookieMap.get("Cookie");

      if (cookieList == null || cookieList.isEmpty()) {
        return null;
      }

      return cookieList.get(0);
    } catch (URISyntaxException | IOException e) {
      throw new IllegalArgumentException("Unable to get cookie from " + uri);
    }
  }
}
package com.zooz.common.client.ecomm.beans.responses;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.zooz.common.client.ecomm.beans.ExternalRisk;
import com.zooz.common.client.ecomm.beans.Risk;
import com.zooz.common.client.ecomm.beans.ThreeDSecureParameters;

import java.util.Map;

/**
 * The AuthorizeResponse includes any of the response parameter returns for the corresponding requests.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class AuthorizeResponse extends ZooZProcessingResponseObject {

    /**
     * Define the response type.
     * Acceptable parameters: authorizeCompletion [AC], 3DSecure [3D], AliPay [AP]
     * authorizeCompletion - for regular authorization without using 3DSecure or AliPay
     * 3DSecure - for 3DSecure authentication response
     * AliPay - for AliPay / PayEase response
     */
    @JsonProperty
    private String responseType;

    /**
     * Raw authorization code as received from the acquirer.
     */
    @JsonProperty
    private String authorizationCode;

    /**
     * Reference id for the request, received from the processor.
     */
    @JsonProperty
    private String processorReferenceId;

    /**
     * The parameters which is needed for Three D Secure flow.
     */
    @JsonProperty
    private ThreeDSecureParameters obj3DSecure;

    /**
     * Fraud detection response as received from the processor.
     */
    @JsonProperty
    private Map fraudDetectionResponse;

    /**
     * The MID (Merchant ID) associated to the payment.
     */
    @JsonProperty
    private String merchantId;

    /**
     * The processor response result code
     */
    @JsonProperty
    private Integer processorResultCode;

    /**
     * The reconciliation id of the transaction, as received from the processor.
     */
    @JsonProperty
    private String reconciliationId;

    /**
     * The processor slip Number
     */
    @JsonProperty
    private String slipNumber;

    /**
     * The risk that was returned by ZooZ Risk Engine
     */
    @JsonProperty
    private Risk risk;

    /**
     * The name of the processor that processes the transaction
     */
    @JsonProperty
    private String processorName;

    // Risk assessment from an external (non-ZooZ) provider.
    @JsonProperty
    private ExternalRisk externalRisk;

    // Provider-specific extra data; schema depends on the provider.
    @JsonProperty
    private Map providerCustomData;

    /**
     * The raw response of the cv2Avs check
     */
    @JsonProperty
    private String cv2AvsRaw;

    /**
     * Instantiates a new Authorize response.
     */
    public AuthorizeResponse() {
    }

    /**
     * Gets the response type.
     *
     * @return the response type
     */
    public String getResponseType() {
        return responseType;
    }

    /**
     * Sets the response type.
     *
     * @param responseType the response type
     */
    public void setResponseType(String responseType) {
        this.responseType = responseType;
    }

    /**
     * Gets the authorization code.
     *
     * @return the authorization code
     */
    public String getAuthorizationCode() {
        return authorizationCode;
    }

    /**
     * Sets the authorization code.
     *
     * @param authorizationCode the authorization code
     */
    public void setAuthorizationCode(String authorizationCode) {
        this.authorizationCode = authorizationCode;
    }

    /**
     * Gets the processor reference id.
     *
     * @return the processor reference id
     */
    public String getProcessorReferenceId() {
        return processorReferenceId;
    }

    /**
     * Sets the processor reference id.
     *
     * @param processorReferenceId the processor reference id
     */
    public void setProcessorReferenceId(String processorReferenceId) {
        this.processorReferenceId = processorReferenceId;
    }

    /**
     * Gets the obj 3 d secure.
     *
     * @return the obj 3 d secure
     */
    public ThreeDSecureParameters getObj3DSecure() {
        return obj3DSecure;
    }

    /**
     * Sets the obj 3 d secure.
     *
     * @param obj3DSecure the obj 3 d secure
     */
    public void setObj3DSecure(ThreeDSecureParameters obj3DSecure) {
        this.obj3DSecure = obj3DSecure;
    }

    /**
     * Gets the fraud detection response.
     *
     * @return the fraud detection response
     */
    public Map getFraudDetectionResponse() {
        return fraudDetectionResponse;
    }

    /**
     * Sets the fraud detection response.
     *
     * @param fraudDetectionResponse the fraud detection response
     */
    public void setFraudDetectionResponse(Map fraudDetectionResponse) {
        this.fraudDetectionResponse = fraudDetectionResponse;
    }

    /**
     * Gets the merchant id.
     *
     * @return the merchant id
     */
    public String getMerchantId() {
        return merchantId;
    }

    /**
     * Sets the merchant id.
     *
     * @param merchantId the merchant id
     */
    public void setMerchantId(String merchantId) {
        this.merchantId = merchantId;
    }

    /**
     * Gets processor result code
     *
     * @return processor result code
     */
    public Integer getProcessorResultCode() {
        return processorResultCode;
    }

    /**
     * set processor result code
     *
     * @param processorResultCode the processor result code
     */
    public void setProcessorResultCode(Integer processorResultCode) {
        this.processorResultCode = processorResultCode;
    }

    /**
     * Gets reconciliation id.
     *
     * @return reconciliationId reconciliation id
     */
    public String getReconciliationId() {
        return reconciliationId;
    }

    /**
     * set reconciliation Id
     *
     * @param reconciliationId the reconciliation id
     */
    public void setReconciliationId(String reconciliationId) {
        this.reconciliationId = reconciliationId;
    }

    /**
     * gets the processor slip number
     *
     * @return slipNumber slip number
     */
    public String getSlipNumber() {
        return slipNumber;
    }

    /**
     * sets the processor slip number
     *
     * @param slipNumber the slip number
     */
    public void setSlipNumber(String slipNumber) {
        this.slipNumber = slipNumber;
    }

    /**
     * gets the risk
     *
     * @return risk risk
     */
    public Risk getRisk() {
        return risk;
    }

    /**
     * Sets the risk
     *
     * @param risk the risk
     */
    public void setRisk(Risk risk) {
        this.risk = risk;
    }

    /**
     * gets the name of the processor that processes the transaction
     *
     * @return processorName processor name
     */
    public String getProcessorName() {
        return processorName;
    }

    /**
     * Sets the name of the processor that processes the transaction
     *
     * @param processorName the processor name
     */
    public void setProcessorName(String processorName) {
        this.processorName = processorName;
    }

    /** Gets the external (non-ZooZ) risk assessment. */
    public ExternalRisk getExternalRisk() {
        return externalRisk;
    }

    /** Sets the external (non-ZooZ) risk assessment. */
    public void setExternalRisk(ExternalRisk externalRisk) {
        this.externalRisk = externalRisk;
    }

    /** Gets the provider-specific custom data map. */
    public Map getProviderCustomData() {
        return providerCustomData;
    }

    /** Sets the provider-specific custom data map. */
    public void setProviderCustomData(Map providerCustomData) {
        this.providerCustomData = providerCustomData;
    }

    /**
     * Gets the raw response of the cv2Avs check
     * @return the raw response of the cv2Avs check
     */
    public String getCv2AvsRaw() {
        return cv2AvsRaw;
    }

    /**
     * Sets the raw response of the cv2Avs check
     * @param cv2AvsRaw the raw response of the cv2Avs check
     */
    public void setCv2AvsRaw(String cv2AvsRaw) {
        this.cv2AvsRaw = cv2AvsRaw;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.parser.common;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.apache.sysml.parser.AssignmentStatement;
import org.apache.sysml.parser.BinaryExpression;
import org.apache.sysml.parser.BooleanExpression;
import org.apache.sysml.parser.BooleanIdentifier;
import org.apache.sysml.parser.BuiltinFunctionExpression;
import org.apache.sysml.parser.ConstIdentifier;
import org.apache.sysml.parser.DMLProgram;
import org.apache.sysml.parser.DataExpression;
import org.apache.sysml.parser.DataIdentifier;
import org.apache.sysml.parser.DoubleIdentifier;
import org.apache.sysml.parser.Expression;
import org.apache.sysml.parser.Expression.DataOp;
import org.apache.sysml.parser.FunctionCallIdentifier;
import org.apache.sysml.parser.IntIdentifier;
import org.apache.sysml.parser.LanguageException;
import org.apache.sysml.parser.MultiAssignmentStatement;
import org.apache.sysml.parser.OutputStatement;
import org.apache.sysml.parser.ParameterExpression;
import org.apache.sysml.parser.ParameterizedBuiltinFunctionExpression;
import org.apache.sysml.parser.PrintStatement;
import org.apache.sysml.parser.RelationalExpression;
import org.apache.sysml.parser.Statement;
import org.apache.sysml.parser.StringIdentifier;
import org.apache.sysml.parser.dml.DmlParser.BuiltinFunctionExpressionContext;
import org.apache.sysml.parser.dml.DmlSyntacticValidator;
import org.apache.sysml.parser.pydml.PydmlSyntacticValidator;

/**
 * Contains fields and (helper) methods common to {@link DmlSyntacticValidator} and {@link PydmlSyntacticValidator}
 */
public abstract class CommonSyntacticValidator {

	protected final CustomErrorListener errorListener;
	protected final String currentFile;
	protected String _workingDir = ".";   //current working directory
	protected Map<String,String> argVals = null;
	protected String sourceNamespace = null;
	// Track imported scripts to prevent infinite recursion
	protected static ThreadLocal<HashMap<String, String>> _scripts = new ThreadLocal<HashMap<String, String>>() {
		@Override protected HashMap<String, String> initialValue() { return new HashMap<String, String>(); }
	};
	// Map namespaces to full paths as defined only from source statements in this script (i.e., currentFile)
	protected HashMap<String, String> sources;
	// Names of new internal and external functions defined in this script (i.e., currentFile)
	protected Set<String> functions;

	/** Resets the per-thread imported-script tracking (call before parsing a new top-level script). */
	public static void init() {
		_scripts.get().clear();
	}

	/**
	 * @param errorListener   sink for validation errors/warnings; also supplies the current file name
	 * @param argVals         command-line argument values ($-variables)
	 * @param sourceNamespace namespace this script was sourced under, or null for the main script
	 * @param prepFunctions   pre-populated set of function names, or null to start empty
	 */
	public CommonSyntacticValidator(CustomErrorListener errorListener, Map<String,String> argVals, String sourceNamespace, Set<String> prepFunctions) {
		this.errorListener = errorListener;
		currentFile = errorListener.getCurrentFileName();
		this.argVals = argVals;
		this.sourceNamespace = sourceNamespace;
		sources = new HashMap<String, String>();
		functions = (null != prepFunctions) ? prepFunctions : new HashSet<String>();
	}

	/** Reports a validation error at an explicit line/column position. */
	protected void notifyErrorListeners(String message, int line, int charPositionInLine) {
		errorListener.validationError(line, charPositionInLine, message);
	}

	/** Reports a validation error at the position of the given token. */
	protected void notifyErrorListeners(String message, Token op) {
		errorListener.validationError(op.getLine(), op.getCharPositionInLine(), message);
	}

	/** Reports a validation warning at the position of the given token. */
	protected void raiseWarning(String message, Token op) {
		errorListener.validationWarning(op.getLine(), op.getCharPositionInLine(), message);
	}

	/**
	 * Obtain the namespace separator ({@code ::} for DML and {@code .} for
	 * PYDML) that is used to specify a namespace and a function in that
	 * namespace.
	 *
	 * @return The namespace separator
	 */
	public abstract String namespaceResolutionOp();

	/**
	 * Obtain the namespace and the function name as a two-element array based
	 * on the fully-qualified function name. If no namespace is supplied in
	 * front of the function name, the default namespace will be used.
	 *
	 * @param fullyQualifiedFunctionName
	 *            Namespace followed by separator ({@code ::} for DML and
	 *            {@code .} for PYDML) followed by function name (for example,
	 *            {@code mynamespace::myfunctionname}), or only function name if
	 *            the default namespace is used (for example,
	 *            {@code myfunctionname}).
	 * @return Two-element array consisting of namespace and function name, or
	 *         {@code null}.
	 */
	protected String[] getQualifiedNames(String fullyQualifiedFunctionName) {
		String splitStr = Pattern.quote(namespaceResolutionOp());
		String [] fnNames = fullyQualifiedFunctionName.split(splitStr);
		String functionName = "";
		String namespace = "";
		if(fnNames.length == 1) {
			namespace = DMLProgram.DEFAULT_NAMESPACE;
			functionName = fnNames[0].trim();
		}
		else if(fnNames.length == 2) {
			namespace = getQualifiedNamespace(fnNames[0].trim());
			functionName = fnNames[1].trim();
		}
		else
			return null;

		String[] retVal = new String[2];
		retVal[0] = namespace;
		retVal[1] = functionName;
		return retVal;
	}

	/** Resolves a namespace alias to its full source path when one was registered, else returns it unchanged. */
	protected String getQualifiedNamespace(String namespace) {
		String path = sources.get(namespace);
		return (path != null && path.length() > 0) ? path : namespace;
	}

	/** Registers a namespace -> file-path mapping; reports an error on redefinition. */
	protected void validateNamespace(String namespace, String filePath, ParserRuleContext ctx) {
		if (!sources.containsKey(namespace)) {
			sources.put(namespace, filePath);
		}
		else {
			notifyErrorListeners("Namespace Conflict: '" + namespace + "' already defined as " + sources.get(namespace), ctx.start);
		}
	}

	/** Dispatches builtin-function validation; currently only "write" has extra checks. */
	protected boolean validateBuiltinFunctions(String function) {
		String functionName = function.replaceAll(" ", "").trim();
		if(functionName.equals("write") || functionName.equals(DMLProgram.DEFAULT_NAMESPACE + namespaceResolutionOp() + "write")) {
			return validateBuiltinWriteFunction(function);
		}
		return true;
	}

	// Hook for subclasses; default accepts everything.
	protected boolean validateBuiltinWriteFunction(String function) {
		return true;
	}

	/** Copies source-position info (file, begin/end line/column) from the parse context onto the expression. */
	protected void setFileLineColumn(Expression expr, ParserRuleContext ctx) {
		String txt = ctx.getText();
		expr.setFilename(currentFile);
		expr.setBeginLine(ctx.start.getLine());
		expr.setBeginColumn(ctx.start.getCharPositionInLine());
		expr.setEndLine(ctx.stop.getLine());
		expr.setEndColumn(ctx.stop.getCharPositionInLine());
		// single-token contexts report identical begin/end; widen end column over the token text
		if(expr.getBeginColumn() == expr.getEndColumn() && expr.getBeginLine() == expr.getEndLine() && txt.length() > 1) {
			expr.setEndColumn(expr.getBeginColumn() + txt.length() - 1);
		}
	}

	protected void setFileLineColumn(Statement stmt, 
ParserRuleContext ctx) { String txt = ctx.getText(); stmt.setFilename(currentFile); stmt.setBeginLine(ctx.start.getLine()); stmt.setBeginColumn(ctx.start.getCharPositionInLine()); stmt.setEndLine(ctx.stop.getLine()); stmt.setEndColumn(ctx.stop.getCharPositionInLine()); if(stmt.getBeginColumn() == stmt.getEndColumn() && stmt.getBeginLine() == stmt.getEndLine() && txt.length() > 1) { stmt.setEndColumn(stmt.getBeginColumn() + txt.length() - 1); } } // For String literal "True/TRUE" public abstract String trueStringLiteral(); // For String literal "False/FALSE" public abstract String falseStringLiteral(); // -------------------------------------------------------------------- // HELPER METHODS FOR OVERRIDDEN VISITOR FUNCTIONS // -------------------------------------------------------------------- protected void binaryExpressionHelper(ParserRuleContext ctx, ExpressionInfo left, ExpressionInfo right, ExpressionInfo me, String op) { if(left.expr != null && right.expr != null) { Expression.BinaryOp bop = Expression.getBinaryOp(op); BinaryExpression be = new BinaryExpression(bop); be = new BinaryExpression(bop); be.setLeft(left.expr); be.setRight(right.expr); me.expr = be; setFileLineColumn(me.expr, ctx); } } protected void relationalExpressionHelper(ParserRuleContext ctx, ExpressionInfo left, ExpressionInfo right, ExpressionInfo me, String op) { if(left.expr != null && right.expr != null) { Expression.RelationalOp rop = Expression.getRelationalOp(op); RelationalExpression re = new RelationalExpression(rop); re.setLeft(left.expr); re.setRight(right.expr); me.expr = re; setFileLineColumn(me.expr, ctx); } } protected void booleanExpressionHelper(ParserRuleContext ctx, ExpressionInfo left, ExpressionInfo right, ExpressionInfo me, String op) { if(left.expr != null && right.expr != null) { Expression.BooleanOp bop = Expression.getBooleanOp(op); BooleanExpression re = new BooleanExpression(bop); re.setLeft(left.expr); re.setRight(right.expr); me.expr = re; 
setFileLineColumn(me.expr, ctx); } } protected void unaryExpressionHelper(ParserRuleContext ctx, ExpressionInfo left, ExpressionInfo me, String op) { if(left.expr != null) { Token start = ctx.start; String fileName = currentFile; int line = start.getLine(); int col = start.getCharPositionInLine(); if(left.expr instanceof IntIdentifier) { if(op.equals("-")) { ((IntIdentifier) left.expr).multiplyByMinusOne(); } me.expr = left.expr; } else if(left.expr instanceof DoubleIdentifier) { if(op.equals("-")) { ((DoubleIdentifier) left.expr).multiplyByMinusOne(); } me.expr = left.expr; } else { Expression right = new IntIdentifier(1, fileName, line, col, line, col); if(op.equals("-")) { right = new IntIdentifier(-1, fileName, line, col, line, col); } Expression.BinaryOp bop = Expression.getBinaryOp("*"); BinaryExpression be = new BinaryExpression(bop); be.setLeft(left.expr); be.setRight(right); me.expr = be; } setFileLineColumn(me.expr, ctx); } } protected void unaryBooleanExpressionHelper(ParserRuleContext ctx, ExpressionInfo left, ExpressionInfo me, String op) { if(left.expr != null) { Expression.BooleanOp bop = Expression.getBooleanOp(op); BooleanExpression be = new BooleanExpression(bop); be.setLeft(left.expr); me.expr = be; setFileLineColumn(me.expr, ctx); } } protected void constDoubleIdExpressionHelper(ParserRuleContext ctx, ExpressionInfo me) { try { Token start = ctx.start; double val = Double.parseDouble(ctx.getText()); int linePosition = start.getLine(); int charPosition = start.getCharPositionInLine(); me.expr = new DoubleIdentifier(val, currentFile, linePosition, charPosition, linePosition, charPosition); setFileLineColumn(me.expr, ctx); } catch(Exception e) { notifyErrorListeners("cannot parse the float value: \'" + ctx.getText() + "\'", ctx.getStart()); return; } } protected void constIntIdExpressionHelper(ParserRuleContext ctx, ExpressionInfo me) { try { Token start = ctx.start; long val = Long.parseLong(ctx.getText()); int linePosition = start.getLine(); int 
charPosition = start.getCharPositionInLine(); me.expr = new IntIdentifier(val, currentFile, linePosition, charPosition, linePosition, charPosition); setFileLineColumn(me.expr, ctx); } catch(Exception e) { notifyErrorListeners("cannot parse the int value: \'" + ctx.getText() + "\'", ctx.getStart()); return; } } protected String extractStringInQuotes(String text, boolean inQuotes) { String val = null; if(inQuotes) { if( (text.startsWith("\"") && text.endsWith("\"")) || (text.startsWith("\'") && text.endsWith("\'"))) { if(text.length() > 2) { val = text.substring(1, text.length()-1) .replaceAll("\\\\b","\b") .replaceAll("\\\\t","\t") .replaceAll("\\\\n","\n") .replaceAll("\\\\f","\f") .replaceAll("\\\\r","\r"); } else if(text.equals("\"\"") || text.equals("\'\'")) { val = ""; } } } else { val = text.replaceAll("\\\\b","\b") .replaceAll("\\\\t","\t") .replaceAll("\\\\n","\n") .replaceAll("\\\\f","\f") .replaceAll("\\\\r","\r"); } return val; } protected void constStringIdExpressionHelper(ParserRuleContext ctx, ExpressionInfo me) { String val = extractStringInQuotes(ctx.getText(), true); if(val == null) { notifyErrorListeners("incorrect string literal ", ctx.start); return; } int linePosition = ctx.start.getLine(); int charPosition = ctx.start.getCharPositionInLine(); me.expr = new StringIdentifier(val, currentFile, linePosition, charPosition, linePosition, charPosition); setFileLineColumn(me.expr, ctx); } protected void booleanIdentifierHelper(ParserRuleContext ctx, boolean val, ExpressionInfo info) { int linePosition = ctx.start.getLine(); int charPosition = ctx.start.getCharPositionInLine(); info.expr = new BooleanIdentifier(val, currentFile, linePosition, charPosition, linePosition, charPosition); setFileLineColumn(info.expr, ctx); } protected void exitDataIdExpressionHelper(ParserRuleContext ctx, ExpressionInfo me, ExpressionInfo dataInfo) { me.expr = dataInfo.expr; // If "The parameter $X either needs to be passed through commandline or initialized to default 
value" validation // error occurs, then dataInfo.expr is null which would cause a null pointer exception with the following code. // Therefore, check for null so that parsing can continue so all parsing issues can be determined. if (me.expr != null) { int line = ctx.start.getLine(); int col = ctx.start.getCharPositionInLine(); me.expr.setAllPositions(currentFile, line, col, line, col); setFileLineColumn(me.expr, ctx); } } protected ConstIdentifier getConstIdFromString(String varValue, Token start) { int linePosition = start.getLine(); int charPosition = start.getCharPositionInLine(); // Compare to "True/TRUE" if(varValue.equals(trueStringLiteral())) return new BooleanIdentifier(true, currentFile, linePosition, charPosition, linePosition, charPosition); // Compare to "False/FALSE" if(varValue.equals(falseStringLiteral())) return new BooleanIdentifier(false, currentFile, linePosition, charPosition, linePosition, charPosition); // Check for long literal // NOTE: we use exception handling instead of Longs.tryParse for backwards compatibility with guava <14.1 // Also the alternative of Ints.tryParse and falling back to double would not be lossless in all cases. 
try { long lval = Long.parseLong(varValue); return new IntIdentifier(lval, currentFile, linePosition, charPosition, linePosition, charPosition); } catch(Exception ex) { //continue } // Check for double literal // NOTE: we use exception handling instead of Doubles.tryParse for backwards compatibility with guava <14.0 try { double dval = Double.parseDouble(varValue); return new DoubleIdentifier(dval, currentFile, linePosition, charPosition, linePosition, charPosition); } catch(Exception ex) { //continue } // Otherwise it is a string literal (optionally enclosed within single or double quotes) String val = ""; String text = varValue; if( (text.startsWith("\"") && text.endsWith("\"")) || (text.startsWith("\'") && text.endsWith("\'"))) { if(text.length() > 2) { val = extractStringInQuotes(text, true); } } else { // the commandline parameters can be passed without any quotes val = extractStringInQuotes(text, false); } return new StringIdentifier(val, currentFile, linePosition, charPosition, linePosition, charPosition); } protected void fillExpressionInfoCommandLineParameters(String varName, ExpressionInfo dataInfo, Token start) { if(!varName.startsWith("$")) { notifyErrorListeners("commandline param doesnot start with $", start); return; } String varValue = null; for(Map.Entry<String, String> arg : this.argVals.entrySet()) { if(arg.getKey().equals(varName)) { if(varValue != null) { notifyErrorListeners("multiple values passed for the parameter " + varName + " via commandline", start); return; } else { varValue = arg.getValue(); } } } if(varValue == null) { return; } // Command line param cannot be empty string // If you want to pass space, please quote it if(varValue.equals("")) return; dataInfo.expr = getConstIdFromString(varValue, start); } protected void exitAssignmentStatementHelper(ParserRuleContext ctx, String lhs, ExpressionInfo dataInfo, Token lhsStart, ExpressionInfo rhs, StatementInfo info) { if(lhs.startsWith("$")) { notifyErrorListeners("assignment of 
commandline parameters is not allowed. (Quickfix: try using someLocalVariable=ifdef(" + lhs + ", default value))", ctx.start); return; } DataIdentifier target = null; if(dataInfo.expr instanceof DataIdentifier) { target = (DataIdentifier) dataInfo.expr; Expression source = rhs.expr; int line = ctx.start.getLine(); int col = ctx.start.getCharPositionInLine(); try { info.stmt = new AssignmentStatement(target, source, line, col, line, col); setFileLineColumn(info.stmt, ctx); } catch (LanguageException e) { // TODO: extract more meaningful info from this exception. notifyErrorListeners("invalid assignment", lhsStart); return; } } else { notifyErrorListeners("incorrect lvalue in assignment statement", lhsStart); return; } } // ----------------------------------------------------------------- // Helper Functions for exit*FunctionCall*AssignmentStatement // ----------------------------------------------------------------- protected void setPrintStatement(ParserRuleContext ctx, String functionName, ArrayList<ParameterExpression> paramExpression, StatementInfo thisinfo) { if(paramExpression.size() != 1) { notifyErrorListeners(functionName + "() has only one parameter", ctx.start); return; } Expression expr = paramExpression.get(0).getExpr(); if(expr == null) { notifyErrorListeners("cannot process " + functionName + "() function", ctx.start); return; } try { int line = ctx.start.getLine(); int col = ctx.start.getCharPositionInLine(); thisinfo.stmt = new PrintStatement(functionName, expr, line, col, line, col); setFileLineColumn(thisinfo.stmt, ctx); } catch (LanguageException e) { notifyErrorListeners("cannot process " + functionName + "() function", ctx.start); return; } } protected void setOutputStatement(ParserRuleContext ctx, ArrayList<ParameterExpression> paramExpression, StatementInfo info) { if(paramExpression.size() < 2){ notifyErrorListeners("incorrect usage of write function (at least 2 arguments required)", ctx.start); return; } if(paramExpression.get(0).getExpr() 
instanceof DataIdentifier) { String fileName = currentFile; int line = ctx.start.getLine(); int col = ctx.start.getCharPositionInLine(); HashMap<String, Expression> varParams = new HashMap<String, Expression>(); varParams.put(DataExpression.IO_FILENAME, paramExpression.get(1).getExpr()); for(int i = 2; i < paramExpression.size(); i++) { // DataExpression.FORMAT_TYPE, DataExpression.DELIM_DELIMITER, DataExpression.DELIM_HAS_HEADER_ROW, DataExpression.DELIM_SPARSE varParams.put(paramExpression.get(i).getName(), paramExpression.get(i).getExpr()); } DataExpression dataExpression = new DataExpression(DataOp.WRITE, varParams, fileName, line, col, line, col); info.stmt = new OutputStatement((DataIdentifier) paramExpression.get(0).getExpr(), DataOp.WRITE, fileName, line, col, line, col); setFileLineColumn(info.stmt, ctx); ((OutputStatement)info.stmt).setExprParams(dataExpression); } else { notifyErrorListeners("incorrect usage of write function", ctx.start); } } protected void setAssignmentStatement(ParserRuleContext ctx, StatementInfo info, DataIdentifier target, Expression expression) { try { info.stmt = new AssignmentStatement(target, expression, ctx.start.getLine(), ctx.start.getCharPositionInLine(), ctx.start.getLine(), ctx.start.getCharPositionInLine()); setFileLineColumn(info.stmt, ctx); } catch (LanguageException e) { // TODO: extract more meaningful info from this exception. notifyErrorListeners("invalid function call", ctx.start); return; } } /** * Information about built in functions converted to a common format between * PyDML and DML for the runtime. 
*/ public static class ConvertedDMLSyntax { public final String namespace; public final String functionName; public final ArrayList<ParameterExpression> paramExpression; public ConvertedDMLSyntax(String namespace, String functionName, ArrayList<ParameterExpression> paramExpression) { this.namespace = namespace; this.functionName = functionName; this.paramExpression = paramExpression; } }; /** * Converts PyDML/DML built in functions to a common format for the runtime. * @param ctx * @param namespace Namespace of the function * @param functionName Name of the builtin function * @param paramExpression Array of parameter names and values * @param fnName Token of the built in function identifier * @return */ protected abstract ConvertedDMLSyntax convertToDMLSyntax(ParserRuleContext ctx, String namespace, String functionName, ArrayList<ParameterExpression> paramExpression, Token fnName); /** * Function overridden for DML & PyDML that handles any language specific builtin functions * @param ctx * @param functionName * @param paramExpressions * @return instance of {@link Expression} */ protected abstract Expression handleLanguageSpecificFunction(ParserRuleContext ctx, String functionName, ArrayList<ParameterExpression> paramExpressions); /** Checks for builtin functions and does Action 'f'. 
* <br/> * Constructs the * appropriate {@link AssignmentStatement} from * {@link CommonSyntacticValidator#functionCallAssignmentStatementHelper(ParserRuleContext, Set, Set, Expression, StatementInfo, Token, Token, String, String, ArrayList, boolean) * or Assign to {@link Expression} from * {@link DmlSyntacticValidator#exitBuiltinFunctionExpression(BuiltinFunctionExpressionContext)} * * @param ctx * @param functionName * @param paramExpressions * @return true if a builtin function was found */ protected boolean buildForBuiltInFunction(ParserRuleContext ctx, String functionName, ArrayList<ParameterExpression> paramExpressions, Action f) { // In global namespace, so it can be a builtin function // Double verification: verify passed function name is a (non-parameterized) built-in function. String fileName = currentFile; int line = ctx.start.getLine(); int col = ctx.start.getCharPositionInLine(); try { if (functions.contains(functionName)) { // It is a user function definition (which takes precedence if name same as built-in) return false; } Expression lsf = handleLanguageSpecificFunction(ctx, functionName, paramExpressions); if (lsf != null){ setFileLineColumn(lsf, ctx); f.execute(lsf); return true; } BuiltinFunctionExpression bife = BuiltinFunctionExpression.getBuiltinFunctionExpression(functionName, paramExpressions, fileName, line, col, line, col); if (bife != null){ // It is a builtin function f.execute(bife); return true; } ParameterizedBuiltinFunctionExpression pbife = ParameterizedBuiltinFunctionExpression.getParamBuiltinFunctionExpression(functionName, paramExpressions, fileName, line, col, line, col); if (pbife != null){ // It is a parameterized builtin function f.execute(pbife); return true; } // built-in read, rand ... 
DataExpression dbife = DataExpression.getDataExpression(functionName, paramExpressions, fileName, line, col, line, col); if (dbife != null){ f.execute(dbife); return true; } } catch(Exception e) { notifyErrorListeners("unable to process builtin function expression " + functionName + ":" + e.getMessage(), ctx.start); return true; } return false; } protected void functionCallAssignmentStatementHelper(final ParserRuleContext ctx, Set<String> printStatements, Set<String> outputStatements, final Expression dataInfo, final StatementInfo info, final Token nameToken, Token targetListToken, String namespace, String functionName, ArrayList<ParameterExpression> paramExpression, boolean hasLHS) { ConvertedDMLSyntax convertedSyntax = convertToDMLSyntax(ctx, namespace, functionName, paramExpression, nameToken); if(convertedSyntax == null) { return; } else { namespace = convertedSyntax.namespace; functionName = convertedSyntax.functionName; paramExpression = convertedSyntax.paramExpression; } // For builtin functions without LHS if(namespace.equals(DMLProgram.DEFAULT_NAMESPACE) && !functions.contains(functionName)) { if (printStatements.contains(functionName)){ setPrintStatement(ctx, functionName, paramExpression, info); return; } else if (outputStatements.contains(functionName)){ setOutputStatement(ctx, paramExpression, info); return; } } if (!hasLHS){ notifyErrorListeners("function call needs to have lvalue (Quickfix: change it to \'tmpVar = " + functionName + "(...)\')", nameToken); return; } DataIdentifier target = null; if(dataInfo instanceof DataIdentifier) { target = (DataIdentifier) dataInfo; } else { notifyErrorListeners("incorrect lvalue for function call ", targetListToken); return; } // For builtin functions with LHS if(namespace.equals(DMLProgram.DEFAULT_NAMESPACE) && !functions.contains(functionName)){ final DataIdentifier ftarget = target; Action f = new Action() { @Override public void execute(Expression e) { setAssignmentStatement(ctx, info , ftarget, e); } }; 
boolean validBIF = buildForBuiltInFunction(ctx, functionName, paramExpression, f); if (validBIF) return; } // If builtin functions weren't found... FunctionCallIdentifier functCall = new FunctionCallIdentifier(paramExpression); functCall.setFunctionName(functionName); // Override default namespace for imported non-built-in function String inferNamespace = (sourceNamespace != null && sourceNamespace.length() > 0 && DMLProgram.DEFAULT_NAMESPACE.equals(namespace)) ? sourceNamespace : namespace; functCall.setFunctionNamespace(inferNamespace); functCall.setAllPositions(currentFile, ctx.start.getLine(), ctx.start.getCharPositionInLine(), ctx.stop.getLine(), ctx.stop.getCharPositionInLine()); setAssignmentStatement(ctx, info, target, functCall); } /** * To allow for different actions in * {@link CommonSyntacticValidator#functionCallAssignmentStatementHelper(ParserRuleContext, Set, Set, Expression, StatementInfo, Token, Token, String, String, ArrayList)} */ public static interface Action { public void execute(Expression e); } protected void setMultiAssignmentStatement(ArrayList<DataIdentifier> target, Expression expression, ParserRuleContext ctx, StatementInfo info) { info.stmt = new MultiAssignmentStatement(target, expression); info.stmt.setAllPositions(currentFile, ctx.start.getLine(), ctx.start.getCharPositionInLine(), ctx.start.getLine(), ctx.start.getCharPositionInLine()); setFileLineColumn(info.stmt, ctx); } // ----------------------------------------------------------------- // End of Helper Functions for exit*FunctionCall*AssignmentStatement // ----------------------------------------------------------------- }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: BseGuildReqMemberList.proto package com.xinqihd.sns.gameserver.proto; public final class XinqiBseGuildReqMemberList { private XinqiBseGuildReqMemberList() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface BseGuildReqMemberListOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .com.xinqihd.sns.gameserver.proto.GuildMember reqMembers = 1; java.util.List<com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember> getReqMembersList(); com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember getReqMembers(int index); int getReqMembersCount(); java.util.List<? extends com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder> getReqMembersOrBuilderList(); com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder getReqMembersOrBuilder( int index); } public static final class BseGuildReqMemberList extends com.google.protobuf.GeneratedMessage implements BseGuildReqMemberListOrBuilder { // Use BseGuildReqMemberList.newBuilder() to construct. 
private BseGuildReqMemberList(Builder builder) { super(builder); } private BseGuildReqMemberList(boolean noInit) {} private static final BseGuildReqMemberList defaultInstance; public static BseGuildReqMemberList getDefaultInstance() { return defaultInstance; } public BseGuildReqMemberList getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_fieldAccessorTable; } // repeated .com.xinqihd.sns.gameserver.proto.GuildMember reqMembers = 1; public static final int REQMEMBERS_FIELD_NUMBER = 1; private java.util.List<com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember> reqMembers_; public java.util.List<com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember> getReqMembersList() { return reqMembers_; } public java.util.List<? 
extends com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder> getReqMembersOrBuilderList() { return reqMembers_; } public int getReqMembersCount() { return reqMembers_.size(); } public com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember getReqMembers(int index) { return reqMembers_.get(index); } public com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder getReqMembersOrBuilder( int index) { return reqMembers_.get(index); } private void initFields() { reqMembers_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getReqMembersCount(); i++) { if (!getReqMembers(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < reqMembers_.size(); i++) { output.writeMessage(1, reqMembers_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < reqMembers_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, reqMembers_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static 
com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, 
extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberListOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_fieldAccessorTable; } // Construct using com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getReqMembersFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (reqMembersBuilder_ == null) { reqMembers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { reqMembersBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList.getDescriptor(); } public com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList getDefaultInstanceForType() { return com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList.getDefaultInstance(); } public com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList build() { com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList buildPartial() { com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList result = new 
com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList(this); int from_bitField0_ = bitField0_; if (reqMembersBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { reqMembers_ = java.util.Collections.unmodifiableList(reqMembers_); bitField0_ = (bitField0_ & ~0x00000001); } result.reqMembers_ = reqMembers_; } else { result.reqMembers_ = reqMembersBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList) { return mergeFrom((com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList other) { if (other == com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList.getDefaultInstance()) return this; if (reqMembersBuilder_ == null) { if (!other.reqMembers_.isEmpty()) { if (reqMembers_.isEmpty()) { reqMembers_ = other.reqMembers_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureReqMembersIsMutable(); reqMembers_.addAll(other.reqMembers_); } onChanged(); } } else { if (!other.reqMembers_.isEmpty()) { if (reqMembersBuilder_.isEmpty()) { reqMembersBuilder_.dispose(); reqMembersBuilder_ = null; reqMembers_ = other.reqMembers_; bitField0_ = (bitField0_ & ~0x00000001); reqMembersBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getReqMembersFieldBuilder() : null; } else { reqMembersBuilder_.addAllMessages(other.reqMembers_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getReqMembersCount(); i++) { if (!getReqMembers(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder subBuilder = com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addReqMembers(subBuilder.buildPartial()); break; } } } } private int bitField0_; // repeated .com.xinqihd.sns.gameserver.proto.GuildMember reqMembers = 1; private java.util.List<com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember> reqMembers_ = java.util.Collections.emptyList(); private void ensureReqMembersIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { reqMembers_ = new java.util.ArrayList<com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember>(reqMembers_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder> reqMembersBuilder_; public 
java.util.List<com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember> getReqMembersList() { if (reqMembersBuilder_ == null) { return java.util.Collections.unmodifiableList(reqMembers_); } else { return reqMembersBuilder_.getMessageList(); } } public int getReqMembersCount() { if (reqMembersBuilder_ == null) { return reqMembers_.size(); } else { return reqMembersBuilder_.getCount(); } } public com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember getReqMembers(int index) { if (reqMembersBuilder_ == null) { return reqMembers_.get(index); } else { return reqMembersBuilder_.getMessage(index); } } public Builder setReqMembers( int index, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember value) { if (reqMembersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReqMembersIsMutable(); reqMembers_.set(index, value); onChanged(); } else { reqMembersBuilder_.setMessage(index, value); } return this; } public Builder setReqMembers( int index, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder builderForValue) { if (reqMembersBuilder_ == null) { ensureReqMembersIsMutable(); reqMembers_.set(index, builderForValue.build()); onChanged(); } else { reqMembersBuilder_.setMessage(index, builderForValue.build()); } return this; } public Builder addReqMembers(com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember value) { if (reqMembersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReqMembersIsMutable(); reqMembers_.add(value); onChanged(); } else { reqMembersBuilder_.addMessage(value); } return this; } public Builder addReqMembers( int index, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember value) { if (reqMembersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReqMembersIsMutable(); reqMembers_.add(index, value); onChanged(); } else { reqMembersBuilder_.addMessage(index, value); } return this; } public Builder 
addReqMembers( com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder builderForValue) { if (reqMembersBuilder_ == null) { ensureReqMembersIsMutable(); reqMembers_.add(builderForValue.build()); onChanged(); } else { reqMembersBuilder_.addMessage(builderForValue.build()); } return this; } public Builder addReqMembers( int index, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder builderForValue) { if (reqMembersBuilder_ == null) { ensureReqMembersIsMutable(); reqMembers_.add(index, builderForValue.build()); onChanged(); } else { reqMembersBuilder_.addMessage(index, builderForValue.build()); } return this; } public Builder addAllReqMembers( java.lang.Iterable<? extends com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember> values) { if (reqMembersBuilder_ == null) { ensureReqMembersIsMutable(); super.addAll(values, reqMembers_); onChanged(); } else { reqMembersBuilder_.addAllMessages(values); } return this; } public Builder clearReqMembers() { if (reqMembersBuilder_ == null) { reqMembers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { reqMembersBuilder_.clear(); } return this; } public Builder removeReqMembers(int index) { if (reqMembersBuilder_ == null) { ensureReqMembersIsMutable(); reqMembers_.remove(index); onChanged(); } else { reqMembersBuilder_.remove(index); } return this; } public com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder getReqMembersBuilder( int index) { return getReqMembersFieldBuilder().getBuilder(index); } public com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder getReqMembersOrBuilder( int index) { if (reqMembersBuilder_ == null) { return reqMembers_.get(index); } else { return reqMembersBuilder_.getMessageOrBuilder(index); } } public java.util.List<? 
extends com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder> getReqMembersOrBuilderList() { if (reqMembersBuilder_ != null) { return reqMembersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(reqMembers_); } } public com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder addReqMembersBuilder() { return getReqMembersFieldBuilder().addBuilder( com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.getDefaultInstance()); } public com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder addReqMembersBuilder( int index) { return getReqMembersFieldBuilder().addBuilder( index, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.getDefaultInstance()); } public java.util.List<com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder> getReqMembersBuilderList() { return getReqMembersFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder> getReqMembersFieldBuilder() { if (reqMembersBuilder_ == null) { reqMembersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMember.Builder, com.xinqihd.sns.gameserver.proto.XinqiGuildMember.GuildMemberOrBuilder>( reqMembers_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); reqMembers_ = null; } return reqMembersBuilder_; } // @@protoc_insertion_point(builder_scope:com.xinqihd.sns.gameserver.proto.BseGuildReqMemberList) } static { defaultInstance = new BseGuildReqMemberList(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:com.xinqihd.sns.gameserver.proto.BseGuildReqMemberList) } private static 
com.google.protobuf.Descriptors.Descriptor internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\033BseGuildReqMemberList.proto\022 com.xinqi" + "hd.sns.gameserver.proto\032\021GuildMember.pro" + "to\"Z\n\025BseGuildReqMemberList\022A\n\nreqMember" + "s\030\001 \003(\0132-.com.xinqihd.sns.gameserver.pro" + "to.GuildMemberB\034B\032XinqiBseGuildReqMember" + "List" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_com_xinqihd_sns_gameserver_proto_BseGuildReqMemberList_descriptor, new java.lang.String[] { "ReqMembers", }, com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList.class, com.xinqihd.sns.gameserver.proto.XinqiBseGuildReqMemberList.BseGuildReqMemberList.Builder.class); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.xinqihd.sns.gameserver.proto.XinqiGuildMember.getDescriptor(), }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
/** * Copyright Pravega Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.pravega.segmentstore.server.logs; import io.pravega.common.Exceptions; import io.pravega.common.ObjectClosedException; import io.pravega.common.function.Callbacks; import io.pravega.common.util.ByteArraySegment; import io.pravega.segmentstore.server.TestDurableDataLog; import io.pravega.test.common.AssertExtensions; import io.pravega.test.common.ErrorInjector; import io.pravega.test.common.IntentionalException; import io.pravega.test.common.TestUtils; import io.pravega.test.common.ThreadPooledTestSuite; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Predicate; import java.util.stream.Collectors; import lombok.Cleanup; import lombok.val; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; /** * Unit tests for DataFrameBuilder class. 
*/
public class DataFrameBuilderTests extends ThreadPooledTestSuite {
    // Arbitrary container id; only used to tag the test DurableDataLog and the DataFrameReader.
    private static final int CONTAINER_ID = 1234567;
    private static final Duration TIMEOUT = Duration.ofSeconds(30);
    // "Small" records fit (several at a time) inside a single data frame ...
    private static final int SMALL_RECORD_MIN_SIZE = 0;
    private static final int SMALL_RECORD_MAX_SIZE = 128;
    // ... while "large" records (1KB-10KB against a 512-byte frame) are guaranteed to span multiple frames.
    private static final int LARGE_RECORD_MIN_SIZE = 1024;
    private static final int LARGE_RECORD_MAX_SIZE = 10240;
    private static final int FRAME_SIZE = 512;
    private static final int APPEND_DELAY_MILLIS = 1;
    private static final int RECORD_COUNT = 200;
    private static final TestLogItem.TestLogItemSerializer SERIALIZER = new TestLogItem.TestLogItemSerializer();

    // Global safety net: no individual test may run longer than TIMEOUT.
    @Rule
    public Timeout globalTimeout = Timeout.seconds(TIMEOUT.getSeconds());

    @Override
    protected int getThreadPoolSize() {
        return 5;
    }

    /**
     * Tests the happy case: append a set of LogItems, and make sure that frames that get output contain all of them.
     * For this test, there is no delay in the DurableDataLog append implementations - it is as close to sync as possible.
     */
    @Test
    public void testAppendNoFailureNoDelay() throws Exception {
        testAppendNoFailure(0);
    }

    /**
     * Tests the happy case: append a set of LogItems, and make sure that frames that get output contain all of them.
     * For this test, a small artificial delay ({@link #APPEND_DELAY_MILLIS}) is added to the DurableDataLog append
     * implementations, so commits complete asynchronously and acks may arrive out of order.
     */
    @Test
    public void testAppendNoFailureWithDelay() throws Exception {
        testAppendNoFailure(APPEND_DELAY_MILLIS);
    }

    /**
     * Tests the case when the appends fail because of Serialization failures.
     * Serialization errors should only affect the append that caused it. It should not cause any data to be dropped
     * or put the DataFrameBuilder in a stuck state.
     * This should be done both with large and with small LogItems. Large items span multiple frames.
     */
    @Test
    public void testAppendWithSerializationFailure() throws Exception {
        int failEvery = 7; // Fail every X records.
        ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
        records.addAll(DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));

        // Have every 'failEvery'-th record fail after writing 90% of itself, so the failure hits mid-serialization.
        for (int i = 0; i < records.size(); i += failEvery) {
            records.get(i).failSerializationAfterComplete(0.9, new IOException("intentional " + i));
        }

        HashSet<Integer> failedIndices = new HashSet<>();
        val order = new HashMap<DataFrameBuilder.CommitArgs, Integer>();
        try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
            dataLog.initialize(TIMEOUT);
            List<DataFrameBuilder.CommitArgs> commitFrames = Collections.synchronizedList(new ArrayList<>());
            // No commit error is acceptable in this test: only individual appends may fail (with IOException).
            BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) ->
                    Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
            val args = new DataFrameBuilder.Args(DataFrameTestHelpers.appendOrder(order), commitFrames::add, errorCallback, executorService());
            try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
                for (int i = 0; i < records.size(); i++) {
                    try {
                        b.append(records.get(i));
                    } catch (IOException ex) {
                        // Expected for the records rigged above; remember which ones so read-back can exclude them.
                        failedIndices.add(i);
                    }
                }
            }

            // Read all entries in the Log and interpret them as DataFrames, then verify the records can be reconstructed.
            TestUtils.await(() -> commitFrames.size() >= order.size(), 20, TIMEOUT.toMillis());
            List<DataFrame.DataFrameEntryIterator> frames = dataLog.getAllEntries(readItem ->
                    DataFrame.read(readItem.getPayload(), readItem.getLength(), readItem.getAddress()));
            Assert.assertEquals("Unexpected number of frames generated.", commitFrames.size(), frames.size());

            // Check the correctness of the commit callback.
            AssertExtensions.assertGreaterThan("Not enough Data Frames were generated.", 1, commitFrames.size());
            AssertExtensions.assertGreaterThan("Not enough LogItems were failed.", records.size() / failEvery, failedIndices.size());
            DataFrameTestHelpers.checkReadRecords(frames, records, failedIndices, r -> new ByteArraySegment(r.getFullSerialization()));
        }
    }

    /**
     * Tests the case when the DataLog fails to commit random frames.
     * Commit errors should affect only the LogItems that were part of it. It should cause data to be dropped
     * and affected appends failed.
     * This should be done both with large and with small LogItems. Large items span multiple frames.
     */
    @Test
    public void testAppendWithCommitFailure() throws Exception {
        int failAt = 7; // Fail the commit to DurableDataLog after this many writes.
        List<TestLogItem> records = DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
        records.addAll(DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));

        // @Cleanup (rather than try-with-resources) because dataLog must stay open for the read-back
        // section below, after the DataFrameBuilder's own try block has closed.
        @Cleanup
        TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService());
        dataLog.initialize(TIMEOUT);
        // Inject an IntentionalException into every async append once 'failAt' appends have been attempted.
        val asyncInjector = new ErrorInjector<Exception>(count -> count >= failAt, IntentionalException::new);
        dataLog.setAppendErrorInjectors(null, asyncInjector);

        AtomicInteger failCount = new AtomicInteger();
        List<DataFrameBuilder.CommitArgs> successCommits = Collections.synchronizedList(new ArrayList<>());

        // Keep a reference to the builder (once created) so we can inspect its failure cause.
        val builderRef = new AtomicReference<DataFrameBuilder<TestLogItem>>();
        // attemptCount is incremented on every commit attempt and decremented on every failure, so at the
        // end it equals the number of successful commits (compared against successCommits below).
        val attemptCount = new AtomicInteger();
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) -> {
            attemptCount.decrementAndGet();

            // Check that we actually did want an exception to happen.
            Throwable expectedError = Exceptions.unwrap(asyncInjector.getLastCycleException());
            Assert.assertNotNull("An error happened but none was expected: " + ex, expectedError);
            Throwable actualError = Exceptions.unwrap(ex);
            if (!(ex instanceof ObjectClosedException)) {
                // First failure.
                Assert.assertEquals("Unexpected error occurred upon commit.", expectedError, actualError);
            }
            if (builderRef.get().failureCause() != null) {
                checkFailureCause(builderRef.get(), ce -> ce instanceof IntentionalException);
            }
            failCount.incrementAndGet();
        };
        val args = new DataFrameBuilder.Args(ca -> attemptCount.incrementAndGet(), successCommits::add, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            builderRef.set(b);
            try {
                for (val r : records) {
                    b.append(r);
                }
                b.close();
            } catch (ObjectClosedException ex) {
                TestUtils.await(() -> b.failureCause() != null, 20, TIMEOUT.toMillis());
                // If DataFrameBuilder is closed, then we must have had an exception thrown via the callback before.
                Assert.assertNotNull("DataFrameBuilder is closed, yet failure cause is not set yet.", b.failureCause());
                checkFailureCause(b, ce -> ce instanceof IntentionalException);
            }
        }
        // Wait until every attempted-but-not-failed commit has reported success.
        TestUtils.await(() -> successCommits.size() >= attemptCount.get(), 20, TIMEOUT.toMillis());

        // Read all committed items.
        @Cleanup
        val reader = new DataFrameReader<>(dataLog, new TestSerializer(), CONTAINER_ID);
        val readItems = new ArrayList<TestLogItem>();
        DataFrameRecord<TestLogItem> readItem;
        while ((readItem = reader.getNext()) != null) {
            readItems.add(readItem.getItem());
        }

        // Only items fully serialized up to the last successful commit are expected to be readable.
        val lastCommitSeqNo = successCommits.stream()
                .mapToLong(DataFrameBuilder.CommitArgs::getLastFullySerializedSequenceNumber)
                .max().orElse(-1);
        val expectedItems = records.stream().filter(r -> r.getSequenceNumber() <= lastCommitSeqNo).collect(Collectors.toList());
        AssertExtensions.assertListEquals("Items read back do not match expected values.", expectedItems, readItems, TestLogItem::equals);

        // Read all entries in the Log and interpret them as DataFrames, then verify the records can be reconstructed.
        val frames = dataLog.getAllEntries(ri -> DataFrame.read(ri.getPayload(), ri.getLength(), ri.getAddress()));

        // Check the correctness of the commit callback.
        AssertExtensions.assertGreaterThan("Not enough Data Frames were generated.", 1, frames.size());
        Assert.assertEquals("Unexpected number of frames generated.", successCommits.size(), frames.size());
    }

    /**
     * Verifies that the given builder's failureCause() satisfies the given predicate.
     *
     * @param builder         The builder whose failure cause to inspect.
     * @param exceptionTester Predicate that the (non-null) failure cause must satisfy.
     */
    private void checkFailureCause(DataFrameBuilder<TestLogItem> builder, Predicate<Throwable> exceptionTester) {
        Throwable causingException = builder.failureCause();
        Assert.assertTrue("Unexpected failure cause for DataFrameBuilder: " + builder.failureCause(),
                exceptionTester.test(causingException));
    }

    /**
     * Tests the flush() method.
     */
    @Test
    public void testFlush() throws Exception {
        // Append two records, make sure they are not flushed, close the Builder, then make sure they are flushed.
        try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
            dataLog.initialize(TIMEOUT);
            // Two small records together fit inside one frame, so nothing should auto-commit before flush().
            ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(2, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
            List<DataFrameBuilder.CommitArgs> commitFrames = Collections.synchronizedList(new ArrayList<>());
            BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) ->
                    Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
            val args = new DataFrameBuilder.Args(Callbacks::doNothing, commitFrames::add, errorCallback, executorService());
            @Cleanup
            DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args);
            for (TestLogItem item : records) {
                b.append(item);
            }

            // Check the correctness of the commit callback.
            Assert.assertEquals("A Data Frame was generated but none was expected yet.", 0, commitFrames.size());

            // Invoke flush.
            b.flush();

            // Wait for all the frames commit callbacks to be invoked.
            TestUtils.await(() -> commitFrames.size() >= 1, 20, TIMEOUT.toMillis());

            // Check the correctness of the commit callback (after closing the builder).
            Assert.assertEquals("Exactly one Data Frame was expected so far.", 1, commitFrames.size());

            //Read all entries in the Log and interpret them as DataFrames, then verify the records can be reconstructed.
            val frames = dataLog.getAllEntries(readItem ->
                    DataFrame.read(readItem.getPayload(), readItem.getLength(), readItem.getAddress()));
            Assert.assertEquals("Unexpected number of frames generated.", commitFrames.size(), frames.size());
            DataFrameTestHelpers.checkReadRecords(frames, records, r -> new ByteArraySegment(r.getFullSerialization()));
        }
    }

    /**
     * Shared body for the happy-path tests: appends RECORD_COUNT records (half small, half large) and
     * verifies that the committed frames contain all of them, in order.
     *
     * @param delayMillis Artificial delay (ms) applied to each DurableDataLog append; 0 means near-synchronous.
     */
    private void testAppendNoFailure(int delayMillis) throws Exception {
        // Happy case: append a bunch of data, and make sure the frames that get output contain it.
        ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
        records.addAll(DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));
        try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, delayMillis, executorService())) {
            dataLog.initialize(TIMEOUT);
            val order = new HashMap<DataFrameBuilder.CommitArgs, Integer>();
            List<DataFrameBuilder.CommitArgs> commitFrames = Collections.synchronizedList(new ArrayList<>());
            BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) ->
                    Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
            val args = new DataFrameBuilder.Args(DataFrameTestHelpers.appendOrder(order), commitFrames::add, errorCallback, executorService());
            try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
                for (TestLogItem item : records) {
                    b.append(item);
                }
                b.close();
            }

            // Wait for all the frames commit callbacks to be invoked. Even though the DataFrameBuilder waits (upon close)
            // for the OrderedItemProcessor to finish, there are other callbacks chained that need to be completed (such
            // as the one collecting frames in the list above).
            // NOTE(review): delayMillis doubles as the polling interval here (other await() calls use 20ms);
            // with delayMillis == 0 this polls as fast as TestUtils allows - confirm that is intended.
            TestUtils.await(() -> commitFrames.size() >= order.size(), delayMillis, TIMEOUT.toMillis());

            // It is quite likely that acks will arrive out of order. The DataFrameBuilder has no responsibility for
            // rearrangement; that should be done by its user.
            commitFrames.sort(Comparator.comparingInt(order::get));

            // Check the correctness of the commit callback.
            AssertExtensions.assertGreaterThan("Not enough Data Frames were generated.", 1, commitFrames.size());
            DataFrameBuilder.CommitArgs previousCommitArgs = null;
            for (val ca : commitFrames) {
                if (previousCommitArgs != null) {
                    AssertExtensions.assertGreaterThanOrEqual("CommitArgs.getLastFullySerializedSequenceNumber() is not monotonically increasing.",
                            previousCommitArgs.getLastFullySerializedSequenceNumber(), ca.getLastFullySerializedSequenceNumber());
                    AssertExtensions.assertGreaterThanOrEqual("CommitArgs.getLastStartedSequenceNumber() is not monotonically increasing.",
                            previousCommitArgs.getLastStartedSequenceNumber(), ca.getLastStartedSequenceNumber());
                    AssertExtensions.assertGreaterThanOrEqual("CommitArgs.getLogAddress() is not monotonically increasing.",
                            previousCommitArgs.getLogAddress().getSequence(), ca.getLogAddress().getSequence());
                }
                previousCommitArgs = ca;
            }

            //Read all entries in the Log and interpret them as DataFrames, then verify the records can be reconstructed.
            val frames = dataLog.getAllEntries(readItem ->
                    DataFrame.read(readItem.getPayload(), readItem.getLength(), readItem.getAddress()));
            DataFrameTestHelpers.checkReadRecords(frames, records, r -> new ByteArraySegment(r.getFullSerialization()));
        }
    }
}
package pl.droidsonroids.gif; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Matrix; import android.graphics.RectF; import android.graphics.SurfaceTexture; import android.os.Build; import android.os.Parcelable; import android.util.AttributeSet; import android.util.TypedValue; import android.view.Surface; import android.view.TextureView; import android.widget.ImageView.ScaleType; import androidx.annotation.FloatRange; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import java.io.IOException; import java.lang.ref.WeakReference; import me.panpf.sketch.gif.R; /** * <p>{@link TextureView} which can display animated GIFs. GifTextureView can only be used in a * hardware accelerated window. When rendered in software, GifTextureView will draw nothing.</p> * <p>GIF source can be specified in XML or by calling {@link #setInputSource(InputSource)}</p> * <pre> {@code * <pl.droidsonroids.gif.GifTextureView * xmlns:app="http://schemas.android.com/apk/res-auto" * android:id="@+id/gif_texture_view" * android:scaleType="fitEnd" * app:gifSource="@drawable/animation" * android:layout_width="match_parent" * android:layout_height="match_parent"> } * </pre> * Note that <b>src</b> attribute comes from app namespace (you can call it whatever you want) not from * android one. Drawable, raw, mipmap resources and assets can be specified through XML. If value is a string * (referenced from resources or entered directly) it will be treated as an asset. * <p>Unlike {@link TextureView} GifTextureView is transparent by default, but it can be changed by * {@link #setOpaque(boolean)}. 
* You can use scale types the same way as in {@link android.widget.ImageView}.</p> */ public class GifTextureView extends TextureView { private static final ScaleType[] sScaleTypeArray = { ScaleType.MATRIX, ScaleType.FIT_XY, ScaleType.FIT_START, ScaleType.FIT_CENTER, ScaleType.FIT_END, ScaleType.CENTER, ScaleType.CENTER_CROP, ScaleType.CENTER_INSIDE }; private ScaleType mScaleType = ScaleType.FIT_CENTER; private final Matrix mTransform = new Matrix(); private InputSource mInputSource; private RenderThread mRenderThread; private float mSpeedFactor = 1f; private GifViewUtils.GifViewAttributes viewAttributes; public GifTextureView(Context context) { super(context); init(null, 0, 0); } public GifTextureView(Context context, AttributeSet attrs) { super(context, attrs); init(attrs, 0, 0); } public GifTextureView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(attrs, defStyleAttr, 0); } @RequiresApi(Build.VERSION_CODES.LOLLIPOP) public GifTextureView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); init(attrs, defStyleAttr, defStyleRes); } private void init(AttributeSet attrs, int defStyleAttr, int defStyleRes) { if (attrs != null) { final int scaleTypeIndex = attrs.getAttributeIntValue(GifViewUtils.ANDROID_NS, "scaleType", -1); if (scaleTypeIndex >= 0 && scaleTypeIndex < sScaleTypeArray.length) { mScaleType = sScaleTypeArray[scaleTypeIndex]; } final TypedArray textureViewAttributes = getContext().obtainStyledAttributes(attrs, R.styleable .GifTextureView, defStyleAttr, defStyleRes); mInputSource = findSource(textureViewAttributes); super.setOpaque(textureViewAttributes.getBoolean(R.styleable.GifTextureView_isOpaque, false)); textureViewAttributes.recycle(); viewAttributes = new GifViewUtils.GifViewAttributes(this, attrs, defStyleAttr, defStyleRes); } else { super.setOpaque(false); viewAttributes = new GifViewUtils.GifViewAttributes(); } if 
(!isInEditMode()) { mRenderThread = new RenderThread(this); if (mInputSource != null) { mRenderThread.start(); } } } /** * Always throws {@link UnsupportedOperationException}. Changing {@link SurfaceTextureListener} * is not supported. * * @param listener ignored */ @Override public void setSurfaceTextureListener(SurfaceTextureListener listener) { throw new UnsupportedOperationException("Changing SurfaceTextureListener is not supported"); } /** * Always returns null since changing {@link SurfaceTextureListener} is not supported. * * @return always null */ @Override public SurfaceTextureListener getSurfaceTextureListener() { return null; } /** * Always throws {@link UnsupportedOperationException}. Changing {@link SurfaceTexture} is not * supported. * * @param surfaceTexture ignored */ @Override public void setSurfaceTexture(SurfaceTexture surfaceTexture) { throw new UnsupportedOperationException("Changing SurfaceTexture is not supported"); } private static InputSource findSource(final TypedArray textureViewAttributes) { final TypedValue value = new TypedValue(); if (!textureViewAttributes.getValue(R.styleable.GifTextureView_gifSource, value)) { return null; } if (value.resourceId != 0) { final String resourceTypeName = textureViewAttributes.getResources().getResourceTypeName(value.resourceId); if (GifViewUtils.SUPPORTED_RESOURCE_TYPE_NAMES.contains(resourceTypeName)) { return new InputSource.ResourcesSource(textureViewAttributes.getResources(), value.resourceId); } else if (!"string".equals(resourceTypeName)) { throw new IllegalArgumentException( "Expected string, drawable, mipmap or raw resource type. 
'" + resourceTypeName + "' is not supported"); } } return new InputSource.AssetSource(textureViewAttributes.getResources().getAssets(), value.string.toString()); } private static class RenderThread extends Thread implements SurfaceTextureListener { final ConditionVariable isSurfaceValid = new ConditionVariable(); private GifInfoHandle mGifInfoHandle = new GifInfoHandle(); private IOException mIOException; long[] mSavedState; private final WeakReference<GifTextureView> mGifTextureViewReference; RenderThread(final GifTextureView gifTextureView) { super("GifRenderThread"); mGifTextureViewReference = new WeakReference<>(gifTextureView); } @Override public void run() { try { final GifTextureView gifTextureView = mGifTextureViewReference.get(); if (gifTextureView == null) { return; } mGifInfoHandle = gifTextureView.mInputSource.open(); mGifInfoHandle.setOptions((char) 1, gifTextureView.isOpaque()); if (gifTextureView.viewAttributes.mLoopCount >= 0) { mGifInfoHandle.setLoopCount(gifTextureView.viewAttributes.mLoopCount); } } catch (IOException ex) { mIOException = ex; return; } final GifTextureView gifTextureView = mGifTextureViewReference.get(); if (gifTextureView == null) { mGifInfoHandle.recycle(); return; } gifTextureView.setSuperSurfaceTextureListener(this); final boolean isSurfaceAvailable = gifTextureView.isAvailable(); isSurfaceValid.set(isSurfaceAvailable); if (isSurfaceAvailable) { gifTextureView.post(new Runnable() { @Override public void run() { gifTextureView.updateTextureViewSize(mGifInfoHandle); } }); } mGifInfoHandle.setSpeedFactor(gifTextureView.mSpeedFactor); while (!isInterrupted()) { try { isSurfaceValid.block(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); break; } final GifTextureView currentGifTextureView = mGifTextureViewReference.get(); if (currentGifTextureView == null) { break; } final SurfaceTexture surfaceTexture = currentGifTextureView.getSurfaceTexture(); if (surfaceTexture == null) { continue; } final Surface 
surface = new Surface(surfaceTexture); try { mGifInfoHandle.bindSurface(surface, mSavedState); } finally { surface.release(); } } mGifInfoHandle.recycle(); mGifInfoHandle = new GifInfoHandle(); } @Override public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { final GifTextureView gifTextureView = mGifTextureViewReference.get(); if (gifTextureView != null) { gifTextureView.updateTextureViewSize(mGifInfoHandle); } isSurfaceValid.open(); } @Override public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { //no-op } @Override public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { isSurfaceValid.close(); mGifInfoHandle.postUnbindSurface(); interrupt(); return true; } @Override public void onSurfaceTextureUpdated(SurfaceTexture surface) { //no-op } void dispose(@NonNull final GifTextureView gifTextureView, @Nullable final PlaceholderDrawListener drawer) { isSurfaceValid.close(); final SurfaceTextureListener listener = drawer != null ? new PlaceholderDrawingSurfaceTextureListener(drawer) : null; gifTextureView.setSuperSurfaceTextureListener(listener); mGifInfoHandle.postUnbindSurface(); interrupt(); } } private void setSuperSurfaceTextureListener(SurfaceTextureListener listener) { super.setSurfaceTextureListener(listener); } /** * Indicates whether the content of this GifTextureView is opaque. The * content is assumed to be <b>non-opaque</b> by default (unlike {@link TextureView}. * View that is known to be opaque can take a faster drawing case than non-opaque one.<br> * Opacity change will cause animation to restart. 
* * @param opaque True if the content of this GifTextureView is opaque, * false otherwise */ @Override public void setOpaque(boolean opaque) { if (opaque != isOpaque()) { super.setOpaque(opaque); setInputSource(mInputSource); } } @Override protected void onDetachedFromWindow() { mRenderThread.dispose(this, null); super.onDetachedFromWindow(); final SurfaceTexture surfaceTexture = getSurfaceTexture(); if (surfaceTexture != null) { surfaceTexture.release(); } } /** * Sets the source of the animation. Pass {@code null} to remove current source. * Equivalent of {@code setInputSource(inputSource, null)}. * * @param inputSource new animation source, may be null */ public synchronized void setInputSource(@Nullable InputSource inputSource) { setInputSource(inputSource, null); } /** * Sets the source of the animation and optionally placeholder drawer. Pass {@code null inputSource} to remove current source. * {@code placeholderDrawListener} is overwritten on {@code setInputSource(inputSource)} call. * * @param inputSource new animation source, may be null * @param placeholderDrawListener placeholder draw listener, may be null */ public synchronized void setInputSource(@Nullable InputSource inputSource, @Nullable PlaceholderDrawListener placeholderDrawListener) { mRenderThread.dispose(this, placeholderDrawListener); try { mRenderThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } mInputSource = inputSource; mRenderThread = new RenderThread(this); if (inputSource != null) { mRenderThread.start(); } } /** * Equivalent of {@link GifDrawable#setSpeed(float)}. * * @param factor new speed factor, eg. 
0.5f means half speed, 1.0f - normal, 2.0f - double speed * @throws IllegalArgumentException if {@code factor <= 0} * @see GifDrawable#setSpeed(float) */ public void setSpeed(@FloatRange(from = 0, fromInclusive = false) float factor) { mSpeedFactor = factor; mRenderThread.mGifInfoHandle.setSpeedFactor(factor); } /** * Returns last {@link IOException} occurred during loading or playing GIF (in such case only {@link GifIOException} * can be returned. Null is returned when source is not set, surface was not yet created or no error * occurred. * * @return exception occurred during loading or playing GIF or null */ @Nullable public IOException getIOException() { if (mRenderThread.mIOException != null) { return mRenderThread.mIOException; } else { return GifIOException.fromCode(mRenderThread.mGifInfoHandle.getNativeErrorCode()); } } /** * Controls how the image should be resized or moved to match the size * of this GifTextureView. * * @param scaleType The desired scaling mode. */ public void setScaleType(@NonNull ScaleType scaleType) { mScaleType = scaleType; updateTextureViewSize(mRenderThread.mGifInfoHandle); } /** * @return the current scale type in use by this View. 
* @see ScaleType */ public ScaleType getScaleType() { return mScaleType; } private void updateTextureViewSize(final GifInfoHandle gifInfoHandle) { final Matrix transform = new Matrix(); final float viewWidth = getWidth(); final float viewHeight = getHeight(); final float scaleRef; final float scaleX = gifInfoHandle.getWidth() / viewWidth; final float scaleY = gifInfoHandle.getHeight() / viewHeight; RectF src = new RectF(0, 0, gifInfoHandle.getWidth(), gifInfoHandle.getHeight()); RectF dst = new RectF(0, 0, viewWidth, viewHeight); switch (mScaleType) { case CENTER: transform.setScale(scaleX, scaleY, viewWidth / 2, viewHeight / 2); break; case CENTER_CROP: scaleRef = 1 / Math.min(scaleX, scaleY); transform.setScale(scaleRef * scaleX, scaleRef * scaleY, viewWidth / 2, viewHeight / 2); break; case CENTER_INSIDE: if (gifInfoHandle.getWidth() <= viewWidth && gifInfoHandle.getHeight() <= viewHeight) { scaleRef = 1.0f; } else { scaleRef = Math.min(1 / scaleX, 1 / scaleY); } transform.setScale(scaleRef * scaleX, scaleRef * scaleY, viewWidth / 2, viewHeight / 2); break; case FIT_CENTER: transform.setRectToRect(src, dst, Matrix.ScaleToFit.CENTER); transform.preScale(scaleX, scaleY); break; case FIT_END: transform.setRectToRect(src, dst, Matrix.ScaleToFit.END); transform.preScale(scaleX, scaleY); break; case FIT_START: transform.setRectToRect(src, dst, Matrix.ScaleToFit.START); transform.preScale(scaleX, scaleY); break; case FIT_XY: return; case MATRIX: transform.set(mTransform); transform.preScale(scaleX, scaleY); break; } super.setTransform(transform); } /** * Wrapper of {@link #setTransform(Matrix)}. Introduced to preserve the same API as in * {@link GifImageView}. * * @param matrix The transform to apply to the content of this view. 
*/ public void setImageMatrix(Matrix matrix) { setTransform(matrix); } /** * Works like {@link TextureView#setTransform(Matrix)} but transform will take effect only if * scale type is set to {@link ScaleType#MATRIX} through XML attribute or via {@link #setScaleType(ScaleType)} * * @param transform The transform to apply to the content of this view. */ @Override public void setTransform(Matrix transform) { mTransform.set(transform); updateTextureViewSize(mRenderThread.mGifInfoHandle); } /** * Returns the transform associated with this texture view, either set explicitly by {@link #setTransform(Matrix)} * or computed according to the current scale type. * * @param transform The {@link Matrix} in which to copy the current transform. Can be null. * @return The specified matrix if not null or a new {@link Matrix} instance otherwise. * @see #setTransform(android.graphics.Matrix) * @see #setScaleType(ScaleType) */ @Override public Matrix getTransform(Matrix transform) { if (transform == null) { transform = new Matrix(); } transform.set(mTransform); return transform; } @Override public Parcelable onSaveInstanceState() { mRenderThread.mSavedState = mRenderThread.mGifInfoHandle.getSavedState(); return new GifViewSavedState(super.onSaveInstanceState(), viewAttributes.freezesAnimation ? 
mRenderThread.mSavedState : null); } @Override public void onRestoreInstanceState(Parcelable state) { if (!(state instanceof GifViewSavedState)) { super.onRestoreInstanceState(state); return; } GifViewSavedState ss = (GifViewSavedState) state; super.onRestoreInstanceState(ss.getSuperState()); mRenderThread.mSavedState = ss.mStates[0]; } /** * Sets whether animation position is saved in {@link #onSaveInstanceState()} and restored * in {@link #onRestoreInstanceState(Parcelable)} * * @param freezesAnimation whether animation position is saved */ public void setFreezesAnimation(boolean freezesAnimation) { viewAttributes.freezesAnimation = freezesAnimation; } /** * This listener can be used to be notified when the {@link GifTextureView} content placeholder can be drawn. * Placeholder is displayed before proper input source is loaded and remains visible when input source loading fails. */ public interface PlaceholderDrawListener { /** * Called when surface is ready and placeholder has to be drawn. * It may occur more than once (eg. if {@code View} visibility is toggled before input source is loaded) * or never (eg. when {@code View} is never visible).<br> * Note that it is an error to use {@code canvas} after this method return. * * @param canvas canvas to draw into */ void onDrawPlaceholder(Canvas canvas); } }
/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.react.modules.network; import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.GuardedAsyncTask; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.WritableArray; import com.facebook.react.bridge.WritableMap; import com.facebook.react.modules.core.DeviceEventManagerModule; import com.facebook.stetho.okhttp.StethoInterceptor; import com.squareup.okhttp.Callback; import com.squareup.okhttp.Headers; import com.squareup.okhttp.MediaType; import com.squareup.okhttp.MultipartBuilder; import com.squareup.okhttp.OkHttpClient; import com.squareup.okhttp.Request; import com.squareup.okhttp.RequestBody; import com.squareup.okhttp.Response; import com.squareup.okhttp.ResponseBody; import static java.lang.Math.min; /** * Implements the XMLHttpRequest JavaScript interface. 
*/
public final class NetworkingModule extends ReactContextBaseJavaModule {

  // Header names are matched case-insensitively by OkHttp's Headers class.
  private static final String CONTENT_ENCODING_HEADER_NAME = "content-encoding";
  private static final String CONTENT_TYPE_HEADER_NAME = "content-type";
  // Keys that JS may set inside the request "data" map.
  private static final String REQUEST_BODY_KEY_STRING = "string";
  private static final String REQUEST_BODY_KEY_URI = "uri";
  private static final String REQUEST_BODY_KEY_FORMDATA = "formData";
  private static final String USER_AGENT_HEADER_NAME = "user-agent";
  private static final int MIN_BUFFER_SIZE = 8 * 1024; // 8kb
  private static final int MAX_BUFFER_SIZE = 512 * 1024; // 512kb
  private static final int CHUNK_TIMEOUT_NS = 100 * 1000000; // 100ms

  private final OkHttpClient mClient;
  private final ForwardingCookieHandler mCookieHandler;
  private final @Nullable String mDefaultUserAgent;
  // Set once in onCatalystInstanceDestroy; callbacks check it to avoid
  // emitting events into a dying React instance.
  private boolean mShuttingDown;

  /* package */ NetworkingModule(
      ReactApplicationContext reactContext,
      @Nullable String defaultUserAgent,
      OkHttpClient client) {
    super(reactContext);
    mClient = client;
    // NOTE(review): the Stetho debug interceptor is added unconditionally
    // here, including for release builds — confirm this is intended.
    mClient.networkInterceptors().add(new StethoInterceptor());
    mCookieHandler = new ForwardingCookieHandler(reactContext);
    mShuttingDown = false;
    mDefaultUserAgent = defaultUserAgent;
  }

  /**
   * @param context the ReactContext of the application
   */
  public NetworkingModule(final ReactApplicationContext context) {
    this(context, null, OkHttpClientProvider.getOkHttpClient());
  }

  /**
   * @param context the ReactContext of the application
   * @param defaultUserAgent the User-Agent header that will be set for all requests where the
   * caller does not provide one explicitly
   */
  public NetworkingModule(ReactApplicationContext context, String defaultUserAgent) {
    this(context, defaultUserAgent, OkHttpClientProvider.getOkHttpClient());
  }

  public NetworkingModule(ReactApplicationContext reactContext, OkHttpClient client) {
    this(reactContext, null, client);
  }

  @Override
  public void initialize() {
    // Route the client's cookie storage through React's cookie handler.
    mClient.setCookieHandler(mCookieHandler);
  }

  @Override
  public String getName() {
    // Name under which this module is exposed to JavaScript.
    return "RCTNetworking";
  }

  @Override
  public void onCatalystInstanceDestroy() {
    mShuttingDown = true;
    // cancel(null) cancels calls tagged with null; in-flight tagged requests
    // are cancelled individually via abortRequest.
    mClient.cancel(null);
    mCookieHandler.destroy();
    mClient.setCookieHandler(null);
  }

  /**
   * Builds and enqueues an HTTP request on behalf of JS. The body is taken from
   * {@code data} using the first recognized key ("string", "uri" or "formData");
   * progress/result is reported back via device events keyed by {@code requestId}.
   */
  @ReactMethod
  public void sendRequest(
      String method,
      String url,
      final int requestId,
      ReadableArray headers,
      ReadableMap data,
      final boolean useIncrementalUpdates) {
    Request.Builder requestBuilder = new Request.Builder().url(url);

    if (requestId != 0) {
      // Tag the call so abortRequest(requestId) can cancel it later.
      requestBuilder.tag(requestId);
    }

    Headers requestHeaders = extractHeaders(headers, data);
    if (requestHeaders == null) {
      onRequestError(requestId, "Unrecognized headers format");
      return;
    }
    String contentType = requestHeaders.get(CONTENT_TYPE_HEADER_NAME);
    String contentEncoding = requestHeaders.get(CONTENT_ENCODING_HEADER_NAME);
    requestBuilder.headers(requestHeaders);

    if (data == null) {
      requestBuilder.method(method, RequestBodyUtil.getEmptyBody(method));
    } else if (data.hasKey(REQUEST_BODY_KEY_STRING)) {
      // Plain string payload; content-type is mandatory so OkHttp can encode it.
      if (contentType == null) {
        onRequestError(requestId, "Payload is set but no content-type header specified");
        return;
      }
      String body = data.getString(REQUEST_BODY_KEY_STRING);
      MediaType contentMediaType = MediaType.parse(contentType);
      if (RequestBodyUtil.isGzipEncoding(contentEncoding)) {
        // Compress the body on the fly when the caller asked for gzip encoding.
        RequestBody requestBody = RequestBodyUtil.createGzip(contentMediaType, body);
        if (requestBody == null) {
          onRequestError(requestId, "Failed to gzip request body");
          return;
        }
        requestBuilder.method(method, requestBody);
      } else {
        requestBuilder.method(method, RequestBody.create(contentMediaType, body));
      }
    } else if (data.hasKey(REQUEST_BODY_KEY_URI)) {
      // Payload streamed from a content/file URI.
      if (contentType == null) {
        onRequestError(requestId, "Payload is set but no content-type header specified");
        return;
      }
      String uri = data.getString(REQUEST_BODY_KEY_URI);
      InputStream fileInputStream =
          RequestBodyUtil.getFileInputStream(getReactApplicationContext(), uri);
      if (fileInputStream == null) {
        onRequestError(requestId, "Could not retrieve file for uri " + uri);
        return;
      }
      requestBuilder.method(
          method,
          RequestBodyUtil.create(MediaType.parse(contentType), fileInputStream));
    } else if (data.hasKey(REQUEST_BODY_KEY_FORMDATA)) {
      // Multipart form data; default the content type when none was provided.
      if (contentType == null) {
        contentType = "multipart/form-data";
      }
      ReadableArray parts = data.getArray(REQUEST_BODY_KEY_FORMDATA);
      MultipartBuilder multipartBuilder = constructMultipartBody(parts, contentType, requestId);
      if (multipartBuilder == null) {
        // constructMultipartBody already reported the error to JS.
        return;
      }
      requestBuilder.method(method, multipartBuilder.build());
    } else {
      // Nothing in data payload, at least nothing we could understand anyway.
      // Ignore and treat it as if it were null.
      requestBuilder.method(method, null);
    }

    mClient.newCall(requestBuilder.build()).enqueue(
        new Callback() {
          @Override
          public void onFailure(Request request, IOException e) {
            if (mShuttingDown) {
              return;
            }
            onRequestError(requestId, e.getMessage());
          }

          @Override
          public void onResponse(Response response) throws IOException {
            if (mShuttingDown) {
              return;
            }
            // Before we touch the body send headers to JS
            onResponseReceived(requestId, response);

            ResponseBody responseBody = response.body();
            try {
              if (useIncrementalUpdates) {
                // Stream the body to JS in chunks as it arrives.
                readWithProgress(requestId, responseBody);
                onRequestSuccess(requestId);
              } else {
                onDataReceived(requestId, responseBody.string());
                onRequestSuccess(requestId);
              }
            } catch (IOException e) {
              onRequestError(requestId, e.getMessage());
            }
          }
        });
  }

  /**
   * Reads the response body incrementally, emitting a data event at most once
   * per CHUNK_TIMEOUT_NS so JS is not flooded with tiny updates.
   */
  private void readWithProgress(int requestId, ResponseBody responseBody) throws IOException {
    Reader reader = responseBody.charStream();
    try {
      StringBuilder sb = new StringBuilder(getBufferSize(responseBody));
      char[] buffer = new char[MIN_BUFFER_SIZE];
      int read;
      long last = System.nanoTime();
      while ((read = reader.read(buffer)) != -1) {
        sb.append(buffer, 0, read);
        long now = System.nanoTime();
        if (shouldDispatch(now, last)) {
          onDataReceived(requestId, sb.toString());
          sb.setLength(0);
          last = now;
        }
      }
      if (sb.length() > 0) {
        // Flush whatever accumulated since the last dispatch.
        onDataReceived(requestId, sb.toString());
      }
    } finally {
      reader.close();
    }
  }

  // True when at least CHUNK_TIMEOUT_NS elapsed since the last dispatch.
  private static boolean shouldDispatch(long now, long last) {
    return last + CHUNK_TIMEOUT_NS < now;
  }

  // Initial StringBuilder capacity: the content length when known, clamped
  // to MAX_BUFFER_SIZE; MIN_BUFFER_SIZE when the length is unknown (-1).
  private static int getBufferSize(ResponseBody responseBody) throws IOException {
    long length = responseBody.contentLength();
    if (length == -1) {
      return MIN_BUFFER_SIZE;
    } else {
      return (int) min(length, MAX_BUFFER_SIZE);
    }
  }

  private void onDataReceived(int requestId, String data) {
    WritableArray args = Arguments.createArray();
    args.pushInt(requestId);
    args.pushString(data);

    getEventEmitter().emit("didReceiveNetworkData", args);
  }

  // Completion event carrying an error message (non-null second element).
  private void onRequestError(int requestId, String error) {
    WritableArray args = Arguments.createArray();
    args.pushInt(requestId);
    args.pushString(error);

    getEventEmitter().emit("didCompleteNetworkResponse", args);
  }

  // Completion event with a null error element signals success to JS.
  private void onRequestSuccess(int requestId) {
    WritableArray args = Arguments.createArray();
    args.pushInt(requestId);
    args.pushNull();

    getEventEmitter().emit("didCompleteNetworkResponse", args);
  }

  // Sends status code, headers and final URL to JS before the body is read.
  private void onResponseReceived(int requestId, Response response) {
    WritableMap headers = translateHeaders(response.headers());

    WritableArray args = Arguments.createArray();
    args.pushInt(requestId);
    args.pushInt(response.code());
    args.pushMap(headers);
    args.pushString(response.request().urlString());

    getEventEmitter().emit("didReceiveNetworkResponse", args);
  }

  // Converts OkHttp headers into a JS map, joining repeated header values
  // with ", " as allowed by HTTP.
  private static WritableMap translateHeaders(Headers headers) {
    WritableMap responseHeaders = Arguments.createMap();
    for (int i = 0; i < headers.size(); i++) {
      String headerName = headers.name(i);
      // multiple values for the same header
      if (responseHeaders.hasKey(headerName)) {
        responseHeaders.putString(
            headerName,
            responseHeaders.getString(headerName) + ", " + headers.value(i));
      } else {
        responseHeaders.putString(headerName, headers.value(i));
      }
    }
    return responseHeaders;
  }

  @ReactMethod
  public void abortRequest(final int requestId) {
    // We have to use AsyncTask since this might trigger a NetworkOnMainThreadException, this is an
    // open issue on OkHttp: https://github.com/square/okhttp/issues/869
    new GuardedAsyncTask<Void, Void>(getReactApplicationContext()) {
      @Override
      protected void doInBackgroundGuarded(Void... params) {
        // Cancels the call that was tagged with this requestId in sendRequest.
        mClient.cancel(requestId);
      }
    }.execute();
  }

  @ReactMethod
  public void clearCookies(com.facebook.react.bridge.Callback callback) {
    mCookieHandler.clearCookies(callback);
  }

  /**
   * Builds a MultipartBuilder from the JS-provided parts array. Returns null
   * (after reporting the error to JS) when a part is malformed.
   */
  private @Nullable MultipartBuilder constructMultipartBody(
      ReadableArray body,
      String contentType,
      int requestId) {
    MultipartBuilder multipartBuilder = new MultipartBuilder();
    multipartBuilder.type(MediaType.parse(contentType));

    for (int i = 0, size = body.size(); i < size; i++) {
      ReadableMap bodyPart = body.getMap(i);

      // Determine part's content type.
      ReadableArray headersArray = bodyPart.getArray("headers");
      Headers headers = extractHeaders(headersArray, null);
      if (headers == null) {
        onRequestError(requestId, "Missing or invalid header format for FormData part.");
        return null;
      }
      MediaType partContentType = null;
      String partContentTypeStr = headers.get(CONTENT_TYPE_HEADER_NAME);
      if (partContentTypeStr != null) {
        partContentType = MediaType.parse(partContentTypeStr);
        // Remove the content-type header because MultipartBuilder gets it explicitly as an
        // argument and doesn't expect it in the headers array.
        headers = headers.newBuilder().removeAll(CONTENT_TYPE_HEADER_NAME).build();
      }

      if (bodyPart.hasKey(REQUEST_BODY_KEY_STRING)) {
        String bodyValue = bodyPart.getString(REQUEST_BODY_KEY_STRING);
        multipartBuilder.addPart(headers, RequestBody.create(partContentType, bodyValue));
      } else if (bodyPart.hasKey(REQUEST_BODY_KEY_URI)) {
        if (partContentType == null) {
          onRequestError(requestId, "Binary FormData part needs a content-type header.");
          return null;
        }
        String fileContentUriStr = bodyPart.getString(REQUEST_BODY_KEY_URI);
        InputStream fileInputStream =
            RequestBodyUtil.getFileInputStream(getReactApplicationContext(), fileContentUriStr);
        if (fileInputStream == null) {
          onRequestError(requestId, "Could not retrieve file for uri " + fileContentUriStr);
          return null;
        }
        multipartBuilder.addPart(headers, RequestBodyUtil.create(partContentType, fileInputStream));
      } else {
        onRequestError(requestId, "Unrecognized FormData part.");
      }
    }
    return multipartBuilder;
  }

  /**
   * Extracts the headers from the Array. If the format is invalid, this method will return null.
   */
  private @Nullable Headers extractHeaders(
      @Nullable ReadableArray headersArray,
      @Nullable ReadableMap requestData) {
    if (headersArray == null) {
      return null;
    }
    Headers.Builder headersBuilder = new Headers.Builder();
    for (int headersIdx = 0, size = headersArray.size(); headersIdx < size; headersIdx++) {
      ReadableArray header = headersArray.getArray(headersIdx);
      // Each header must be a two-element [name, value] array.
      if (header == null || header.size() != 2) {
        return null;
      }
      String headerName = header.getString(0);
      String headerValue = header.getString(1);
      headersBuilder.add(headerName, headerValue);
    }
    if (headersBuilder.get(USER_AGENT_HEADER_NAME) == null && mDefaultUserAgent != null) {
      headersBuilder.add(USER_AGENT_HEADER_NAME, mDefaultUserAgent);
    }

    // Sanitize content encoding header, supported only when request specify payload as string
    boolean isGzipSupported = requestData != null && requestData.hasKey(REQUEST_BODY_KEY_STRING);
    if (!isGzipSupported) {
      headersBuilder.removeAll(CONTENT_ENCODING_HEADER_NAME);
    }

    return headersBuilder.build();
  }

  private DeviceEventManagerModule.RCTDeviceEventEmitter getEventEmitter() {
    return getReactApplicationContext()
        .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);
  }
}
package com.edwardvanraak.materialbarcodescanner;

import android.app.Dialog;
import android.hardware.Camera;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;

import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;

import java.io.IOException;

import static junit.framework.Assert.assertNotNull;

/**
 * Fullscreen activity that hosts the camera preview and Mobile Vision barcode
 * detector. Configuration arrives via a sticky {@link MaterialBarcodeScanner}
 * EventBus event; the first detected barcode is posted back as a sticky event
 * and the activity finishes shortly after.
 */
public class MaterialBarcodeScannerActivity extends AppCompatActivity {

    // Request code used when showing the Google Play Services error dialog.
    private static final int RC_HANDLE_GMS = 9001;
    private static final String TAG = "MaterialBarcodeScanner";

    private MaterialBarcodeScanner mMaterialBarcodeScanner;
    private MaterialBarcodeScannerBuilder mMaterialBarcodeScannerBuilder;
    private BarcodeDetector barcodeDetector;
    private CameraSourcePreview mCameraSourcePreview;
    private GraphicOverlay<BarcodeGraphic> mGraphicOverlay;
    private SoundPoolPlayer mSoundPoolPlayer;

    /**
     * true if no further barcode should be detected or given as a result
     */
    private boolean mDetectionConsumed = false;
    // Tracks the torch toggle state driven by the flash button.
    private boolean mFlashOn = false;

    @Override
    public void onCreate(Bundle bundle) {
        super.onCreate(bundle);
        if(getWindow() != null){
            getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        }else{
            Log.e(TAG, "Barcode scanner could not go into fullscreen mode!");
        }
        setContentView(R.layout.barcode_capture);
    }

    // Sticky subscription: receives the scanner configuration posted before
    // this activity was started, then wires up camera and UI.
    @Subscribe(sticky = true, threadMode = ThreadMode.MAIN)
    public void onMaterialBarcodeScanner(MaterialBarcodeScanner materialBarcodeScanner){
        this.mMaterialBarcodeScanner = materialBarcodeScanner;
        mMaterialBarcodeScannerBuilder = mMaterialBarcodeScanner.getMaterialBarcodeScannerBuilder();
        barcodeDetector = mMaterialBarcodeScanner.getMaterialBarcodeScannerBuilder().getBarcodeDetector();
        startCameraSource();
        setupLayout();
    }

    // Applies the builder's text and tracker configuration to the layout.
    private void setupLayout() {
        final TextView topTextView = (TextView) findViewById(R.id.topText);
        assertNotNull(topTextView);
        String topText = mMaterialBarcodeScannerBuilder.getText();
        if(!mMaterialBarcodeScannerBuilder.getText().equals("")){
            topTextView.setText(topText);
        }
        setupButtons();
        setupCenterTracker();
    }

    // In center mode a fixed tracker image is shown and the per-barcode
    // graphic overlay is hidden.
    private void setupCenterTracker() {
        if(mMaterialBarcodeScannerBuilder.getScannerMode() == MaterialBarcodeScanner.SCANNER_MODE_CENTER){
            final ImageView centerTracker  = (ImageView) findViewById(R.id.barcode_square);
            centerTracker.setImageResource(mMaterialBarcodeScannerBuilder.getTrackerResourceID());
            mGraphicOverlay.setVisibility(View.INVISIBLE);
        }
    }

    // Swaps the center tracker image to its "detected" variant; must run on
    // the UI thread because detection callbacks arrive on a worker thread.
    private void updateCenterTrackerForDetectedState() {
        if(mMaterialBarcodeScannerBuilder.getScannerMode() == MaterialBarcodeScanner.SCANNER_MODE_CENTER){
            final ImageView centerTracker = (ImageView) findViewById(R.id.barcode_square);
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    centerTracker.setImageResource(mMaterialBarcodeScannerBuilder.getTrackerDetectedResourceID());
                }
            });
        }
    }

    // Wires the flash toggle button: icon reflects the NEXT action, and the
    // torch is switched accordingly.
    private void setupButtons() {
        final LinearLayout flashOnButton = (LinearLayout)findViewById(R.id.flashIconButton);
        final ImageView flashToggleIcon = (ImageView)findViewById(R.id.flashIcon);
        assertNotNull(flashOnButton);
        flashOnButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mFlashOn) {
                    flashToggleIcon.setBackgroundResource(R.drawable.ic_flash_on_white_24dp);
                    disableTorch();
                } else {
                    flashToggleIcon.setBackgroundResource(R.drawable.ic_flash_off_white_24dp);
                    enableTorch();
                }
                // Toggle the tracked torch state.
                mFlashOn ^= true;
            }
        });
        if(mMaterialBarcodeScannerBuilder.isFlashEnabledByDefault()){
            // NOTE(review): only the icon is updated here; the torch itself is
            // not enabled and mFlashOn stays false — confirm this is intended.
            flashToggleIcon.setBackgroundResource(R.drawable.ic_flash_off_white_24dp);
        }
    }

    /**
     * Starts or restarts the camera source, if it exists.  If the camera source doesn't exist yet
     * (e.g., because onResume was called before the camera source was created), this will be called
     * again when the camera source is created.
     */
    private void startCameraSource() throws SecurityException {
        // check that the device has play services available.
        mSoundPoolPlayer = new SoundPoolPlayer(this);
        int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
                getApplicationContext());
        if (code != ConnectionResult.SUCCESS) {
            // NOTE(review): getErrorDialog may return null for some error
            // codes, and execution continues after showing the dialog —
            // verify both behaviors are acceptable here.
            Dialog dialog = GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
            dialog.show();
        }
        mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>)findViewById(R.id.graphicOverlay);
        BarcodeGraphicTracker.NewDetectionListener listener =  new BarcodeGraphicTracker.NewDetectionListener() {
            @Override
            public void onNewDetection(Barcode barcode) {
                // Consume only the first detection; later ones are ignored.
                if(!mDetectionConsumed){
                    mDetectionConsumed = true;
                    Log.d(TAG, "Barcode detected! - " + barcode.displayValue);
                    EventBus.getDefault().postSticky(barcode);
                    updateCenterTrackerForDetectedState();
                    if(mMaterialBarcodeScannerBuilder.isBleepEnabled()){
                        mSoundPoolPlayer.playShortResource(R.raw.bleep);
                    }
                    // Small delay so the user sees the "detected" feedback
                    // before the activity closes.
                    mGraphicOverlay.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            finish();
                        }
                    },50);
                }
            }
        };
        BarcodeTrackerFactory barcodeFactory = new BarcodeTrackerFactory(mGraphicOverlay, listener, mMaterialBarcodeScannerBuilder.getTrackerColor());
        barcodeDetector.setProcessor(new MultiProcessor.Builder<>(barcodeFactory).build());
        CameraSource mCameraSource = mMaterialBarcodeScannerBuilder.getCameraSource();
        if (mCameraSource != null) {
            try {
                mCameraSourcePreview = (CameraSourcePreview) findViewById(R.id.preview);
                mCameraSourcePreview.start(mCameraSource, mGraphicOverlay);
            } catch (IOException e) {
                Log.e(TAG, "Unable to start camera source.", e);
                mCameraSource.release();
                mCameraSource = null;
            }
        }
    }

    private void enableTorch() throws SecurityException{
        mMaterialBarcodeScannerBuilder.getCameraSource().setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
        try {
            // NOTE(review): start() is called on a camera source that is
            // presumably already running — confirm this restart is required
            // for the flash mode change to take effect.
            mMaterialBarcodeScannerBuilder.getCameraSource().start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void disableTorch() throws SecurityException{
        mMaterialBarcodeScannerBuilder.getCameraSource().setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
        try {
            // NOTE(review): same start()-while-running concern as enableTorch.
            mMaterialBarcodeScannerBuilder.getCameraSource().start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        // Register for the sticky MaterialBarcodeScanner configuration event.
        EventBus.getDefault().register(this);
    }

    @Override
    public void onStop() {
        EventBus.getDefault().unregister(this);
        super.onStop();
    }

    /**
     * Stops the camera.
     */
    @Override
    protected void onPause() {
        super.onPause();
        if (mCameraSourcePreview != null) {
            mCameraSourcePreview.stop();
        }
    }

    /**
     * Releases the resources associated with the camera source, the associated detectors, and the
     * rest of the processing pipeline.
     */
    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Only release shared resources when the activity is truly finishing,
        // not on a configuration change.
        if(isFinishing()){
            clean();
        }
    }

    private void clean() {
        EventBus.getDefault().removeStickyEvent(MaterialBarcodeScanner.class);
        if (mCameraSourcePreview != null) {
            mCameraSourcePreview.release();
            mCameraSourcePreview = null;
        }
        if(mSoundPoolPlayer != null){
            mSoundPoolPlayer.release();
            mSoundPoolPlayer = null;
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.update; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.index.engine.DocumentSourceMissingException; import org.elasticsearch.index.get.GetField; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.ParentFieldMapper; import 
org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.TTLFieldMapper; import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.function.LongSupplier; /** * Helper for translating an update request to an index, delete request or update response. */ public class UpdateHelper extends AbstractComponent { private final ScriptService scriptService; public UpdateHelper(Settings settings, ScriptService scriptService) { super(settings); this.scriptService = scriptService; } /** * Prepares an update request by converting it into an index or delete request or an update response (no action). */ public Result prepare(UpdateRequest request, IndexShard indexShard, LongSupplier nowInMillis) { final GetResult getResult = indexShard.getService().get(request.type(), request.id(), new String[]{RoutingFieldMapper.NAME, ParentFieldMapper.NAME, TTLFieldMapper.NAME, TimestampFieldMapper.NAME}, true, request.version(), request.versionType(), FetchSourceContext.FETCH_SOURCE); return prepare(indexShard.shardId(), request, getResult, nowInMillis); } /** * Prepares an update request by converting it into an index or delete request or an update response (no action). 
*/ @SuppressWarnings("unchecked") protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult getResult, LongSupplier nowInMillis) { long getDateNS = System.nanoTime(); if (!getResult.isExists()) { if (request.upsertRequest() == null && !request.docAsUpsert()) { throw new DocumentMissingException(shardId, request.type(), request.id()); } IndexRequest indexRequest = request.docAsUpsert() ? request.doc() : request.upsertRequest(); TimeValue ttl = indexRequest.ttl(); if (request.scriptedUpsert() && request.script() != null) { // Run the script to perform the create logic IndexRequest upsert = request.upsertRequest(); Map<String, Object> upsertDoc = upsert.sourceAsMap(); Map<String, Object> ctx = new HashMap<>(2); // Tell the script that this is a create and not an update ctx.put("op", "create"); ctx.put("_source", upsertDoc); ctx.put("_now", nowInMillis.getAsLong()); ctx = executeScript(request.script, ctx); //Allow the script to set TTL using ctx._ttl if (ttl == null) { ttl = getTTLFromScriptContext(ctx); } //Allow the script to abort the create by setting "op" to "none" String scriptOpChoice = (String) ctx.get("op"); // Only valid options for an upsert script are "create" // (the default) or "none", meaning abort upsert if (!"create".equals(scriptOpChoice)) { if (!"none".equals(scriptOpChoice)) { logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice, request.script.getIdOrCode()); } UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); update.setGetResult(getResult); return new Result(update, DocWriteResponse.Result.NOOP, upsertDoc, XContentType.JSON); } indexRequest.source((Map) ctx.get("_source")); } indexRequest.index(request.index()).type(request.type()).id(request.id()) // it has to be a "create!" 
.create(true) .ttl(ttl) .setRefreshPolicy(request.getRefreshPolicy()) .routing(request.routing()) .parent(request.parent()) .waitForActiveShards(request.waitForActiveShards()); if (request.versionType() != VersionType.INTERNAL) { // in all but the internal versioning mode, we want to create the new document using the given version. indexRequest.version(request.version()).versionType(request.versionType()); } return new Result(indexRequest, DocWriteResponse.Result.CREATED, null, null); } long updateVersion = getResult.getVersion(); if (request.versionType() != VersionType.INTERNAL) { assert request.versionType() == VersionType.FORCE; updateVersion = request.version(); // remember, match_any is excluded by the conflict test } if (getResult.internalSourceRef() == null) { // no source, we can't do nothing, through a failure... throw new DocumentSourceMissingException(shardId, request.type(), request.id()); } Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); String operation = null; String timestamp = null; TimeValue ttl = null; final Map<String, Object> updatedSourceAsMap; final XContentType updateSourceContentType = sourceAndContent.v1(); String routing = getResult.getFields().containsKey(RoutingFieldMapper.NAME) ? getResult.field(RoutingFieldMapper.NAME).getValue().toString() : null; String parent = getResult.getFields().containsKey(ParentFieldMapper.NAME) ? 
getResult.field(ParentFieldMapper.NAME).getValue().toString() : null; if (request.script() == null && request.doc() != null) { IndexRequest indexRequest = request.doc(); updatedSourceAsMap = sourceAndContent.v2(); if (indexRequest.ttl() != null) { ttl = indexRequest.ttl(); } timestamp = indexRequest.timestamp(); if (indexRequest.routing() != null) { routing = indexRequest.routing(); } if (indexRequest.parent() != null) { parent = indexRequest.parent(); } boolean noop = !XContentHelper.update(updatedSourceAsMap, indexRequest.sourceAsMap(), request.detectNoop()); // noop could still be true even if detectNoop isn't because update detects empty maps as noops. BUT we can only // actually turn the update into a noop if detectNoop is true to preserve backwards compatibility and to handle // cases where users repopulating multi-fields or adding synonyms, etc. if (request.detectNoop() && noop) { operation = "none"; } } else { Map<String, Object> ctx = new HashMap<>(16); Long originalTtl = getResult.getFields().containsKey(TTLFieldMapper.NAME) ? (Long) getResult.field(TTLFieldMapper.NAME).getValue() : null; Long originalTimestamp = getResult.getFields().containsKey(TimestampFieldMapper.NAME) ? 
(Long) getResult.field(TimestampFieldMapper.NAME).getValue() : null; ctx.put("_index", getResult.getIndex()); ctx.put("_type", getResult.getType()); ctx.put("_id", getResult.getId()); ctx.put("_version", getResult.getVersion()); ctx.put("_routing", routing); ctx.put("_parent", parent); ctx.put("_timestamp", originalTimestamp); ctx.put("_ttl", originalTtl); ctx.put("_source", sourceAndContent.v2()); ctx.put("_now", nowInMillis.getAsLong()); ctx = executeScript(request.script, ctx); operation = (String) ctx.get("op"); Object fetchedTimestamp = ctx.get("_timestamp"); if (fetchedTimestamp != null) { timestamp = fetchedTimestamp.toString(); } else if (originalTimestamp != null) { // No timestamp has been given in the update script, so we keep the previous timestamp if there is one timestamp = originalTimestamp.toString(); } ttl = getTTLFromScriptContext(ctx); updatedSourceAsMap = (Map<String, Object>) ctx.get("_source"); } // apply script to update the source // No TTL has been given in the update script so we keep previous TTL value if there is one if (ttl == null) { Long ttlAsLong = getResult.getFields().containsKey(TTLFieldMapper.NAME) ? 
(Long) getResult.field(TTLFieldMapper.NAME).getValue() : null; if (ttlAsLong != null) { ttl = new TimeValue(ttlAsLong - TimeValue.nsecToMSec(System.nanoTime() - getDateNS));// It is an approximation of exact TTL value, could be improved } } if (operation == null || "index".equals(operation)) { final IndexRequest indexRequest = Requests.indexRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent) .source(updatedSourceAsMap, updateSourceContentType) .version(updateVersion).versionType(request.versionType()) .waitForActiveShards(request.waitForActiveShards()) .timestamp(timestamp).ttl(ttl) .setRefreshPolicy(request.getRefreshPolicy()); return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType); } else if ("delete".equals(operation)) { DeleteRequest deleteRequest = Requests.deleteRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent) .version(updateVersion).versionType(request.versionType()) .waitForActiveShards(request.waitForActiveShards()) .setRefreshPolicy(request.getRefreshPolicy()); return new Result(deleteRequest, DocWriteResponse.Result.DELETED, updatedSourceAsMap, updateSourceContentType); } else if ("none".equals(operation)) { UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); } else { logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getIdOrCode()); UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); return new Result(update, 
DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType);
        }
    }

    /**
     * Executes the given update script against the supplied {@code ctx} map.
     * <p>
     * The script is compiled/looked up through {@code scriptService} under the
     * {@code ScriptContext.Standard.UPDATE} context, given the map as its
     * {@code ctx} variable, run, and then the (possibly script-replaced) map is
     * unwrapped and returned. If {@code scriptService} is {@code null} the map
     * is returned unmodified.
     *
     * @param script the update script to run
     * @param ctx    the script context map (contains _source, _id, etc.)
     * @return the context map after script execution
     * @throws IllegalArgumentException wrapping any failure during script execution
     */
    private Map<String, Object> executeScript(Script script, Map<String, Object> ctx) {
        try {
            if (scriptService != null) {
                ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.UPDATE);
                executableScript.setNextVar("ctx", ctx);
                executableScript.run();
                // we need to unwrap the ctx: the script engine may have wrapped or replaced the map
                ctx = (Map<String, Object>) executableScript.unwrap(ctx);
            }
        } catch (Exception e) {
            throw new IllegalArgumentException("failed to execute script", e);
        }
        return ctx;
    }

    /**
     * Extracts a TTL value the script may have set via the {@code _ttl} key of the
     * script context.
     *
     * @param ctx the script context map, after script execution
     * @return the TTL as a {@link TimeValue} (a {@code Number} is taken as
     *         milliseconds, a {@code String} is parsed as a time value), or
     *         {@code null} when the script set no {@code _ttl}
     */
    private TimeValue getTTLFromScriptContext(Map<String, Object> ctx) {
        Object fetchedTTL = ctx.get("_ttl");
        if (fetchedTTL != null) {
            if (fetchedTTL instanceof Number) {
                return new TimeValue(((Number) fetchedTTL).longValue());
            }
            return TimeValue.parseTimeValue((String) fetchedTTL, null, "_ttl");
        }
        return null;
    }

    /**
     * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response.
     * For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in a update response
     *
     * @param request           the update request (supplies fields() and fetchSource())
     * @param concreteIndex     the concrete index name to report in the result
     * @param version           the document version to report in the result
     * @param source            the updated source as a map (used for field extraction and filtering)
     * @param sourceContentType the content type the filtered source should be rendered in
     * @param sourceAsBytes     the raw updated source, may be {@code null}
     * @return a {@link GetResult} with the requested fields/source, or {@code null}
     *         when neither fields nor source were requested
     */
    public GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version,
            final Map<String, Object> source, XContentType sourceContentType, @Nullable final BytesReference sourceAsBytes) {
        // Nothing requested -> nothing to build.
        if ((request.fields() == null || request.fields().length == 0) &&
                (request.fetchSource() == null || request.fetchSource().fetchSource() == false)) {
            return null;
        }
        SourceLookup sourceLookup = new SourceLookup();
        sourceLookup.setSource(source);
        boolean sourceRequested = false;
        Map<String, GetField> fields = null;
        // BWC path: extract explicitly requested fields from the updated source.
        if (request.fields() != null && request.fields().length > 0) {
            for (String field : request.fields()) {
                if (field.equals("_source")) {
                    // "_source" among fields() means "return the source", not a field lookup.
                    sourceRequested = true;
                    continue;
                }
                Object value = sourceLookup.extractValue(field);
                if (value != null) {
                    if (fields == null) {
                        fields = new HashMap<>(2);
                    }
                    GetField getField = fields.get(field);
                    if (getField == null) {
                        getField = new GetField(field, new ArrayList<>(2));
                        fields.put(field, getField);
                    }
                    getField.getValues().add(value);
                }
            }
        }
        BytesReference sourceFilteredAsBytes = sourceAsBytes;
        if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
            sourceRequested = true;
            // Only re-serialize when include/exclude filtering actually applies;
            // otherwise the raw bytes are returned as-is.
            if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
                Object value = sourceLookup.filter(request.fetchSource());
                try {
                    final int initialCapacity = Math.min(1024, sourceAsBytes.length());
                    BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
                    try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) {
                        builder.value(value);
                        sourceFilteredAsBytes = builder.bytes();
                    }
                } catch (IOException e) {
                    throw new ElasticsearchException("Error filtering source", e);
                }
            }
        }
        // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
        return new GetResult(concreteIndex, request.type(), request.id(), version, true,
                sourceRequested ? sourceFilteredAsBytes : null, fields);
    }

    /**
     * Immutable holder for the outcome of preparing an update: the write action
     * to execute (index/delete request or a ready-made response), the result
     * kind, and the updated source (map + content type) for building responses.
     */
    public static class Result {

        // The action to run (IndexRequest / DeleteRequest) or the finished UpdateResponse for noops.
        private final Streamable action;
        private final DocWriteResponse.Result result;
        private final Map<String, Object> updatedSourceAsMap;
        private final XContentType updateSourceContentType;

        public Result(Streamable action, DocWriteResponse.Result result, Map<String, Object> updatedSourceAsMap,
                XContentType updateSourceContentType) {
            this.action = action;
            this.result = result;
            this.updatedSourceAsMap = updatedSourceAsMap;
            this.updateSourceContentType = updateSourceContentType;
        }

        // Caller-side cast; the caller knows which concrete action it asked for.
        @SuppressWarnings("unchecked")
        public <T extends Streamable> T action() {
            return (T) action;
        }

        public DocWriteResponse.Result getResponseResult() {
            return result;
        }

        public Map<String, Object> updatedSourceAsMap() {
            return updatedSourceAsMap;
        }

        public XContentType updateSourceContentType() {
            return updateSourceContentType;
        }
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 */

package com.microsoft.azure.management.compute.samples;

import com.microsoft.azure.PagedList;
import com.microsoft.azure.management.Azure;
import com.microsoft.azure.management.compute.CachingTypes;
import com.microsoft.azure.management.compute.StorageAccountTypes;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetVM;
import com.microsoft.azure.management.network.LoadBalancerInboundNatRule;
import com.microsoft.azure.management.network.Network;
import com.microsoft.azure.management.network.PublicIPAddress;
import com.microsoft.azure.management.network.LoadBalancer;
import com.microsoft.azure.management.network.TransportProtocol;
import com.microsoft.azure.management.compute.VirtualMachineScaleSet;
import com.microsoft.azure.management.compute.KnownLinuxVirtualMachineImage;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetSkuTypes;
import com.microsoft.azure.management.network.VirtualMachineScaleSetNetworkInterface;
import com.microsoft.azure.management.network.VirtualMachineScaleSetNicIPConfiguration;
import com.microsoft.azure.management.resources.fluentcore.arm.Region;
import com.microsoft.azure.management.resources.fluentcore.utils.SdkContext;
import com.microsoft.azure.management.samples.Utils;
import com.microsoft.rest.LogLevel;

import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Azure Compute sample for managing virtual machine scale sets with managed data disks
 * (NOTE(review): the original header said "un-managed disks", but the code below creates
 * managed data disks via withNewDataDisk(...) and later updates "managed data disks") -
 *  - Create a virtual machine scale set behind an Internet facing load balancer
 *  - Install Apache Web servers in virtual machines in the virtual machine scale set
 *  - List the network interfaces associated with the virtual machine scale set
 *  - List scale set virtual machine instances and SSH collection string
 *  - Stop a virtual machine scale set
 *  - Start a virtual machine scale set
 *  - Update a virtual machine scale set
 *    - Double the no. of virtual machines
 *  - Restart a virtual machine scale set
 */
public final class ManageVirtualMachineScaleSet {

    /**
     * Main function which runs the actual sample.
     * All resources are created inside a randomly-named resource group that is
     * deleted again in the finally block, success or failure.
     * @param azure instance of the azure client
     * @return true if sample runs successfully
     */
    public static boolean runSample(Azure azure) {
        final Region region = Region.US_WEST_CENTRAL;
        final String rgName = SdkContext.randomResourceName("rgCOVS", 15);
        final String vnetName = SdkContext.randomResourceName("vnet", 24);
        final String loadBalancerName1 = SdkContext.randomResourceName("intlb" + "-", 18);
        final String publicIpName = "pip-" + loadBalancerName1;
        final String frontendName = loadBalancerName1 + "-FE1";
        final String backendPoolName1 = loadBalancerName1 + "-BAP1";
        final String backendPoolName2 = loadBalancerName1 + "-BAP2";
        final String httpProbe = "httpProbe";
        final String httpsProbe = "httpsProbe";
        final String httpLoadBalancingRule = "httpRule";
        final String httpsLoadBalancingRule = "httpsRule";
        // NAT pool names encode the frontend-port-range -> backend-port mapping below.
        final String natPool50XXto22 = "natPool50XXto22";
        final String natPool60XXto23 = "natPool60XXto23";
        final String vmssName = SdkContext.randomResourceName("vmss", 24);
        final String userName = "tirekicker";
        // Sample-only public SSH key; do not reuse for real deployments.
        final String sshKey = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfSPC2K7LZcFKEO+/t3dzmQYtrJFZNxOsbVgOVKietqHyvmYGHEC0J2wPdAqQ/63g/hhAEFRoyehM+rbeDri4txB3YFfnOK58jqdkyXzupWqXzOrlKY4Wz9SKjjN765+dqUITjKRIaAip1Ri137szRg71WnrmdP3SphTRlCx1Bk2nXqWPsclbRDCiZeF8QOTi4JqbmJyK5+0UqhqYRduun8ylAwKKQJ1NJt85sYIHn9f1Rfr6Tq2zS0wZ7DHbZL+zB5rSlAr8QyUdg/GQD+cmSs6LvPJKL78d6hMGk84ARtFo4A79ovwX/Fj01znDQkU6nJildfkaolH2rWFG/qttD azjava@javalib.com";
        final String apacheInstallScript = "https://raw.githubusercontent.com/Azure/azure-sdk-for-java/master/azure-samples/src/main/resources/install_apache.sh";
        final String installCommand = "bash install_apache.sh";
        List<String> fileUris = new ArrayList<>();
        fileUris.add(apacheInstallScript);
        try {

            //=============================================================
            // Create a virtual network with a frontend subnet

            System.out.println("Creating virtual network with a frontend subnet ...");

            Network network = azure.networks().define(vnetName)
                    .withRegion(region)
                    .withNewResourceGroup(rgName)
                    .withAddressSpace("172.16.0.0/16")
                    .defineSubnet("Front-end")
                        .withAddressPrefix("172.16.1.0/24")
                        .attach()
                    .create();

            System.out.println("Created a virtual network");
            // Print the virtual network details
            Utils.print(network);

            //=============================================================
            // Create a public IP address

            System.out.println("Creating a public IP address...");

            PublicIPAddress publicIPAddress = azure.publicIPAddresses().define(publicIpName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withLeafDomainLabel(publicIpName)
                    .create();

            System.out.println("Created a public IP address");
            // Print the virtual network details
            Utils.print(publicIPAddress);

            //=============================================================
            // Create an Internet facing load balancer with
            // One frontend IP address
            // Two backend address pools which contain network interfaces for the virtual
            //  machines to receive HTTP and HTTPS network traffic from the load balancer
            // Two load balancing rules for HTTP and HTTPS to map public ports on the load
            //  balancer to ports in the backend address pool
            // Two probes which contain HTTP and HTTPS health probes used to check availability
            //  of virtual machines in the backend address pool
            // Two inbound NAT pools which contain rules that map a public port on the load
            //  balancer to a port for a specific virtual machine in the backend address pool
            //  - this provides direct VM connectivity for SSH to port 22 and TELNET to port 23
            //  (NOTE(review): an earlier comment said "Three inbound NAT rules"; only two
            //  NAT pools are defined below)

            System.out.println("Creating a Internet facing load balancer with ...");
            System.out.println("- A frontend IP address");
            System.out.println("- Two backend address pools which contain network interfaces for the virtual\n"
                    + " machines to receive HTTP and HTTPS network traffic from the load balancer");
            System.out.println("- Two load balancing rules for HTTP and HTTPS to map public ports on the load\n"
                    + " balancer to ports in the backend address pool");
            System.out.println("- Two probes which contain HTTP and HTTPS health probes used to check availability\n"
                    + " of virtual machines in the backend address pool");
            System.out.println("- Two inbound NAT rules which contain rules that map a public port on the load\n"
                    + " balancer to a port for a specific virtual machine in the backend address pool\n"
                    + " - this provides direct VM connectivity for SSH to port 22 and TELNET to port 23");

            LoadBalancer loadBalancer1 = azure.loadBalancers().define(loadBalancerName1)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    // Add two rules that uses above backend and probe
                    .defineLoadBalancingRule(httpLoadBalancingRule)
                        .withProtocol(TransportProtocol.TCP)
                        .fromFrontend(frontendName)
                        .fromFrontendPort(80)
                        .toBackend(backendPoolName1)
                        .withProbe(httpProbe)
                        .attach()
                    .defineLoadBalancingRule(httpsLoadBalancingRule)
                        .withProtocol(TransportProtocol.TCP)
                        .fromFrontend(frontendName)
                        .fromFrontendPort(443)
                        .toBackend(backendPoolName2)
                        .withProbe(httpsProbe)
                        .attach()
                    // Add nat pools to enable direct VM connectivity for
                    //  SSH to port 22 and TELNET to port 23
                    .defineInboundNatPool(natPool50XXto22)
                        .withProtocol(TransportProtocol.TCP)
                        .fromFrontend(frontendName)
                        .fromFrontendPortRange(5000, 5099)
                        .toBackendPort(22)
                        .attach()
                    .defineInboundNatPool(natPool60XXto23)
                        .withProtocol(TransportProtocol.TCP)
                        .fromFrontend(frontendName)
                        .fromFrontendPortRange(6000, 6099)
                        .toBackendPort(23)
                        .attach()
                    // Explicitly define the frontend
                    .definePublicFrontend(frontendName)
                        .withExistingPublicIPAddress(publicIPAddress)
                        .attach()
                    // Add two probes one per rule
                    .defineHttpProbe(httpProbe)
                        .withRequestPath("/")
                        .withPort(80)
                        .attach()
                    .defineHttpProbe(httpsProbe)
                        .withRequestPath("/")
                        .withPort(443)
                        .attach()
                    .create();

            // Print load balancer details
            System.out.println("Created a load balancer");
            Utils.print(loadBalancer1);

            //=============================================================
            // Create a virtual machine scale set with three virtual machines
            // And, install Apache Web servers on them

            System.out.println("Creating virtual machine scale set with three virtual machines"
                    + " in the frontend subnet ...");

            Date t1 = new Date();

            VirtualMachineScaleSet virtualMachineScaleSet = azure.virtualMachineScaleSets().define(vmssName)
                    .withRegion(region)
                    .withExistingResourceGroup(rgName)
                    .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_D3_V2)
                    .withExistingPrimaryNetworkSubnet(network, "Front-end")
                    .withExistingPrimaryInternetFacingLoadBalancer(loadBalancer1)
                    .withPrimaryInternetFacingLoadBalancerBackends(backendPoolName1, backendPoolName2)
                    .withPrimaryInternetFacingLoadBalancerInboundNatPools(natPool50XXto22, natPool60XXto23)
                    .withoutPrimaryInternalLoadBalancer()
                    .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                    .withRootUsername(userName)
                    .withSsh(sshKey)
                    // Three managed data disks at LUNs 0, 1 and 2
                    .withNewDataDisk(100)
                    .withNewDataDisk(100, 1, CachingTypes.READ_WRITE)
                    .withNewDataDisk(100, 2, CachingTypes.READ_WRITE, StorageAccountTypes.STANDARD_LRS)
                    .withCapacity(3)
                    // Use a VM extension to install Apache Web servers
                    .defineNewExtension("CustomScriptForLinux")
                        .withPublisher("Microsoft.OSTCExtensions")
                        .withType("CustomScriptForLinux")
                        .withVersion("1.4")
                        .withMinorVersionAutoUpgrade()
                        .withPublicSetting("fileUris", fileUris)
                        .withPublicSetting("commandToExecute", installCommand)
                        .attach()
                    .create();

            Date t2 = new Date();
            System.out.println("Created a virtual machine scale set with "
                    + "3 Linux VMs & Apache Web servers on them: (took "
                    + ((t2.getTime() - t1.getTime()) / 1000) + " seconds) ");
            System.out.println();

            // Print virtual machine scale set details
            // Utils.print(virtualMachineScaleSet);

            //=============================================================
            // List virtual machine scale set network interfaces

            System.out.println("Listing scale set network interfaces ...");
            PagedList<VirtualMachineScaleSetNetworkInterface> vmssNics = virtualMachineScaleSet.listNetworkInterfaces();
            for (VirtualMachineScaleSetNetworkInterface vmssNic : vmssNics) {
                System.out.println(vmssNic.id());
            }

            //=============================================================
            // List virtual machine scale set instance network interfaces and SSH connection string

            System.out.println("Listing scale set virtual machine instance network interfaces and SSH connection string...");
            for (VirtualMachineScaleSetVM instance : virtualMachineScaleSet.virtualMachines().list()) {
                System.out.println("Scale set virtual machine instance #" + instance.instanceId());
                System.out.println(instance.id());
                PagedList<VirtualMachineScaleSetNetworkInterface> networkInterfaces = instance.listNetworkInterfaces();
                // Pick the first NIC
                VirtualMachineScaleSetNetworkInterface networkInterface = networkInterfaces.get(0);
                for (VirtualMachineScaleSetNicIPConfiguration ipConfig :networkInterface.ipConfigurations().values()) {
                    if (ipConfig.isPrimary()) {
                        // Find the NAT rule that exposes SSH (backend port 22) for this instance.
                        List<LoadBalancerInboundNatRule> natRules = ipConfig.listAssociatedLoadBalancerInboundNatRules();
                        for (LoadBalancerInboundNatRule natRule : natRules) {
                            if (natRule.backendPort() == 22) {
                                System.out.println("SSH connection string: " + userName + "@" + publicIPAddress.fqdn() + ":" + natRule.frontendPort());
                                break;
                            }
                        }
                        break;
                    }
                }
            }

            //=============================================================
            // Stop the virtual machine scale set

            System.out.println("Stopping virtual machine scale set ...");
            virtualMachineScaleSet.powerOff();
            System.out.println("Stopped virtual machine scale set");

            //=============================================================
            // Deallocate the virtual machine scale set

            System.out.println("De-allocating virtual machine scale set ...");
            virtualMachineScaleSet.deallocate();
            System.out.println("De-allocated virtual machine scale set");

            //=============================================================
            // Update the virtual machine scale set by removing and adding disk
            // NOTE(review): withoutDataDisk(200) references LUN 200, but the disks
            // above were created at LUNs 0, 1 and 2 — confirm the intended LUN.

            System.out.println("Updating virtual machine scale set managed data disks...");
            virtualMachineScaleSet.update()
                    .withoutDataDisk(0)
                    .withoutDataDisk(200)
                    .apply();
            System.out.println("Updated virtual machine scale set");

            //=============================================================
            // Start the virtual machine scale set

            System.out.println("Starting virtual machine scale set ...");
            virtualMachineScaleSet.start();
            System.out.println("Started virtual machine scale set");

            //=============================================================
            // Update the virtual machine scale set
            // - double the no. of virtual machines

            System.out.println("Updating virtual machine scale set "
                    + "- double the no. of virtual machines ...");

            virtualMachineScaleSet.update()
                    .withCapacity(6)
                    .apply();

            System.out.println("Doubled the no. of virtual machines in "
                    + "the virtual machine scale set");

            //=============================================================
            // re-start virtual machine scale set

            System.out.println("re-starting virtual machine scale set ...");
            virtualMachineScaleSet.restart();
            System.out.println("re-started virtual machine scale set");
            return true;
        } catch (Exception f) {

            System.out.println(f.getMessage());
            f.printStackTrace();

        } finally {
            // Best-effort cleanup of everything the sample created.
            try {
                System.out.println("Deleting Resource Group: " + rgName);
                azure.resourceGroups().deleteByName(rgName);
                System.out.println("Deleted Resource Group: " + rgName);
            } catch (NullPointerException npe) {
                System.out.println("Did not create any resources in Azure. No clean up is necessary");
            } catch (Exception g) {
                g.printStackTrace();
            }
        }
        return false;
    }

    /**
     * Main entry point.
     * Authenticates from the file referenced by the AZURE_AUTH_LOCATION
     * environment variable, then runs the sample.
     * @param args parameters
     */
    public static void main(String[] args) {
        try {

            //=============================================================
            // Authenticate

            System.out.println(System.getenv("AZURE_AUTH_LOCATION"));
            final File credFile = new File(System.getenv("AZURE_AUTH_LOCATION"));

            Azure azure = Azure.configure()
                    .withLogLevel(LogLevel.BASIC)
                    .authenticate(credFile)
                    .withDefaultSubscription();

            // Print selected subscription
            System.out.println("Selected subscription: " + azure.subscriptionId());
            runSample(azure);
        } catch (Exception e) {
            System.out.println(e.getMessage());
            e.printStackTrace();
        }
    }

    // Utility class; not instantiable.
    private ManageVirtualMachineScaleSet() {
    }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.projectRoots.impl; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.projectRoots.ProjectRootListener; import com.intellij.openapi.projectRoots.ex.ProjectRoot; import com.intellij.openapi.projectRoots.ex.ProjectRootContainer; import com.intellij.openapi.roots.OrderRootType; import com.intellij.openapi.roots.PersistentOrderRootType; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.JDOMExternalizable; import com.intellij.openapi.vfs.*; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashMap; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import java.util.List; import java.util.Map; /** * @author mike */ public class ProjectRootContainerImpl implements JDOMExternalizable, ProjectRootContainer { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.projectRoots.impl.ProjectRootContainerImpl"); private final Map<OrderRootType, CompositeProjectRoot> myRoots = new HashMap<OrderRootType, CompositeProjectRoot>(); private Map<OrderRootType, VirtualFile[]> myFiles = new HashMap<OrderRootType, VirtualFile[]>(); private boolean myInsideChange = false; private final List<ProjectRootListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList(); private boolean myNoCopyJars = 
false; public ProjectRootContainerImpl(boolean noCopyJars) { myNoCopyJars = noCopyJars; for (OrderRootType rootType : OrderRootType.getAllTypes()) { myRoots.put(rootType, new CompositeProjectRoot()); myFiles.put(rootType, VirtualFile.EMPTY_ARRAY); } } @Override @NotNull public VirtualFile[] getRootFiles(@NotNull OrderRootType type) { return myFiles.get(type); } @Override @NotNull public ProjectRoot[] getRoots(@NotNull OrderRootType type) { return myRoots.get(type).getProjectRoots(); } @Override public void startChange() { LOG.assertTrue(!myInsideChange); myInsideChange = true; } @Override public void finishChange() { LOG.assertTrue(myInsideChange); HashMap<OrderRootType, VirtualFile[]> oldRoots = new HashMap<OrderRootType, VirtualFile[]>(myFiles); boolean changes = false; for (OrderRootType orderRootType : OrderRootType.getAllTypes()) { final VirtualFile[] roots = myRoots.get(orderRootType).getVirtualFiles(); changes = changes || !Comparing.equal(roots, oldRoots.get(orderRootType)); myFiles.put(orderRootType, roots); } if (changes) { fireRootsChanged(); } myInsideChange = false; } public void addProjectRootContainerListener(ProjectRootListener listener) { myListeners.add(listener); } public void removeProjectRootContainerListener(ProjectRootListener listener) { myListeners.remove(listener); } private void fireRootsChanged() { for (final ProjectRootListener listener : myListeners) { listener.rootsChanged(); } } @Override public void removeRoot(@NotNull ProjectRoot root, @NotNull OrderRootType type) { LOG.assertTrue(myInsideChange); myRoots.get(type).remove(root); } @Override @NotNull public ProjectRoot addRoot(@NotNull VirtualFile virtualFile, @NotNull OrderRootType type) { LOG.assertTrue(myInsideChange); return myRoots.get(type).add(virtualFile); } @Override public void addRoot(@NotNull ProjectRoot root, @NotNull OrderRootType type) { LOG.assertTrue(myInsideChange); myRoots.get(type).add(root); } @Override public void removeAllRoots(@NotNull OrderRootType type) { 
LOG.assertTrue(myInsideChange); myRoots.get(type).clear(); } @Override public void removeRoot(@NotNull VirtualFile root, @NotNull OrderRootType type) { LOG.assertTrue(myInsideChange); myRoots.get(type).remove(root); } @Override public void removeAllRoots() { LOG.assertTrue(myInsideChange); for (CompositeProjectRoot myRoot : myRoots.values()) { myRoot.clear(); } } @Override public void update() { LOG.assertTrue(myInsideChange); for (CompositeProjectRoot myRoot : myRoots.values()) { myRoot.update(); } } @Override public void readExternal(Element element) { for (PersistentOrderRootType type : OrderRootType.getAllPersistentTypes()) { read(element, type); } ApplicationManager.getApplication().runReadAction(new Runnable() { @Override public void run() { myFiles = new HashMap<OrderRootType, VirtualFile[]>(); for (OrderRootType rootType : myRoots.keySet()) { CompositeProjectRoot root = myRoots.get(rootType); if (myNoCopyJars) { setNoCopyJars(root); } myFiles.put(rootType, root.getVirtualFiles()); } } }); for (OrderRootType type : OrderRootType.getAllTypes()) { final VirtualFile[] newRoots = getRootFiles(type); final VirtualFile[] oldRoots = VirtualFile.EMPTY_ARRAY; if (!Comparing.equal(oldRoots, newRoots)) { fireRootsChanged(); break; } } } @Override public void writeExternal(Element element) { List<PersistentOrderRootType> allTypes = OrderRootType.getSortedRootTypes(); for (PersistentOrderRootType type : allTypes) { write(element, type); } } private static void setNoCopyJars(ProjectRoot root) { if (root instanceof SimpleProjectRoot) { String url = ((SimpleProjectRoot)root).getUrl(); if (StandardFileSystems.JAR_PROTOCOL.equals(VirtualFileManager.extractProtocol(url))) { String path = VirtualFileManager.extractPath(url); final VirtualFileSystem fileSystem = StandardFileSystems.jar(); if (fileSystem instanceof JarCopyingFileSystem) { ((JarCopyingFileSystem)fileSystem).setNoCopyJarForPath(path); } } } else if (root instanceof CompositeProjectRoot) { ProjectRoot[] roots = 
((CompositeProjectRoot)root).getProjectRoots(); for (ProjectRoot root1 : roots) { setNoCopyJars(root1); } } } private void read(Element element, PersistentOrderRootType type) { String sdkRootName = type.getSdkRootName(); Element child = sdkRootName != null ? element.getChild(sdkRootName) : null; if (child == null) { myRoots.put(type, new CompositeProjectRoot()); return; } List<Element> children = child.getChildren(); LOG.assertTrue(children.size() == 1); CompositeProjectRoot root = (CompositeProjectRoot)ProjectRootUtil.read(children.get(0)); myRoots.put(type, root); } private void write(Element roots, PersistentOrderRootType type) { String sdkRootName = type.getSdkRootName(); if (sdkRootName != null) { Element e = new Element(sdkRootName); roots.addContent(e); final Element root = ProjectRootUtil.write(myRoots.get(type)); if (root != null) { e.addContent(root); } } } @SuppressWarnings({"HardCodedStringLiteral"}) void readOldVersion(Element child) { for (Element root : child.getChildren("root")) { String url = root.getAttributeValue("file"); SimpleProjectRoot projectRoot = new SimpleProjectRoot(url); String type = root.getChild("property").getAttributeValue("value"); for (PersistentOrderRootType rootType : OrderRootType.getAllPersistentTypes()) { if (type.equals(rootType.getOldSdkRootName())) { addRoot(projectRoot, rootType); break; } } } myFiles = new HashMap<OrderRootType, VirtualFile[]>(); for (OrderRootType rootType : myRoots.keySet()) { myFiles.put(rootType, myRoots.get(rootType).getVirtualFiles()); } for (OrderRootType type : OrderRootType.getAllTypes()) { final VirtualFile[] oldRoots = VirtualFile.EMPTY_ARRAY; final VirtualFile[] newRoots = getRootFiles(type); if (!Comparing.equal(oldRoots, newRoots)) { fireRootsChanged(); break; } } } }
package org.ovirt.engine.core.common.utils;

import java.util.List;

import org.ovirt.engine.core.common.businessentities.BaseDisk;
import org.ovirt.engine.core.common.businessentities.BootSequence;
import org.ovirt.engine.core.common.businessentities.Disk;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VmBase;
import org.ovirt.engine.core.common.businessentities.VmDevice;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.Version;

/**
 * Static helpers for classifying VM devices and (re)computing their boot order
 * from a {@link BootSequence}.
 */
public class VmDeviceCommonUtils {

    public static final String LOW_VIDEO_MEM = "32768";
    public static final String HIGH_VIDEO_MEM = "65536";

    // Single-character codes composing a BootSequence name (e.g. "CDN").
    static final String NETWORK_CHAR = "N";
    static final String CDROM_CHAR = "D";
    static final String DRIVE_CHAR = "C";

    public static final String CDROM_IMAGE_ID = "11111111-1111-1111-1111-111111111111";

    /** @return true if the device is a network interface. */
    public static boolean isNetwork(VmDevice device) {
        return (device.getType().equals(VmDeviceType.INTERFACE.getName()));
    }

    /** @return true if the device is a disk (type and device are both "disk"). */
    public static boolean isDisk(VmDevice device) {
        return (device.getType().equals(VmDeviceType.DISK.getName())
                && device.getDevice().equals(VmDeviceType.DISK.getName()));
    }

    /** @return true if the device is a CD-ROM (disk type, cdrom device). */
    public static boolean isCD(VmDevice device) {
        return (device.getType().equals(VmDeviceType.DISK.getName())
                && device.getDevice().equals(VmDeviceType.CDROM.getName()));
    }

    /**
     * Recomputes the boot order of all bootable devices according to the VM's
     * effective boot sequence (the run-once sequence when the VM is run-once,
     * otherwise its default sequence).
     *
     * @param vm           the VM whose sequence and disks drive the computation
     * @param devices      the devices to update in place
     * @param isOldCluster true for pre-3.1 clusters, where only one system disk
     *                     may be bootable
     */
    public static void updateVmDevicesBootOrder(VM vm, List<VmDevice> devices, boolean isOldCluster) {
        int bootOrder = 0;
        // reset current boot order of all relevant devices before recomputing it;
        // a boot order of 0 prevents the device from being sent to VDSM.
        for (VmDevice device : devices) {
            if (isBootable(device)) {
                device.setBootOrder(0);
            }
        }
        BootSequence bootSequence =
                (vm.isRunOnce()) ? vm.getboot_sequence() : vm.getdefault_boot_sequence();
        switch (bootSequence) {
        case C:
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            break;
        case CD:
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            bootOrder = setCDBootOrder(devices, bootOrder);
            break;
        case CDN:
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            bootOrder = setCDBootOrder(devices, bootOrder);
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            break;
        case CN:
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            break;
        case CND:
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            bootOrder = setCDBootOrder(devices, bootOrder);
            break;
        case D:
            bootOrder = setCDBootOrder(devices, bootOrder);
            break;
        case DC:
            bootOrder = setCDBootOrder(devices, bootOrder);
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            break;
        case DCN:
            bootOrder = setCDBootOrder(devices, bootOrder);
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            break;
        case DN:
            bootOrder = setCDBootOrder(devices, bootOrder);
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            break;
        case DNC:
            bootOrder = setCDBootOrder(devices, bootOrder);
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            break;
        case N:
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            break;
        case NC:
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            break;
        case NCD:
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            bootOrder = setCDBootOrder(devices, bootOrder);
            break;
        case ND:
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            bootOrder = setCDBootOrder(devices, bootOrder);
            break;
        case NDC:
            bootOrder = setNetworkBootOrder(devices, bootOrder);
            bootOrder = setCDBootOrder(devices, bootOrder);
            bootOrder = setDiskBootOrder(vm, devices, bootOrder, isOldCluster);
            break;
        }
    }

    /**
     * Assigns the next boot-order slots to all plugged bridge network interfaces.
     *
     * @param devices   devices to scan
     * @param bootOrder last assigned boot order
     * @return the last boot order assigned
     */
    private static int setNetworkBootOrder(List<VmDevice> devices, int bootOrder) {
        for (VmDevice device : devices) {
            if (device.getType().equals(VmDeviceType.INTERFACE.getName())
                    && device.getDevice().equals(VmDeviceType.BRIDGE.getName())) {
                if (device.getIsPlugged()) {
                    device.setBootOrder(++bootOrder);
                }
            }
        }
        return bootOrder;
    }

    /**
     * Assigns the next boot-order slot to the first plugged CD-ROM device.
     *
     * @param devices   devices to scan
     * @param bootOrder last assigned boot order
     * @return the last boot order assigned
     */
    private static int setCDBootOrder(List<VmDevice> devices, int bootOrder) {
        for (VmDevice device : devices) {
            if (device.getType().equals(VmDeviceType.DISK.getName())
                    && device.getDevice().equals(VmDeviceType.CDROM.getName())) {
                if (device.getIsPlugged()) {
                    device.setBootOrder(++bootOrder);
                }
                break; // only one CD is currently supported.
            }
        }
        return bootOrder;
    }

    /**
     * Assigns the next boot-order slots to boot-flagged disks of the VM.
     *
     * @param vm           VM owning the disk map consulted for the boot flag
     * @param devices      devices to scan
     * @param bootOrder    last assigned boot order
     * @param isOldCluster if true, stop after the first bootable disk (only one
     *                     system disk can be bootable in old versions)
     * @return the last boot order assigned
     */
    private static int setDiskBootOrder(VM vm, List<VmDevice> devices, int bootOrder, boolean isOldCluster) {
        for (VmDevice device : devices) {
            if (device.getType().equals(VmDeviceType.DISK.getName())
                    && device.getDevice().equals(VmDeviceType.DISK.getName())) {
                Guid id = device.getDeviceId();
                if (id != null && !id.equals(Guid.Empty)) {
                    // gets the image disk
                    BaseDisk disk = getDisk(vm, id);
                    if (disk != null && disk.isBoot()) {
                        device.setBootOrder(++bootOrder);
                        if (isOldCluster) {
                            break;
                        }
                    }
                }
            }
        }
        return bootOrder;
    }

    /** Looks up a disk in the VM's disk map by id; null when absent. */
    private static Disk getDisk(VM vm, Guid id) {
        for (Disk disk : vm.getDiskMap().values()) {
            if (disk.getId().equals(id)) {
                return disk;
            }
        }
        return null;
    }

    /**
     * Computes the old boot sequence enum value from the given list of devices by
     * concatenating the device-class characters of every device with a positive
     * boot order. Falls back to {@link BootSequence#C} if no enum name matches.
     *
     * @param devices devices to inspect
     * @return the matching boot sequence, or C by default
     */
    public static BootSequence getBootSequence(List<VmDevice> devices) {
        StringBuilder sb = new StringBuilder();
        BootSequence ret = BootSequence.C;
        for (VmDevice device : devices) {
            if (device.getBootOrder() > 0) {
                if (isNetwork(device) && sb.indexOf(NETWORK_CHAR) < 0) {
                    sb.append(NETWORK_CHAR);
                }
                if (isDisk(device) && sb.indexOf(DRIVE_CHAR) < 0) {
                    sb.append(DRIVE_CHAR);
                }
                if (isCD(device) && sb.indexOf(CDROM_CHAR) < 0) {
                    sb.append(CDROM_CHAR);
                }
                // maximum string is 3 characters, so, if reached, exit loop.
                if (sb.length() == 3) {
                    break;
                }
            }
        }
        for (BootSequence bs : BootSequence.values()) {
            if (bs.name().equals(sb.toString())) {
                ret = bs;
                break;
            }
        }
        return ret;
    }

    /**
     * @return true if the cluster version predates 3.1.
     *         Fixed: the previous check {@code !(major >= 3 && minor >= 1)}
     *         wrongly classified versions such as 4.0 as old.
     */
    public static boolean isOldClusterVersion(Version version) {
        return version.getMajor() < 3
                || (version.getMajor() == 3 && version.getMinor() < 1);
    }

    /** @return true if the given type/device pair appears in the managed-devices white list. */
    public static boolean isInWhiteList(String type, String device) {
        String expr = getDeviceTypeSearchExpr(type, device);
        String whiteList = Config.GetValue(ConfigValues.ManagedDevicesWhiteList);
        return whiteList.contains(expr);
    }

    /** A device is bootable when it is a disk or a network interface. */
    private static boolean isBootable(VmDevice device) {
        return (VmDeviceType.DISK.getName().equals(device.getType())
                || VmDeviceType.INTERFACE.getName().equals(device.getType()));
    }

    /** Builds the white-list search token, e.g. {@code "type=disk device=cdrom "}. */
    private static String getDeviceTypeSearchExpr(String type, String device) {
        return "type=" + type + " device=" + device + " ";
    }

    /**
     * is special device - device which is managed, but contains the general properties
     */
    public static boolean isSpecialDevice(String device, String type) {
        return (VmDeviceType.SOUND.getName().equals(type)
                || VmDeviceType.USB.getName().equals(device)
                || (VmDeviceType.SPICEVMC.getName().equals(device)
                        && VmDeviceType.REDIR.getName().equals(type))
                || (VmDeviceType.MEMBALLOON.getName().equals(device)
                        && VmDeviceType.BALLOON.getName().equals(type)));
    }
}
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.worker.block;

import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;

import alluxio.AlluxioTestDirectory;
import alluxio.AlluxioURI;
import alluxio.ConfigurationRule;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.exception.WorkerOutOfSpaceException;
import alluxio.proto.dataserver.Protocol;
import alluxio.underfs.UfsManager;
import alluxio.underfs.UfsManager.UfsClient;
import alluxio.underfs.UnderFileSystem;
import alluxio.underfs.UnderFileSystemConfiguration;
import alluxio.util.io.BufferUtils;
import alluxio.worker.block.io.BlockReader;
import alluxio.worker.block.meta.UnderFileSystemBlockMeta;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.nio.ByteBuffer;
import java.util.HashMap;

/**
 * Unit tests for {@link UnderFileSystemBlockReader}: reading full/partial blocks from a
 * UFS-backed file, verifying when the block is (or is not) cached in the local block
 * store, and verifying graceful degradation when the block store throws while caching.
 */
public final class UnderFileSystemBlockReaderTest {
  private static final long TEST_BLOCK_SIZE = 1024;
  private static final long SESSION_ID = 1;
  private static final long BLOCK_ID = 2;

  private UnderFileSystemBlockReader mReader;
  private BlockStore mAlluxioBlockStore;
  private UnderFileSystemBlockMeta mUnderFileSystemBlockMeta;
  private UfsManager mUfsManager;
  private UfsInputStreamCache mUfsInstreamCache;
  private Protocol.OpenUfsBlockOptions mOpenUfsBlockOptions;

  /** Rule to create a new temporary folder during each test. */
  @Rule
  public TemporaryFolder mFolder = new TemporaryFolder();

  @Rule
  public ConfigurationRule mConfigurationRule =
      new ConfigurationRule(new HashMap<PropertyKey, String>() {
        {
          put(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS, AlluxioTestDirectory
              .createTemporaryDirectory("UnderFileSystemBlockReaderTest-RootUfs")
              .getAbsolutePath());
          // ensure tiered storage uses different tmp dir for each test case
          put(PropertyKey.WORKER_TIERED_STORE_LEVEL0_DIRS_PATH, AlluxioTestDirectory
              .createTemporaryDirectory("UnderFileSystemBlockReaderTest-WorkerDataFolder")
              .getAbsolutePath());
          put(PropertyKey.WORKER_TIERED_STORE_LEVELS, "1");
        }
      }, ServerConfiguration.global());

  /**
   * Writes a 2-block file of increasing bytes into the root UFS and wires a mocked
   * {@link UfsManager} to resolve any mount id to that file. The block under test is
   * the second block (offset {@code TEST_BLOCK_SIZE}).
   */
  @Before
  public void before() throws Exception {
    String ufsFolder = ServerConfiguration.get(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS);
    String testFilePath = File.createTempFile("temp", null, new File(ufsFolder)).getAbsolutePath();
    byte[] buffer = BufferUtils.getIncreasingByteArray((int) TEST_BLOCK_SIZE * 2);
    BufferUtils.writeBufferToFile(testFilePath, buffer);

    mAlluxioBlockStore = new TieredBlockStore();
    mUfsManager = mock(UfsManager.class);
    mUfsInstreamCache = new UfsInputStreamCache();
    // Fix: testFilePath is already a String; the redundant toString() was removed.
    UfsClient ufsClient = new UfsClient(
        () -> UnderFileSystem.Factory.create(testFilePath,
            UnderFileSystemConfiguration.defaults(ServerConfiguration.global())),
        new AlluxioURI(testFilePath));
    when(mUfsManager.get(anyLong())).thenReturn(ufsClient);

    mOpenUfsBlockOptions = Protocol.OpenUfsBlockOptions.newBuilder().setMaxUfsReadConcurrency(10)
        .setBlockSize(TEST_BLOCK_SIZE).setOffsetInFile(TEST_BLOCK_SIZE).setUfsPath(testFilePath)
        .build();
    mUnderFileSystemBlockMeta =
        new UnderFileSystemBlockMeta(SESSION_ID, BLOCK_ID, mOpenUfsBlockOptions);
  }

  /**
   * Asserts that a temp block exists, commits it, and verifies its committed content is
   * the expected increasing byte range [start, start + length).
   */
  private void checkTempBlock(long start, long length) throws Exception {
    Assert.assertNotNull(mAlluxioBlockStore.getTempBlockMeta(SESSION_ID, BLOCK_ID));
    mAlluxioBlockStore.commitBlock(SESSION_ID, BLOCK_ID, false);
    long lockId = mAlluxioBlockStore.lockBlock(SESSION_ID, BLOCK_ID);
    BlockReader reader = mAlluxioBlockStore.getBlockReader(SESSION_ID, BLOCK_ID, lockId);
    Assert.assertEquals(length, reader.getLength());
    ByteBuffer buffer = reader.read(0, length);
    assertTrue(BufferUtils.equalIncreasingByteBuffer((int) start, (int) length, buffer));
    reader.close();
  }

  @Test
  public void readFullBlock() throws Exception {
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        mAlluxioBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuffer buffer = mReader.read(0, TEST_BLOCK_SIZE);
    assertTrue(BufferUtils.equalIncreasingByteBuffer(0, (int) TEST_BLOCK_SIZE, buffer));
    mReader.close();
    checkTempBlock(0, TEST_BLOCK_SIZE);
  }

  @Test
  public void readPartialBlock() throws Exception {
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        mAlluxioBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuffer buffer = mReader.read(0, TEST_BLOCK_SIZE - 1);
    assertTrue(BufferUtils.equalIncreasingByteBuffer(0, (int) TEST_BLOCK_SIZE - 1, buffer));
    mReader.close();
    // partial block should not be cached
    Assert.assertNull(mAlluxioBlockStore.getTempBlockMeta(SESSION_ID, BLOCK_ID));
  }

  @Test
  public void offset() throws Exception {
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        mAlluxioBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuffer buffer = mReader.read(2, TEST_BLOCK_SIZE - 2);
    assertTrue(BufferUtils.equalIncreasingByteBuffer(2, (int) TEST_BLOCK_SIZE - 2, buffer));
    mReader.close();
    // partial block should not be cached
    Assert.assertNull(mAlluxioBlockStore.getTempBlockMeta(SESSION_ID, BLOCK_ID));
  }

  @Test
  public void readOverlap() throws Exception {
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 2, false,
        mAlluxioBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuffer buffer = mReader.read(2, TEST_BLOCK_SIZE - 2);
    assertTrue(BufferUtils.equalIncreasingByteBuffer(2, (int) TEST_BLOCK_SIZE - 2, buffer));
    buffer = mReader.read(0, TEST_BLOCK_SIZE - 2);
    assertTrue(BufferUtils.equalIncreasingByteBuffer(0, (int) TEST_BLOCK_SIZE - 2, buffer));
    buffer = mReader.read(3, TEST_BLOCK_SIZE);
    assertTrue(BufferUtils.equalIncreasingByteBuffer(3, (int) TEST_BLOCK_SIZE - 3, buffer));
    mReader.close();
    // block should be cached as two reads covers the full block
    checkTempBlock(0, TEST_BLOCK_SIZE);
  }

  @Test
  public void readFullBlockNoCache() throws Exception {
    mUnderFileSystemBlockMeta = new UnderFileSystemBlockMeta(SESSION_ID, BLOCK_ID,
        mOpenUfsBlockOptions.toBuilder().setNoCache(true).build());
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        mAlluxioBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuffer buffer = mReader.read(0, TEST_BLOCK_SIZE);
    // read should succeed even if error is thrown when caching
    assertTrue(BufferUtils.equalIncreasingByteBuffer(0, (int) TEST_BLOCK_SIZE, buffer));
    mReader.close();
    Assert.assertNull(mAlluxioBlockStore.getTempBlockMeta(SESSION_ID, BLOCK_ID));
  }

  @Test
  public void readFullBlockRequestSpaceError() throws Exception {
    BlockStore errorThrowingBlockStore = spy(mAlluxioBlockStore);
    doThrow(new WorkerOutOfSpaceException("Ignored"))
        .when(errorThrowingBlockStore)
        .requestSpace(anyLong(), anyLong(), anyLong());
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        errorThrowingBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuffer buffer = mReader.read(0, TEST_BLOCK_SIZE);
    assertTrue(BufferUtils.equalIncreasingByteBuffer(0, (int) TEST_BLOCK_SIZE, buffer));
    mReader.close();
    Assert.assertNull(mAlluxioBlockStore.getTempBlockMeta(SESSION_ID, BLOCK_ID));
  }

  @Test
  public void readFullBlockRequestCreateBlockError() throws Exception {
    BlockStore errorThrowingBlockStore = spy(mAlluxioBlockStore);
    doThrow(new WorkerOutOfSpaceException("Ignored")).when(errorThrowingBlockStore)
        .createBlock(anyLong(), anyLong(), any(AllocateOptions.class));
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        errorThrowingBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuffer buffer = mReader.read(0, TEST_BLOCK_SIZE);
    assertTrue(BufferUtils.equalIncreasingByteBuffer(0, (int) TEST_BLOCK_SIZE, buffer));
    mReader.close();
    Assert.assertNull(mAlluxioBlockStore.getTempBlockMeta(SESSION_ID, BLOCK_ID));
  }

  @Test
  public void transferFullBlock() throws Exception {
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        mAlluxioBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuf buf =
        PooledByteBufAllocator.DEFAULT.buffer((int) TEST_BLOCK_SIZE * 2, (int) TEST_BLOCK_SIZE * 2);
    try {
      while (buf.writableBytes() > 0 && mReader.transferTo(buf) != -1) {
      }
      assertTrue(BufferUtils.equalIncreasingByteBuffer(0, (int) TEST_BLOCK_SIZE, buf.nioBuffer()));
      mReader.close();
    } finally {
      buf.release();
    }
    checkTempBlock(0, TEST_BLOCK_SIZE);
  }

  @Test
  public void transferPartialBlock() throws Exception {
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        mAlluxioBlockStore, mUfsManager, mUfsInstreamCache);
    ByteBuf buf =
        PooledByteBufAllocator.DEFAULT.buffer((int) TEST_BLOCK_SIZE / 2, (int) TEST_BLOCK_SIZE / 2);
    try {
      while (buf.writableBytes() > 0 && mReader.transferTo(buf) != -1) {
      }
      assertTrue(
          BufferUtils.equalIncreasingByteBuffer(0, (int) TEST_BLOCK_SIZE / 2, buf.nioBuffer()));
      mReader.close();
    } finally {
      buf.release();
    }
    // partial block should not be cached
    Assert.assertNull(mAlluxioBlockStore.getTempBlockMeta(SESSION_ID, BLOCK_ID));
  }

  @Test
  public void getLocation() throws Exception {
    mReader = UnderFileSystemBlockReader.create(mUnderFileSystemBlockMeta, 0, false,
        mAlluxioBlockStore, mUfsManager, mUfsInstreamCache);
    assertTrue(mReader.getLocation().startsWith(mOpenUfsBlockOptions.getUfsPath()));
  }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.notification;

import com.intellij.ide.DataManager;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.JBPopupListener;
import com.intellij.openapi.ui.popup.LightweightWindowEvent;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.reference.SoftReference;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import static com.intellij.openapi.util.NlsContexts.*;

/**
 * <p>A notification has an optional title and subtitle, mandatory content (plain text or HTML), and optional actions.</p>
 *
 * <p>The notifications, generally, are shown in the balloons that appear on the screen when the corresponding events take place.
 * Notification balloon is of two types: two or three lines.<br>
 * Two lines: title and content line; title and actions; content line and actions; contents on two lines.<br>
 * Three lines: title and content line and actions; contents on two lines and actions; contents on three lines or more; etc.</p>
 *
 * <p><b>Warning:</b> please avoid links in HTML content, use {@link #addAction(AnAction)} instead.</p>
 * <p>Use {@link Notifications.Bus} to show notifications.</p>
 *
 * @see NotificationAction
 * @see com.intellij.notification.SingletonNotificationManager
 *
 * @author spleaner
 * @author Alexander Lobas
 */
public class Notification {
  /**
   * Tells which actions to keep (i.e. do not put under the "Actions" dropdown) when actions do not fit horizontally
   * into the width of the notification.
   */
  public enum CollapseActionsDirection {KEEP_LEFTMOST, KEEP_RIGHTMOST}

  private static final Logger LOG = Logger.getInstance(Notification.class);
  private static final DataKey<Notification> KEY = DataKey.create("Notification");

  // Unique per instance: creation timestamp + identity hash (see constructor).
  public final @NotNull String id;

  private final @NotNull String myGroupId;
  private final @NotNull NotificationType myType;

  private @Nullable String myDisplayId;
  private @Nullable Icon myIcon;
  private @NotNull @NotificationTitle String myTitle;
  private @Nullable @NotificationSubtitle String mySubtitle;
  private @NotNull @NotificationContent String myContent;
  private @Nullable NotificationListener myListener;
  private @Nullable @LinkLabel String myDropDownText;
  private @Nullable List<AnAction> myActions;
  private @NotNull CollapseActionsDirection myCollapseDirection = CollapseActionsDirection.KEEP_RIGHTMOST;
  private @Nullable AnAction myContextHelpAction;
  private @Nullable Runnable myWhenExpired;
  // Tri-state: null means "derive importance from listener/actions" (see isImportant()).
  private @Nullable Boolean myImportant;
  private final AtomicBoolean myExpired = new AtomicBoolean(false);
  // Weak ref so a closed balloon can be collected; swapped atomically in setBalloon().
  private final AtomicReference<WeakReference<Balloon>> myBalloonRef = new AtomicReference<>();
  private final long myTimestamp = System.currentTimeMillis();

  /** See {@link #Notification(String, String, String, NotificationType)} */
  public Notification(@NotNull String groupId, @NotNull @NotificationContent String content, @NotNull NotificationType type) {
    this(groupId, "", content, type);
  }

  /**
   * @param groupId notification group ID registered in {@code plugin.xml} via {@link com.intellij.notification.impl.NotificationGroupEP}
   * @param title   an optional title (use an empty string ({@code ""}) to display the content without a title)
   */
  public Notification(@NotNull String groupId,
                      @NotNull @NotificationTitle String title,
                      @NotNull @NotificationContent String content,
                      @NotNull NotificationType type) {
    id = myTimestamp + "." + System.identityHashCode(this);
    myGroupId = groupId;
    myType = type;
    myTitle = title;
    myContent = content;
  }

  /**
   * Returns the time (in milliseconds since Jan 1, 1970) when the notification was created.
   */
  public long getTimestamp() {
    return myTimestamp;
  }

  /**
   * Unique ID for usage statistics.
   */
  public @Nullable String getDisplayId() {
    return myDisplayId;
  }

  public @NotNull Notification setDisplayId(@NotNull String displayId) {
    this.myDisplayId = displayId;
    return this;
  }

  public @Nullable Icon getIcon() {
    return myIcon;
  }

  public @NotNull Notification setIcon(@Nullable Icon icon) {
    myIcon = icon;
    return this;
  }

  public @NotNull String getGroupId() {
    return myGroupId;
  }

  public boolean hasTitle() {
    return !isEmpty(myTitle) || !isEmpty(mySubtitle);
  }

  public @NotNull @NotificationTitle String getTitle() {
    return myTitle;
  }

  public @NotNull Notification setTitle(@Nullable @NotificationTitle String title) {
    myTitle = StringUtil.notNullize(title);
    return this;
  }

  public @NotNull Notification setTitle(@Nullable @NotificationTitle String title,
                                        @Nullable @NotificationSubtitle String subtitle) {
    return setTitle(title).setSubtitle(subtitle);
  }

  public @Nullable @NotificationTitle String getSubtitle() {
    return mySubtitle;
  }

  public @NotNull Notification setSubtitle(@Nullable @NotificationTitle String subtitle) {
    mySubtitle = subtitle;
    return this;
  }

  public boolean hasContent() {
    return !isEmpty(myContent);
  }

  // Empty means blank, or blank after stripping HTML tags.
  @Contract(value = "null -> true", pure = true)
  public static boolean isEmpty(@Nullable String text) {
    return StringUtil.isEmptyOrSpaces(text) || StringUtil.isEmptyOrSpaces(StringUtil.stripHtml(text, false));
  }

  public @NotNull @NotificationContent String getContent() {
    return myContent;
  }

  public @NotNull Notification setContent(@NotificationContent @Nullable String content) {
    myContent = StringUtil.notNullize(content);
    return this;
  }

  public @Nullable NotificationListener getListener() {
    return myListener;
  }

  public @NotNull Notification setListener(@NotNull NotificationListener listener) {
    myListener = listener;
    return this;
  }

  /** Retrieves the notification carried by an action event fired via {@link #fire}. */
  public static @NotNull Notification get(@NotNull AnActionEvent e) {
    return Objects.requireNonNull(e.getData(KEY));
  }

  public static void fire(final @NotNull Notification notification, @NotNull AnAction action) {
    fire(notification, action, null);
  }

  /**
   * Performs the action with a data context that exposes this notification under the
   * internal KEY (delegating all other data ids to the given context, if any).
   */
  public static void fire(final @NotNull Notification notification, @NotNull AnAction action, @Nullable DataContext context) {
    DataContext contextWrapper = dataId -> KEY.is(dataId) ? notification : context != null ? context.getData(dataId) : null;
    AnActionEvent event = AnActionEvent.createFromAnAction(action, null, ActionPlaces.NOTIFICATION, contextWrapper);
    if (ActionUtil.lastUpdateAndCheckDumb(action, event, false)) {
      ActionUtil.performActionDumbAwareWithCallbacks(action, event);
    }
  }

  public static void setDataProvider(@NotNull Notification notification, @NotNull JComponent component) {
    DataManager.registerDataProvider(component, dataId -> KEY.is(dataId) ? notification : null);
  }

  // Lazily defaults to the platform's "Actions" label.
  public @NotNull @LinkLabel String getDropDownText() {
    if (myDropDownText == null) {
      myDropDownText = IdeBundle.message("link.label.actions");
    }
    return myDropDownText;
  }

  /**
   * @param dropDownText text for popup when all actions collapsed (when all actions width more notification width)
   */
  public @NotNull Notification setDropDownText(@NotNull @LinkLabel String dropDownText) {
    myDropDownText = dropDownText;
    return this;
  }

  public @NotNull CollapseActionsDirection getCollapseDirection() {
    return myCollapseDirection;
  }

  public @NotNull Notification setCollapseDirection(CollapseActionsDirection collapseDirection) {
    myCollapseDirection = collapseDirection;
    return this;
  }

  public @NotNull List<AnAction> getActions() {
    return myActions != null ? myActions : Collections.emptyList();
  }

  /**
   * @see NotificationAction
   */
  public @NotNull Notification addAction(@NotNull AnAction action) {
    // Lazily creates the backing list on first use.
    (myActions != null ? myActions : (myActions = new ArrayList<>())).add(action);
    return this;
  }

  public @NotNull Notification addActions(@NotNull Collection<? extends AnAction> actions) {
    (myActions != null ? myActions : (myActions = new ArrayList<>())).addAll(actions);
    return this;
  }

  public @Nullable AnAction getContextHelpAction() {
    return myContextHelpAction;
  }

  public @NotNull Notification setContextHelpAction(AnAction action) {
    myContextHelpAction = action;
    return this;
  }

  public @NotNull NotificationType getType() {
    return myType;
  }

  public boolean isExpired() {
    return myExpired.get();
  }

  /** Expires at most once (CAS guard); hides the balloon and notifies the manager. */
  public void expire() {
    if (!myExpired.compareAndSet(false, true)) return;

    UIUtil.invokeLaterIfNeeded(this::hideBalloon);
    NotificationsManager.getNotificationsManager().expire(this);

    Runnable whenExpired = myWhenExpired;
    if (whenExpired != null) whenExpired.run();
  }

  public Notification whenExpired(@Nullable Runnable whenExpired) {
    myWhenExpired = whenExpired;
    return this;
  }

  public void hideBalloon() {
    hideBalloon(myBalloonRef.getAndSet(null));
  }

  private static void hideBalloon(@Nullable Reference<? extends Balloon> balloonRef) {
    var balloon = SoftReference.dereference(balloonRef);
    if (balloon != null) {
      UIUtil.invokeLaterIfNeeded(balloon::hide);
    }
  }

  /**
   * Replaces the tracked balloon (hiding any previous one) and clears the reference
   * when this balloon closes — unless it was already replaced by a newer one.
   */
  public void setBalloon(@NotNull Balloon balloon) {
    var oldBalloon = myBalloonRef.getAndSet(new WeakReference<>(balloon));
    hideBalloon(oldBalloon);
    balloon.addListener(new JBPopupListener() {
      @Override
      public void onClosed(@NotNull LightweightWindowEvent event) {
        myBalloonRef.updateAndGet(prev -> SoftReference.dereference(prev) == balloon ? null : prev);
      }
    });
  }

  public @Nullable Balloon getBalloon() {
    return SoftReference.dereference(myBalloonRef.get());
  }

  public void notify(@Nullable Project project) {
    Notifications.Bus.notify(this, project);
  }

  public @NotNull Notification setImportant(boolean important) {
    myImportant = important;
    return this;
  }

  // When not set explicitly, a notification is important iff it has a listener or actions.
  public boolean isImportant() {
    return myImportant != null ? myImportant : getListener() != null || myActions != null && !myActions.isEmpty();
  }

  public final void assertHasTitleOrContent() {
    LOG.assertTrue(hasTitle() || hasContent(),
                   "Notification should have title and/or content; groupId: " + myGroupId);
  }

  @Override
  public String toString() {
    return String.format("Notification{id='%s', myGroupId='%s', myType=%s, myTitle='%s', mySubtitle='%s', myContent='%s'}",
                         id, myGroupId, myType, myTitle, mySubtitle, myContent);
  }

  //<editor-fold desc="Deprecated stuff.">
  /** @deprecated use {@link #Notification(String, String, NotificationType)} and {@link #setIcon} */
  @Deprecated
  public Notification(@NotNull String groupId, @Nullable Icon icon, @NotNull NotificationType type) {
    this(groupId, "", type);
    setIcon(icon);
  }

  /** @deprecated use {@link #Notification(String, String, String, NotificationType)} and {@link #setListener} */
  @Deprecated
  public Notification(@NotNull String groupId,
                      @NotNull @NotificationTitle String title,
                      @NotNull @NotificationContent String content,
                      @NotNull NotificationType type,
                      @Nullable NotificationListener listener) {
    this(groupId, title, content, type);
    myListener = listener;
  }

  /** @deprecated use {@link #Notification(String, String, NotificationType)}, {@link #setIcon}, {@link #setSubtitle}, {@link #setListener} */
  @Deprecated
  public Notification(@NotNull String groupId,
                      @Nullable Icon icon,
                      @Nullable @NotificationTitle String title,
                      @Nullable @NotificationSubtitle String subtitle,
                      @Nullable @NotificationContent String content,
                      @NotNull NotificationType type,
                      @Nullable NotificationListener listener) {
    this(groupId, content != null ? content : "", type);
    setIcon(icon);
    setTitle(title, subtitle);
    myListener = listener;
  }

  /** @deprecated use {@link #addActions(Collection)} or {@link #addAction} */
  @Deprecated
  public final void addActions(@NotNull List<? extends AnAction> actions) {
    addActions((Collection<? extends AnAction>)actions);
  }

  /** @deprecated use {@link #getCollapseDirection} */
  @Deprecated
  public CollapseActionsDirection getCollapseActionsDirection() {
    return myCollapseDirection;
  }

  /** @deprecated use {@link #setCollapseDirection} */
  @Deprecated
  public void setCollapseActionsDirection(CollapseActionsDirection collapseDirection) {
    myCollapseDirection = collapseDirection;
  }
  //</editor-fold>
}
package kennewickpractice.com.stormy.ui;

import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;

import com.squareup.okhttp.Call;
import com.squareup.okhttp.Callback;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.IOException;

import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;

import kennewickpractice.com.stormy.R;
import kennewickpractice.com.stormy.weather.Current;
import kennewickpractice.com.stormy.weather.Day;
import kennewickpractice.com.stormy.weather.Forecast;
import kennewickpractice.com.stormy.weather.Hour;

/**
 * Main screen: fetches a Forecast.io weather forecast over HTTP (OkHttp), parses the
 * JSON into Current/Hour/Day models, and renders the current conditions.
 *
 * NOTE(review): the class continues past the visible region (getHourlyForecast is
 * truncated below); only comments were added here, no code changed.
 */
public class MainActivity extends AppCompatActivity {

  public static final String TAG = MainActivity.class.getSimpleName();
  public static final String DAILY_FORECAST = "DAILY_FORECAST";
  public static final String HOURLY_FORECAST = "HOURLY_FORECAST";

  // Last successfully parsed forecast; written on a background thread in
  // onResponse, read on the UI thread in updateDisplay.
  private Forecast mForecast;

  // ButterKnife-bound views from activity_main.
  @Bind(R.id.timeLabel) TextView mTimeLabel;
  @Bind(R.id.temperatureLabel) TextView mTemperatureLabel;
  @Bind(R.id.precipValue) TextView mPrecipValue;
  @Bind(R.id.humidityValue) TextView mHumidityValue;
  @Bind(R.id.summaryLabel) TextView mSummaryLabel;
  @Bind(R.id.iconImageView) ImageView mIconImageView;
  @Bind(R.id.refreshImageView) ImageView mRefreshImageView;
  @Bind(R.id.progressBar) ProgressBar mProgressBar;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    ButterKnife.bind(this);
    mProgressBar.setVisibility(View.INVISIBLE);
    // Hard-coded coordinates (Kennewick, WA area); the refresh icon re-fetches them.
    final double latitude = 46.203611;
    final double longitude = -119.159167;
    mRefreshImageView.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View v) {
        getForecast(latitude, longitude);
      }
    });
    getForecast(latitude, longitude);
    Log.d(TAG, "Main UI code is running!");
  }

  /**
   * Fetches the forecast asynchronously; toggles the progress indicator around the
   * request and shows a toast when the network is unavailable.
   */
  private void getForecast(double latitude, double longitude) {
    // SECURITY(review): API key hard-coded in source — should be moved to secure
    // config / build-time injection and rotated.
    String apiKey = "bb3f4dd91a44341972d14c1c0eef9218";
    String forecastUrl = "https://api.forecast.io/forecast/" + apiKey +
        "/" + latitude + "," + longitude;
    if (isNetworkAvailable()) {
      toggleRefresh();
      OkHttpClient client = new OkHttpClient();
      Request request = new Request.Builder()
          .url(forecastUrl)
          .build();
      Call call = client.newCall(request);
      // OkHttp callbacks run on a background thread: UI work is posted via runOnUiThread.
      call.enqueue(new Callback() {
        @Override
        public void onFailure(Request request, IOException e) {
          runOnUiThread(new Runnable() {
            @Override
            public void run() {
              toggleRefresh();
            }
          });
          alertUserAboutError();
        }

        @Override
        public void onResponse(Response response) throws IOException {
          runOnUiThread(new Runnable() {
            @Override
            public void run() {
              toggleRefresh();
            }
          });
          try {
            String jsonData = response.body().string();
            Log.v(TAG, jsonData);
            if (response.isSuccessful()) {
              mForecast = parseForecastDetails(jsonData);
              runOnUiThread(new Runnable() {
                @Override
                public void run() {
                  updateDisplay();
                }
              });
            }
            else {
              alertUserAboutError();
            }
          }
          catch (IOException e) {
            Log.e(TAG, "Exception caught: ", e);
          }
          catch (JSONException e) {
            Log.e(TAG, "Exception caught: ", e);
          }
        }
      });
    }
    else {
      Toast.makeText(this, getString(R.string.network_unavailable_message),
          Toast.LENGTH_LONG).show();
    }
  }

  /** Swaps the visibility of the progress bar and the refresh icon. */
  private void toggleRefresh() {
    if (mProgressBar.getVisibility() == View.INVISIBLE) {
      mProgressBar.setVisibility(View.VISIBLE);
      mRefreshImageView.setVisibility(View.INVISIBLE);
    }
    else {
      mProgressBar.setVisibility(View.INVISIBLE);
      mRefreshImageView.setVisibility(View.VISIBLE);
    }
  }

  /** Renders mForecast's current conditions into the bound views. Must run on the UI thread. */
  private void updateDisplay() {
    Current current = mForecast.getCurrent();
    mTemperatureLabel.setText(current.getTemperature() + "");
    mTimeLabel.setText("At " + current.getFormattedTime() + " it will be");
    mHumidityValue.setText(current.getHumidity() + "");
    mPrecipValue.setText(current.getPrecipChance() + "%");
    mSummaryLabel.setText(current.getSummary());
    // NOTE(review): getResources().getDrawable(int) is deprecated on newer APIs.
    Drawable drawable = getResources().getDrawable(current.getIconId());
    mIconImageView.setImageDrawable(drawable);
  }

  /** Parses the full JSON payload into a Forecast (current + hourly + daily). */
  private Forecast parseForecastDetails(String jsonData)throws JSONException {
    Forecast forecast = new Forecast();
    forecast.setCurrent(getCurrentDetails(jsonData));
    forecast.setHourlyForecast(getHourlyForecast(jsonData));
    forecast.setDailyForecast(getDailyForecast(jsonData));
    return forecast;
  }

  /** Parses the "daily" data array into Day models, tagging each with the timezone. */
  private Day[] getDailyForecast(String jsonData) throws JSONException {
    JSONObject forecast = new JSONObject(jsonData);
    String timezone = forecast.getString("timezone");
    JSONObject daily = forecast.getJSONObject("daily");
    JSONArray data = daily.getJSONArray("data");
    Day [] days = new Day[data.length()];
    for (int i = 0; i < data.length(); i++) {
      JSONObject jsonDay = data.getJSONObject(i);
      Day day = new Day ();
      day.setSummary(jsonDay.getString("summary"));
      day.setIcon(jsonDay.getString("icon"));
      day.setTemperatureMax(jsonDay.getDouble("temperatureMax"));
      day.setTime(jsonDay.getLong("time"));
      day.setTimezone(timezone);
      days [i] = day;
    }
    return days;
  }

  /** Parses the "hourly" data array into Hour models, tagging each with the timezone. */
  private Hour[] getHourlyForecast(String jsonData) throws JSONException {
    JSONObject forecast = new JSONObject(jsonData);
    String timezone = forecast.getString("timezone");
    JSONObject hourly = forecast.getJSONObject("hourly");
    JSONArray data = hourly.getJSONArray("data");
    Hour [] hours = new Hour[data.length()];
    for (int i = 0; i < data.length(); i++) {
      JSONObject jsonHour = data.getJSONObject(i);
      Hour hour = new Hour();
      hour.setSummary(jsonHour.getString("summary"));
      hour.setIcon(jsonHour.getString("icon"));
      hour.setTemperature(jsonHour.getDouble("temperature"));
      hour.setTime(jsonHour.getLong("time"));
      //hour.setWindSpeed(jsonHour.getDouble("windSpeed"));
      hour.setTimezone(timezone);
      hours[i] = hour;
    }
    // NOTE(review): method continues beyond the visible region (return statement
    // and the rest of the class are below this chunk).
return hours; } private Current getCurrentDetails(String jsonData) throws JSONException { JSONObject forecast = new JSONObject(jsonData); String timezone = forecast.getString("timezone"); Log.i(TAG, "From JSON: " + timezone); JSONObject currently = forecast.getJSONObject("currently"); Current current = new Current(); current.setHumidity(currently.getDouble("humidity")); current.setTime(currently.getLong("time")); current.setIcon(currently.getString("icon")); current.setPrecipChance(currently.getDouble("precipProbability")); current.setSummary(currently.getString("summary")); current.setTemperature(currently.getDouble("temperature")); current.setTimeZone(timezone); Log.d(TAG, current.getFormattedTime()); return current; } private boolean isNetworkAvailable() { ConnectivityManager manager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo networkInfo = manager.getActiveNetworkInfo(); boolean isAvailable = false; if (networkInfo != null && networkInfo.isConnected()) { isAvailable = true; } return isAvailable; } private void alertUserAboutError() { AlertDialogFragment dialog = new AlertDialogFragment(); dialog.show(getFragmentManager(), "error_dialog"); } /* @OnClick(R.id.dailyButton) public void startDailyActivity(View view) { Intent intent = new Intent(this, DailyForecastActivity.class); intent.putExtra(DAILY_FORECAST, mForecast.getDailyForecast()); startActivity(intent); } */ @OnClick (R.id.dailyButton) public void startDailyActivity(View view) { Intent intent = new Intent(this, DailyForecastActivity.class); intent.putExtra(DAILY_FORECAST, mForecast.getDailyForecast()); startActivity(intent); } @OnClick (R.id.hourlyButton) public void startHourlyActivity(View view) { Intent intent = new Intent(this, HourlyForecastActivity.class); intent.putExtra(HOURLY_FORECAST, mForecast.getHourlyForecast()); startActivity(intent); } }
/*
 * Copyright (c) 2016 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.evolveum.midpoint.provisioning.impl.dummy;

import static org.testng.AssertJUnit.assertTrue;
import static com.evolveum.midpoint.test.IntegrationTestTools.display;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;

import java.io.File;
import java.util.Collection;
import java.util.List;

import javax.xml.datatype.XMLGregorianCalendar;

import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.Test;

import com.evolveum.icf.dummy.resource.DummyAccount;
import com.evolveum.midpoint.prism.Item;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.provisioning.impl.ProvisioningTestUtil;
import com.evolveum.midpoint.schema.GetOperationOptions;
import com.evolveum.midpoint.schema.SelectorOptions;
import com.evolveum.midpoint.schema.processor.ResourceAttribute;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.SchemaTestConstants;
import com.evolveum.midpoint.schema.util.ShadowUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.test.DummyResourceContoller;
import com.evolveum.midpoint.test.IntegrationTestTools;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.CachingMetadataType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;

/**
 * Almost the same as TestDummy but this is using a caching configuration:
 * the resource definition in {@link #RESOURCE_DUMMY_FILE} enables attribute
 * caching, so reads with staleness options are expected to be satisfied from
 * the repository shadow instead of hitting the dummy resource.
 *
 * The overridden test methods re-run inherited scenarios but assert the
 * cache-specific outcomes (zero resource fetches, cached attribute values,
 * caching metadata timestamps).
 *
 * @author Radovan Semancik
 */
@ContextConfiguration(locations = "classpath:ctx-provisioning-test-main.xml")
@DirtiesContext
public class TestDummyCaching extends TestDummy {

    public static final File TEST_DIR = new File("src/test/resources/impl/dummy-caching/");
    // Caching-enabled variant of the dummy resource definition.
    public static final File RESOURCE_DUMMY_FILE = new File(TEST_DIR, "resource-dummy.xml");

    @Override
    public void initSystem(Task initTask, OperationResult initResult) throws Exception {
        super.initSystem(initTask, initResult);
    }

    // Points the inherited test harness at the caching resource definition.
    @Override
    protected File getResourceDummyFilename() {
        return RESOURCE_DUMMY_FILE;
    }

    /**
     * Make a native modification to an account and read it from the cache. Make sure that
     * cached data are returned and there is no read from the resource.
     * MID-3481
     */
    @Test
    @Override
    public void test107AGetModifiedAccountFromCacheMax() throws Exception {
        final String TEST_NAME = "test107AGetModifiedAccountFromCacheMax";
        TestUtil.displayTestTile(TEST_NAME);
        // GIVEN
        OperationResult result = new OperationResult(TestDummy.class.getName()
                + "." + TEST_NAME);
        rememberShadowFetchOperationCount();

        // Modify the account directly on the (dummy) resource, bypassing midPoint.
        // With maxStaleness the read below must NOT see these new values.
        DummyAccount accountWill = getDummyAccountAssert(transformNameFromResource(ACCOUNT_WILL_USERNAME), willIcfUid);
        accountWill.replaceAttributeValue(DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME, "Nice Pirate");
        accountWill.replaceAttributeValue(DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_SHIP_NAME, "Interceptor");
        accountWill.setEnabled(true);

        Collection<SelectorOptions<GetOperationOptions>> options =
                SelectorOptions.createCollection(GetOperationOptions.createMaxStaleness());

        XMLGregorianCalendar startTs = clock.currentTimeXMLGregorianCalendar();

        // WHEN
        TestUtil.displayWhen(TEST_NAME);
        PrismObject<ShadowType> shadow = provisioningService.getObject(ShadowType.class,
                ACCOUNT_WILL_OID, options, null, result);

        // THEN
        TestUtil.displayThen(TEST_NAME);
        result.computeStatus();
        display("getObject result", result);
        TestUtil.assertSuccess(result);
        // Zero increment: the read must have been served purely from the cache.
        assertShadowFetchOperationCountIncrement(0);

        XMLGregorianCalendar endTs = clock.currentTimeXMLGregorianCalendar();

        display("Retrieved account shadow", shadow);

        assertNotNull("No dummy account", shadow);

        // Old (cached) values — NOT the "Nice Pirate"/"Interceptor" set above.
        assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME, "Pirate");
        assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_SHIP_NAME, "Black Pearl");
        assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME, "sword", "love");
        assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME, 42);
        Collection<ResourceAttribute<?>> attributes = ShadowUtil.getAttributes(shadow);
        assertEquals("Unexpected number of attributes", 7, attributes.size());

        // The repo shadow must likewise still hold the pre-modification values.
        PrismObject<ShadowType> shadowRepo = repositoryService.getObject(ShadowType.class, ACCOUNT_WILL_OID, null, result);
        checkRepoAccountShadowWillBasic(shadowRepo, null, startTs, null);
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME, "Pirate");
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_SHIP_NAME, "Black Pearl");
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME, "sword", "love");
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME, 42);
        // Cached activation still DISABLED even though setEnabled(true) was done natively.
        assertRepoShadowCacheActivation(shadowRepo, ActivationStatusType.DISABLED);

        checkConsistency(shadow);

        assertCachingMetadata(shadow, true, null, startTs);

        assertShadowFetchOperationCountIncrement(0);

        assertSteadyResource();
    }

    /**
     * Make a native modification to an account and read it with high staleness option.
     * This should return cached data.
     * MID-3481
     */
    @Test
    @Override
    public void test107BGetModifiedAccountFromCacheHighStaleness() throws Exception {
        final String TEST_NAME = "test107BGetModifiedAccountFromCacheHighStaleness";
        TestUtil.displayTestTile(TEST_NAME);
        // GIVEN
        OperationResult result = new OperationResult(TestDummy.class.getName()
                + "." + TEST_NAME);
        rememberShadowFetchOperationCount();

        // Native modification, invisible to a sufficiently-stale read.
        DummyAccount accountWill = getDummyAccountAssert(transformNameFromResource(ACCOUNT_WILL_USERNAME), willIcfUid);
        accountWill.replaceAttributeValue(DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME, "Very Nice Pirate");
        accountWill.setEnabled(true);

        // Very high (but finite) staleness tolerance, unlike 107A's maxStaleness.
        Collection<SelectorOptions<GetOperationOptions>> options =
                SelectorOptions.createCollection(GetOperationOptions.createStaleness(1000000L));

        XMLGregorianCalendar startTs = clock.currentTimeXMLGregorianCalendar();

        // WHEN
        TestUtil.displayWhen(TEST_NAME);
        PrismObject<ShadowType> shadow = provisioningService.getObject(ShadowType.class,
                ACCOUNT_WILL_OID, options, null, result);

        // THEN
        TestUtil.displayThen(TEST_NAME);
        result.computeStatus();
        display("getObject result", result);
        TestUtil.assertSuccess(result);
        assertShadowFetchOperationCountIncrement(0);

        XMLGregorianCalendar endTs = clock.currentTimeXMLGregorianCalendar();

        display("Retrieved account shadow", shadow);

        assertNotNull("No dummy account", shadow);

        // Cached values returned; the native "Very Nice Pirate" change is not seen.
        assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME, "Pirate");
        assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_SHIP_NAME, "Black Pearl");
        assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME, "sword", "love");
        assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME, 42);
        Collection<ResourceAttribute<?>> attributes = ShadowUtil.getAttributes(shadow);
        assertEquals("Unexpected number of attributes", 7, attributes.size());

        PrismObject<ShadowType> shadowRepo = repositoryService.getObject(ShadowType.class, ACCOUNT_WILL_OID, null, result);
        checkRepoAccountShadowWillBasic(shadowRepo, null, startTs, null);
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME, "Pirate");
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_SHIP_NAME, "Black Pearl");
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME, "sword", "love");
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME, 42);
        assertRepoShadowCacheActivation(shadowRepo, ActivationStatusType.DISABLED);

        checkConsistency(shadow);

        assertCachingMetadata(shadow, true, null, startTs);

        assertShadowFetchOperationCountIncrement(0);

        assertSteadyResource();
    }

    /**
     * Search for all accounts with maximum staleness option.
     * This is supposed to return only cached data. Therefore
     * repo search is performed.
     * MID-3481
     */
    @Test
    @Override
    public void test119SearchAllAccountsMaxStaleness() throws Exception {
        final String TEST_NAME = "test119SearchAllAccountsMaxStaleness";
        TestUtil.displayTestTile(TEST_NAME);
        // GIVEN
        OperationResult result = new OperationResult(TestDummy.class.getName()
                + "." + TEST_NAME);
        ObjectQuery query = IntegrationTestTools.createAllShadowsQuery(resourceType,
                SchemaTestConstants.ICF_ACCOUNT_OBJECT_CLASS_LOCAL_NAME, prismContext);
        display("All shadows query", query);

        XMLGregorianCalendar startTs = clock.currentTimeXMLGregorianCalendar();
        rememberShadowFetchOperationCount();

        Collection<SelectorOptions<GetOperationOptions>> options =
                SelectorOptions.createCollection(GetOperationOptions.createMaxStaleness());

        // WHEN
        List<PrismObject<ShadowType>> allShadows = provisioningService.searchObjects(ShadowType.class,
                query, options, null, result);

        // THEN
        result.computeStatus();
        display("searchObjects result", result);
        TestUtil.assertSuccess(result);

        display("Found " + allShadows.size() + " shadows");

        assertFalse("No shadows found", allShadows.isEmpty());
        // Expected account count at this point of the inherited scenario.
        assertEquals("Wrong number of results", 4, allShadows.size());

        for (PrismObject<ShadowType> shadow: allShadows) {
            display("Found shadow", shadow);
            ShadowType shadowType = shadow.asObjectable();
            OperationResultType fetchResult = shadowType.getFetchResult();
            if (fetchResult != null) {
                // Cached reads must not carry a failed fetch result.
                assertEquals("Wrong fetch result status in "+shadow,
                        OperationResultStatusType.SUCCESS, fetchResult.getStatus());
            }
            // Every shadow must be marked as coming from the cache.
            assertCachingMetadata(shadow, true, null, startTs);

            if (shadow.asObjectable().getName().getOrig().equals("meathook")) {
                assertAttribute(shadow, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_SHIP_NAME, "Sea Monkey");
            }
        }

        // No resource fetch at all: the search was answered from the repository.
        assertShadowFetchOperationCountIncrement(0);

        assertProtected(allShadows, 1);

        assertSteadyResource();
    }

    @Override
    protected void checkRepoAccountShadowWill(PrismObject<ShadowType> shadowRepo,
            XMLGregorianCalendar start, XMLGregorianCalendar end) {
        // Sometimes there are 6 and sometimes 7 attributes. Treasure is not returned by default.
        // It is not normally in the cache. So do not check for number of attributes here.
        // Check for individual values.
        checkRepoAccountShadowWillBasic(shadowRepo, start, end, null);

        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_SHIP_NAME, "Flying Dutchman"); // this is shadow, values are normalized
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME, "sword", "love");
        assertRepoShadowCachedAttributeValue(shadowRepo, DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME, 42);

        assertRepoShadowCacheActivation(shadowRepo, ActivationStatusType.ENABLED);
    }

    // Unlike the non-caching parent, the repo shadow here is expected to
    // actually carry the cached administrativeStatus value.
    @Override
    protected void assertRepoShadowCacheActivation(PrismObject<ShadowType> shadowRepo, ActivationStatusType expectedAdministrativeStatus) {
        ActivationType activationType = shadowRepo.asObjectable().getActivation();
        assertNotNull("No activation in repo shadow "+shadowRepo, activationType);
        ActivationStatusType administrativeStatus = activationType.getAdministrativeStatus();
        assertEquals("Wrong activation administrativeStatus in repo shadow "+shadowRepo, expectedAdministrativeStatus, administrativeStatus);
    }

    /**
     * We do not know what the timestamp should be. But some timestamp should be there.
     */
    @Override
    protected void assertRepoCachingMetadata(PrismObject<ShadowType> shadowFromRepo) {
        CachingMetadataType cachingMetadata = shadowFromRepo.asObjectable().getCachingMetadata();
        assertNotNull("No caching metadata in "+shadowFromRepo, cachingMetadata);

        assertNotNull("Missing retrieval timestamp in caching metadata in "+shadowFromRepo,
                cachingMetadata.getRetrievalTimestamp());
    }

    // Stricter variant: the retrieval timestamp must fall inside [start, end].
    @Override
    protected void assertRepoCachingMetadata(PrismObject<ShadowType> shadowFromRepo, XMLGregorianCalendar start, XMLGregorianCalendar end) {
        CachingMetadataType cachingMetadata = shadowFromRepo.asObjectable().getCachingMetadata();
        assertNotNull("No caching metadata in "+shadowFromRepo, cachingMetadata);

        TestUtil.assertBetween("Wrong retrieval timestamp in caching metadata in "+shadowFromRepo,
                start, end, cachingMetadata.getRetrievalTimestamp());
    }

    // When data is expected to be cached, caching metadata is mandatory here;
    // otherwise defer to the parent's (non-caching) expectations.
    @Override
    protected void assertCachingMetadata(PrismObject<ShadowType> shadow, boolean expectedCached, XMLGregorianCalendar startTs, XMLGregorianCalendar endTs) {
        CachingMetadataType cachingMetadata = shadow.asObjectable().getCachingMetadata();
        if (expectedCached) {
            assertNotNull("No caching metadata in "+shadow, cachingMetadata);
            TestUtil.assertBetween("Wrong retrievalTimestamp in caching metadata in "+shadow, startTs, endTs, cachingMetadata.getRetrievalTimestamp());
        } else {
            super.assertCachingMetadata(shadow, expectedCached, startTs, endTs);
        }
    }

    @Override
    protected void checkRepoAccountShadow(PrismObject<ShadowType> repoShadow) {
        ProvisioningTestUtil.checkRepoShadow(repoShadow, ShadowKindType.ACCOUNT, null);
    }

    @Override
    protected void checkRepoEntitlementShadow(PrismObject<ShadowType> repoShadow) {
        ProvisioningTestUtil.checkRepoShadow(repoShadow, ShadowKindType.ENTITLEMENT, null);
    }

    @Override
    protected void assertRepoShadowAttributes(List<Item<?,?>> attributes, int expectedNumberOfIdentifiers) {
        // We can only assert that there are at least the identifiers. But we do not know how many
        // attributes should be there (cached attributes are stored alongside the identifiers).
        assertTrue("Unexpected number of attributes in repo shadow, expected at least "+
                expectedNumberOfIdentifiers+", but was "+attributes.size(), attributes.size() >= expectedNumberOfIdentifiers);
    }

    @Override
    protected void assertSyncOldShadow(PrismObject<? extends ShadowType> oldShadow, String repoName) {
        assertSyncOldShadow(oldShadow, repoName, null);
    }

    // In the caching case, cached repo attribute values are asserted exactly
    // like live resource attributes.
    @Override
    protected <T> void assertRepoShadowCachedAttributeValue(PrismObject<ShadowType> shadowRepo, String attrName, T... attrValues) {
        assertAttribute(shadowRepo, attrName, attrValues);
    }

    // A cached shadow must pass the normal account checks AND carry caching
    // metadata whenever it is a full shadow.
    @Override
    protected void checkCachedAccountShadow(PrismObject<ShadowType> shadow, OperationResult parentResult, boolean fullShadow, XMLGregorianCalendar startTs, XMLGregorianCalendar endTs) throws SchemaException {
        super.checkAccountShadow(shadow, parentResult, fullShadow, startTs, endTs);
        if (fullShadow) {
            assertCachingMetadata(shadow, true, startTs, endTs);
        }
    }
}
/*
 * Copyright 2010-2014 Ning, Inc.
 * Copyright 2014-2020 Groupon, Inc
 * Copyright 2020-2020 Equinix, Inc
 * Copyright 2014-2020 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.plugin.analytics.dao;

import java.util.List;
import java.util.UUID;

import org.killbill.billing.plugin.analytics.dao.model.BusinessAccountFieldModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessAccountModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessAccountTagModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessAccountTransitionModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessBundleFieldModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessBundleModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessBundleTagModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceAdjustmentModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceFieldModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceItemAdjustmentModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceItemCreditModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceItemModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoicePaymentFieldModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoicePaymentTagModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceTagModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessModelDaoBase;
import org.killbill.billing.plugin.analytics.dao.model.BusinessPaymentAuthModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessPaymentCaptureModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessPaymentChargebackModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessPaymentCreditModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessPaymentFieldModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessPaymentMethodFieldModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessPaymentPurchaseModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessPaymentRefundModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessSubscriptionTransitionModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessTransactionFieldModelDao;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.TenantContext;
import org.skife.jdbi.v2.sqlobject.Bind;
import org.skife.jdbi.v2.sqlobject.BindBean;
import org.skife.jdbi.v2.sqlobject.SqlQuery;
import org.skife.jdbi.v2.sqlobject.SqlUpdate;
import org.skife.jdbi.v2.sqlobject.customizers.Define;
import org.skife.jdbi.v2.sqlobject.mixins.Transactional;

/**
 * jDBI v2 SQL-object interface for the analytics star-schema tables.
 *
 * Write operations are templated on the target table via {@code @Define("tableName")}
 * (substituted into the SQL as {@code <tableName>}); read operations query fixed
 * tables keyed by {@code accountRecordId}/{@code tenantRecordId}.
 */
public interface BusinessAnalyticsSqlDao extends Transactional<BusinessAnalyticsSqlDao> {

    // Note: the CallContext and TenantContext are not bound for now since they are not used (and createdDate would conflict)

    /**
     * Inserts one row of any analytics model into the given table.
     * FIX: tableName was missing the @Define annotation that every deleteBy*
     * method here uses, so the <tableName> template variable was never bound
     * for inserts.
     */
    @SqlUpdate
    public void create(@Define("tableName") final String tableName,
                       @BindBean final BusinessModelDaoBase entity,
                       final CallContext callContext);

    /** Deletes all rows of the given table belonging to one bundle (tenant-scoped). */
    @SqlUpdate
    public void deleteByBundleId(@Define("tableName") final String tableName,
                                 @Bind("bundleId") final UUID bundleId,
                                 @Bind("tenantRecordId") final Long tenantRecordId,
                                 final CallContext callContext);

    /** Deletes all rows of the given table belonging to one invoice (tenant-scoped). */
    @SqlUpdate
    public void deleteByInvoiceId(@Define("tableName") final String tableName,
                                  @Bind("invoiceId") final UUID invoiceId,
                                  @Bind("tenantRecordId") final Long tenantRecordId,
                                  final CallContext callContext);

    /** Deletes all rows of the given table belonging to one account (tenant-scoped). */
    @SqlUpdate
    public void deleteByAccountRecordId(@Define("tableName") final String tableName,
                                        @Bind("accountRecordId") final Long accountRecordId,
                                        @Bind("tenantRecordId") final Long tenantRecordId,
                                        final CallContext callContext);

    /** @return the single analytics account row, or null if none exists yet. */
    @SqlQuery
    public BusinessAccountModelDao getAccountByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                               @Bind("tenantRecordId") final Long tenantRecordId,
                                                               final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessSubscriptionTransitionModelDao> getSubscriptionTransitionsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                                    @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                                    final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessBundleModelDao> getBundlesByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                    @Bind("tenantRecordId") final Long tenantRecordId,
                                                                    final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessAccountTransitionModelDao> getAccountTransitionsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                          @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                          final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoiceModelDao> getInvoicesByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                      @Bind("tenantRecordId") final Long tenantRecordId,
                                                                      final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoiceAdjustmentModelDao> getInvoiceAdjustmentsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                          @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                          final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoiceItemModelDao> getInvoiceItemsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                              @Bind("tenantRecordId") final Long tenantRecordId,
                                                                              final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoiceItemAdjustmentModelDao> getInvoiceItemAdjustmentsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                                  @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                                  final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoiceItemCreditModelDao> getInvoiceItemCreditsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                          @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                          final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessPaymentAuthModelDao> getPaymentAuthsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                              @Bind("tenantRecordId") final Long tenantRecordId,
                                                                              final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessPaymentCaptureModelDao> getPaymentCapturesByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                    @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                    final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessPaymentPurchaseModelDao> getPaymentPurchasesByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                      @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                      final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessPaymentRefundModelDao> getPaymentRefundsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                  @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                  final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessPaymentCreditModelDao> getPaymentCreditsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                  @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                  final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessPaymentChargebackModelDao> getPaymentChargebacksByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                          @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                          final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessAccountFieldModelDao> getAccountFieldsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessBundleFieldModelDao> getBundleFieldsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                              @Bind("tenantRecordId") final Long tenantRecordId,
                                                                              final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoiceFieldModelDao> getInvoiceFieldsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoicePaymentFieldModelDao> getInvoicePaymentFieldsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                              @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                              final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessPaymentFieldModelDao> getPaymentFieldsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessPaymentMethodFieldModelDao> getPaymentMethodFieldsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                            @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                            final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessTransactionFieldModelDao> getTransactionFieldsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                        @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                        final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessAccountTagModelDao> getAccountTagsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                            @Bind("tenantRecordId") final Long tenantRecordId,
                                                                            final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessBundleTagModelDao> getBundleTagsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                          @Bind("tenantRecordId") final Long tenantRecordId,
                                                                          final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoiceTagModelDao> getInvoiceTagsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                            @Bind("tenantRecordId") final Long tenantRecordId,
                                                                            final TenantContext tenantContext);

    @SqlQuery
    public List<BusinessInvoicePaymentTagModelDao> getInvoicePaymentTagsByAccountRecordId(@Bind("accountRecordId") final Long accountRecordId,
                                                                                          @Bind("tenantRecordId") final Long tenantRecordId,
                                                                                          final TenantContext tenantContext);
}
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.dvcs.push.ui; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.VcsDataKeys; import com.intellij.openapi.vcs.changes.Change; import com.intellij.openapi.vcs.changes.TextRevisionNumber; import com.intellij.openapi.vcs.changes.committed.CommittedChangesTreeBrowser; import com.intellij.openapi.vcs.changes.ui.ChangesBrowser; import com.intellij.openapi.vcs.changes.ui.EditSourceForDialogAction; import com.intellij.openapi.vcs.history.VcsRevisionNumber; import com.intellij.ui.*; import com.intellij.ui.components.JBScrollPane; import com.intellij.ui.components.JBViewport; import com.intellij.ui.components.labels.LinkLabel; import com.intellij.ui.components.labels.LinkListener; import com.intellij.ui.treeStructure.actions.CollapseAllAction; import com.intellij.ui.treeStructure.actions.ExpandAllAction; import com.intellij.util.ArrayUtil; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.ThreeStateCheckBox; import com.intellij.util.ui.tree.TreeUtil; import com.intellij.util.ui.tree.WideSelectionTreeUI; import com.intellij.vcs.log.Hash; import com.intellij.vcs.log.ui.VcsLogActionPlaces; import one.util.streamex.StreamEx; import 
// Continuation of the import block started on the previous line, then class PushLog:
// the VCS "Push" dialog panel — a CheckboxTree of repositories/commits (left) wired to
// a ChangesBrowser (right). Repository nodes are edited inline via a custom cell editor;
// an optional "sync strategy" lets one typed target be applied to all editable repos.
org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.EmptyBorder; import javax.swing.event.*; import javax.swing.tree.*; import java.awt.*; import java.awt.event.*; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.*; import java.util.List; import static com.intellij.openapi.actionSystem.IdeActions.ACTION_COLLAPSE_ALL; import static com.intellij.openapi.actionSystem.IdeActions.ACTION_EXPAND_ALL; import static com.intellij.util.containers.ContainerUtil.emptyList; public class PushLog extends JPanel implements DataProvider { private static final String CONTEXT_MENU = "Vcs.Push.ContextMenu"; private static final String START_EDITING = "startEditing"; private static final String SPLITTER_PROPORTION = "Vcs.Push.Splitter.Proportion"; private final ChangesBrowser myChangesBrowser; private final CheckboxTree myTree; private final MyTreeCellRenderer myTreeCellRenderer; private final JScrollPane myScrollPane; private final VcsCommitInfoBalloon myBalloon; private boolean myShouldRepaint = false; private boolean mySyncStrategy; @Nullable private String mySyncRenderedText; private final boolean myAllowSyncStrategy; public PushLog(Project project, final CheckedTreeNode root, final boolean allowSyncStrategy) { myAllowSyncStrategy = allowSyncStrategy; DefaultTreeModel treeModel = new DefaultTreeModel(root); treeModel.nodeStructureChanged(root); final AnAction quickDocAction = ActionManager.getInstance().getAction(IdeActions.ACTION_QUICK_JAVADOC); myTreeCellRenderer = new MyTreeCellRenderer(); myTree = new CheckboxTree(myTreeCellRenderer, root) { protected boolean shouldShowBusyIconIfNeeded() { return true; } public boolean isPathEditable(TreePath path) { return isEditable() && path.getLastPathComponent() instanceof DefaultMutableTreeNode; } @Override protected void onNodeStateChanged(CheckedTreeNode node) { if (node instanceof EditableTreeNode) {
// Checkbox toggles on editable nodes are forwarded to the node itself; tooltips embed
// the quick-doc shortcut hint; stopEditing()/cancelEditing() run the editor's
// InputVerifier first, then repaint the tree if children changed while editing.
((EditableTreeNode)node).fireOnSelectionChange(node.isChecked()); } } @Override public String getToolTipText(MouseEvent event) { final TreePath path = myTree.getPathForLocation(event.getX(), event.getY()); if (path == null) { return ""; } Object node = path.getLastPathComponent(); if (node == null || (!(node instanceof DefaultMutableTreeNode))) { return ""; } if (node instanceof TooltipNode) { return KeymapUtil.createTooltipText( ((TooltipNode)node).getTooltip() + "<p style='font-style:italic;color:gray;'>Show commit details", quickDocAction) + "</p>"; } return ""; } @Override public boolean stopEditing() { DefaultMutableTreeNode node = (DefaultMutableTreeNode)myTree.getLastSelectedPathComponent(); if (node instanceof EditableTreeNode) { JComponent editedComponent = (JComponent)node.getUserObject(); InputVerifier verifier = editedComponent.getInputVerifier(); if (verifier != null && !verifier.verify(editedComponent)) return false; } boolean result = super.stopEditing(); if (myShouldRepaint) { refreshNode(root); } restoreSelection(node); return result; } @Override public void cancelEditing() { DefaultMutableTreeNode lastSelectedPathComponent = (DefaultMutableTreeNode)myTree.getLastSelectedPathComponent(); super.cancelEditing(); if (myShouldRepaint) { refreshNode(root); } restoreSelection(lastSelectedPathComponent); } @Override protected void installSpeedSearch() { new TreeSpeedSearch(this, path -> { Object pathComponent = path.getLastPathComponent(); if (pathComponent instanceof RepositoryNode) { return ((RepositoryNode)pathComponent).getRepositoryName(); } return pathComponent.toString(); }); } }; myTree.setUI(new MyTreeUi()); myTree.setBorder(new EmptyBorder(2, 0, 0, 0)); //additional vertical indent myTree.setEditable(true); myTree.setHorizontalAutoScrollingEnabled(false); myTree.setShowsRootHandles(root.getChildCount() > 1); MyTreeCellEditor treeCellEditor = new MyTreeCellEditor(); myTree.setCellEditor(treeCellEditor); treeCellEditor.addCellEditorListener(new
// Editor listener: on commit, re-verify input (revert via fireOnCancel when invalid);
// in sync-strategy mode propagate the change to every RepositoryNode, otherwise only
// to the edited node. Both paths end by firing EDIT_MODE_PROP true -> false.
CellEditorListener() { @Override public void editingStopped(ChangeEvent e) { DefaultMutableTreeNode node = (DefaultMutableTreeNode)myTree.getLastSelectedPathComponent(); if (node != null && node instanceof EditableTreeNode) { JComponent editedComponent = (JComponent)node.getUserObject(); InputVerifier verifier = editedComponent.getInputVerifier(); if (verifier != null && !verifier.verify(editedComponent)) { // if invalid and interrupted, then revert ((EditableTreeNode)node).fireOnCancel(); } else { if (mySyncStrategy) { resetEditSync(); ContainerUtil.process(getChildNodesByType(root, RepositoryNode.class, false), node1 -> { node1.fireOnChange(); return true; }); } else { ((EditableTreeNode)node).fireOnChange(); } } } myTree.firePropertyChange(PushLogTreeUtil.EDIT_MODE_PROP, true, false); } @Override public void editingCanceled(ChangeEvent e) { DefaultMutableTreeNode node = (DefaultMutableTreeNode)myTree.getLastSelectedPathComponent(); if (node != null && node instanceof EditableTreeNode) { ((EditableTreeNode)node).fireOnCancel(); } resetEditSync(); myTree.firePropertyChange(PushLogTreeUtil.EDIT_MODE_PROP, true, false); } }); // complete editing when interrupt myTree.setInvokesStopCellEditing(true); myTree.setRootVisible(false); TreeUtil.collapseAll(myTree, 1); final VcsBranchEditorListener linkMouseListener = new VcsBranchEditorListener(myTreeCellRenderer); linkMouseListener.installOn(myTree); myBalloon = new VcsCommitInfoBalloon(myTree); myTree.getSelectionModel().setSelectionMode(TreeSelectionModel.DISCONTIGUOUS_TREE_SELECTION); myTree.addTreeSelectionListener(new TreeSelectionListener() { @Override public void valueChanged(TreeSelectionEvent e) { updateChangesView(); myBalloon.updateCommitDetails(); } }); myTree.addFocusListener(new FocusAdapter() { @Override public void focusLost(FocusEvent e) { DefaultMutableTreeNode node = (DefaultMutableTreeNode)myTree.getLastSelectedPathComponent(); if (node != null && node instanceof RepositoryNode && myTree.isEditing()) {
//need to force repaint foreground for non-focused editing node myTree.getCellEditor().getTreeCellEditorComponent(myTree, node, true, false, false, myTree.getRowForPath( TreeUtil.getPathFromRoot(node))); } } }); myTree.getInputMap().put(KeyStroke.getKeyStroke(KeyEvent.VK_F2, 0), START_EDITING); //override default tree behaviour. myTree.getInputMap().put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), ""); myTree.getInputMap().put(KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), ""); MyShowCommitInfoAction showCommitInfoAction = new MyShowCommitInfoAction(); showCommitInfoAction.registerCustomShortcutSet(quickDocAction.getShortcutSet(), myTree); ExpandAllAction expandAllAction = new ExpandAllAction(myTree); expandAllAction.registerCustomShortcutSet(ActionManager.getInstance().getAction(ACTION_EXPAND_ALL).getShortcutSet(), myTree); CollapseAllAction collapseAll = new CollapseAllAction(myTree); collapseAll.registerCustomShortcutSet(ActionManager.getInstance().getAction(ACTION_COLLAPSE_ALL).getShortcutSet(), myTree); ToolTipManager.sharedInstance().registerComponent(myTree); PopupHandler.installPopupHandler(myTree, VcsLogActionPlaces.POPUP_ACTION_GROUP, CONTEXT_MENU); myChangesBrowser = new ChangesBrowser(project, null, Collections.emptyList(), null, false, false, null, ChangesBrowser.MyUseCase.LOCAL_CHANGES, null); myChangesBrowser.getDiffAction().registerCustomShortcutSet(myChangesBrowser.getDiffAction().getShortcutSet(), myTree); final EditSourceForDialogAction editSourceAction = new EditSourceForDialogAction(myChangesBrowser); editSourceAction.registerCustomShortcutSet(CommonShortcuts.getEditSource(), myChangesBrowser); myChangesBrowser.addToolbarAction(editSourceAction); myChangesBrowser.setMinimumSize(new Dimension(JBUI.scale(200), myChangesBrowser.getPreferredSize().height)); setDefaultEmptyText(); JBSplitter splitter = new JBSplitter(SPLITTER_PROPORTION, 0.7f); final JComponent syncStrategyPanel = myAllowSyncStrategy ?
// Scroll pane layout: when the sync-strategy panel exists, the viewport is shrunk by
// the panel's preferred height and the panel is pinned below it inside the same pane.
createStrategyPanel() : null; myScrollPane = new JBScrollPane(myTree) { @Override public void layout() { super.layout(); if (syncStrategyPanel != null) { Rectangle bounds = this.getViewport().getBounds(); int height = bounds.height - syncStrategyPanel.getPreferredSize().height; this.getViewport().setBounds(bounds.x, bounds.y, bounds.width, height); syncStrategyPanel.setBounds(bounds.x, bounds.y + height, bounds.width, syncStrategyPanel.getPreferredSize().height); } } }; if (syncStrategyPanel != null) { myScrollPane.setViewport(new MyTreeViewPort(myTree, syncStrategyPanel.getPreferredSize().height)); } myScrollPane.getViewport().setScrollMode(JViewport.SIMPLE_SCROLL_MODE); myScrollPane.setOpaque(false); if (syncStrategyPanel != null) { myScrollPane.add(syncStrategyPanel); } splitter.setFirstComponent(myScrollPane); splitter.setSecondComponent(myChangesBrowser); setLayout(new BorderLayout()); add(splitter); myTree.setMinimumSize(new Dimension(JBUI.scale(400), myTree.getPreferredSize().height)); myTree.setRowHeight(0); myScrollPane.setMinimumSize(new Dimension(myTree.getMinimumSize().width, myScrollPane.getPreferredSize().height)); } public void highlightNodeOrFirst(@Nullable RepositoryNode repositoryNode, boolean shouldScrollTo) { TreePath selectionPath = repositoryNode != null ?
// highlightNodeOrFirst: select the given repository node (or first path when null),
// optionally scrolling it into view. createStrategyPanel builds the "Edit all targets"
// link, disabled while the tree is in edit mode (tracked via EDIT_MODE_PROP).
TreeUtil.getPathFromRoot(repositoryNode) : TreeUtil.getFirstNodePath(myTree); myTree.setSelectionPath(selectionPath); if (shouldScrollTo) { myTree.scrollPathToVisible(selectionPath); } } private class MyShowCommitInfoAction extends DumbAwareAction { @Override public void actionPerformed(AnActionEvent e) { myBalloon.showCommitDetails(); } @Override public void update(AnActionEvent e) { e.getPresentation().setEnabled(getSelectedCommitNodes().size() == 1); } } private void restoreSelection(@Nullable DefaultMutableTreeNode node) { if (node != null) { TreeUtil.selectNode(myTree, node); } } private JComponent createStrategyPanel() { final JPanel labelPanel = new JPanel(new BorderLayout()); labelPanel.setBackground(myTree.getBackground()); final LinkLabel<String> linkLabel = new LinkLabel<>("Edit all targets", null); linkLabel.setBorder(new EmptyBorder(2, 2, 2, 2)); linkLabel.setListener(new LinkListener<String>() { @Override public void linkSelected(LinkLabel aSource, String aLinkData) { if (linkLabel.isEnabled()) { startSyncEditing(); } } }, null); myTree.addPropertyChangeListener(PushLogTreeUtil.EDIT_MODE_PROP, new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { Boolean editMode = (Boolean)evt.getNewValue(); linkLabel.setEnabled(!editMode); linkLabel.setPaintUnderline(!editMode); linkLabel.repaint(); } }); labelPanel.add(linkLabel, BorderLayout.EAST); return labelPanel; } private void startSyncEditing() { mySyncStrategy = true; DefaultMutableTreeNode nodeToEdit = getFirstNodeToEdit(); if (nodeToEdit != null) { myTree.startEditingAtPath(TreeUtil.getPathFromRoot(nodeToEdit)); } } @NotNull private static List<Change> collectAllChanges(@NotNull List<CommitNode> commitNodes) { return CommittedChangesTreeBrowser.zipChanges(collectChanges(commitNodes)); } @NotNull private static List<CommitNode> collectSelectedCommitNodes(@NotNull List<DefaultMutableTreeNode> selectedNodes) { //addAll Commit nodes from selected Repository nodes;
// Selection helpers: commits under selected repository nodes come first (reverse child
// order), then individually-selected commits not already included.
List<CommitNode> nodes = StreamEx.of(selectedNodes) .select(RepositoryNode.class) .toFlatList(node -> getChildNodesByType(node, CommitNode.class, true)); // add all others selected Commit nodes; nodes.addAll(StreamEx.of(selectedNodes) .select(CommitNode.class) .filter(node -> !nodes.contains(node)) .toList()); return nodes; } @NotNull private static List<Change> collectChanges(@NotNull List<CommitNode> commitNodes) { List<Change> changes = ContainerUtil.newArrayList(); for (CommitNode node : commitNodes) { changes.addAll(node.getUserObject().getChanges()); } return changes; } @NotNull private static <T> List<T> getChildNodesByType(@NotNull DefaultMutableTreeNode node, Class<T> type, boolean reverseOrder) { List<T> nodes = ContainerUtil.newArrayList(); if (node.getChildCount() < 1) { return nodes; } for (DefaultMutableTreeNode childNode = (DefaultMutableTreeNode)node.getFirstChild(); childNode != null; childNode = (DefaultMutableTreeNode)node.getChildAfter(childNode)) { if (type.isInstance(childNode)) { @SuppressWarnings("unchecked") T nodeT = (T)childNode; if (reverseOrder) { nodes.add(0, nodeT); } else { nodes.add(nodeT); } } } return nodes; } @NotNull private static List<Integer> getSortedRows(@NotNull int[] rows) { List<Integer> sorted = ContainerUtil.newArrayList(); for (int row : rows) { sorted.add(row); } Collections.sort(sorted, Collections.reverseOrder()); return sorted; } private void updateChangesView() { List<CommitNode> commitNodes = getSelectedCommitNodes(); if (!commitNodes.isEmpty()) { myChangesBrowser.getViewer().setEmptyText("No differences"); } else { setDefaultEmptyText(); } myChangesBrowser.setChangesToDisplay(collectAllChanges(commitNodes)); } private void setDefaultEmptyText() { myChangesBrowser.getViewer().setEmptyText("No commits selected"); } // Make changes available for diff action; revisionNumber for create patch and copy revision number actions @Nullable @Override public Object getData(String id) { if (VcsDataKeys.CHANGES.is(id)) {
// DataProvider: exposes selected changes and revision numbers to platform actions.
List<CommitNode> commitNodes = getSelectedCommitNodes(); return ArrayUtil.toObjectArray(collectAllChanges(commitNodes), Change.class); } else if (VcsDataKeys.VCS_REVISION_NUMBERS.is(id)) { List<CommitNode> commitNodes = getSelectedCommitNodes(); return ArrayUtil.toObjectArray(ContainerUtil.map(commitNodes, commitNode -> { Hash hash = commitNode.getUserObject().getId(); return new TextRevisionNumber(hash.asString(), hash.toShortString()); }), VcsRevisionNumber.class); } return null; } @NotNull private List<CommitNode> getSelectedCommitNodes() { List<DefaultMutableTreeNode> selectedNodes = getSelectedTreeNodes(); return selectedNodes.isEmpty() ? Collections.emptyList() : collectSelectedCommitNodes(selectedNodes); } @NotNull private List<DefaultMutableTreeNode> getSelectedTreeNodes() { int[] rows = myTree.getSelectionRows(); return (rows != null && rows.length != 0) ? getNodesForRows(getSortedRows(rows)) : emptyList(); } @NotNull private List<DefaultMutableTreeNode> getNodesForRows(@NotNull List<Integer> rows) { List<DefaultMutableTreeNode> nodes = ContainerUtil.newArrayList(); for (Integer row : rows) { TreePath path = myTree.getPathForRow(row); Object pathComponent = path == null ?
// Key handling: plain Enter commits or starts editing; Alt+F2 starts sync editing
// (when allowed); the toggle key flips checkbox state for selected repositories.
null : path.getLastPathComponent(); if (pathComponent instanceof DefaultMutableTreeNode) { nodes.add((DefaultMutableTreeNode)pathComponent); } } return nodes; } @Override protected boolean processKeyBinding(KeyStroke ks, KeyEvent e, int condition, boolean pressed) { if (e.getKeyCode() == KeyEvent.VK_ENTER && e.getModifiers() == 0 && pressed) { if (myTree.isEditing()) { myTree.stopEditing(); } else { DefaultMutableTreeNode node = (DefaultMutableTreeNode)myTree.getLastSelectedPathComponent(); if (node != null) { myTree.startEditingAtPath(TreeUtil.getPathFromRoot(node)); } } return true; } if (myAllowSyncStrategy && e.getKeyCode() == KeyEvent.VK_F2 && e.getModifiers() == InputEvent.ALT_MASK && pressed) { startSyncEditing(); return true; } if (CheckboxTreeHelper.isToggleEvent(e, myTree) && pressed) { toggleRepositoriesFromCommits(); return true; } return super.processKeyBinding(ks, e, condition, pressed); } private void toggleRepositoriesFromCommits() { LinkedHashSet<CheckedTreeNode> checkedNodes = StreamEx.of(getSelectedTreeNodes()) .map(n -> n instanceof CommitNode ?
// toggleRepositoriesFromCommits maps commit selections to their parent repository
// nodes and flips all of them to the inverse of the first node's state.
n.getParent() : n) .select(CheckedTreeNode.class) .filter(CheckedTreeNode::isEnabled) .toCollection(LinkedHashSet::new); if (checkedNodes.isEmpty()) return; // use new state from first lead node; boolean newState = !checkedNodes.iterator().next().isChecked(); checkedNodes.forEach(n -> myTree.setNodeState(n, newState)); } @Nullable private DefaultMutableTreeNode getFirstNodeToEdit() { // start edit last selected component if editable if (myTree.getLastSelectedPathComponent() instanceof RepositoryNode) { RepositoryNode selectedNode = ((RepositoryNode)myTree.getLastSelectedPathComponent()); if (selectedNode.isEditableNow()) return selectedNode; } List<RepositoryNode> repositoryNodes = getChildNodesByType((DefaultMutableTreeNode)myTree.getModel().getRoot(), RepositoryNode.class, false); RepositoryNode editableNode = ContainerUtil.find(repositoryNodes, repositoryNode -> repositoryNode.isEditableNow()); if (editableNode != null) { TreeUtil.selectNode(myTree, editableNode); } return editableNode; } public JComponent getPreferredFocusedComponent() { return myTree; } @NotNull public CheckboxTree getTree() { return myTree; } public void selectIfNothingSelected(@NotNull TreeNode node) { if (myTree.isSelectionEmpty()) { myTree.setSelectionPath(TreeUtil.getPathFromRoot(node)); } } public void setChildren(@NotNull DefaultMutableTreeNode parentNode, @NotNull Collection<?
// setChildren replaces a node's children; when the tree is mid-edit the structural
// refresh is deferred via myShouldRepaint and applied when editing stops/cancels.
extends DefaultMutableTreeNode> childrenNodes) { parentNode.removeAllChildren(); for (DefaultMutableTreeNode child : childrenNodes) { parentNode.add(child); } if (!myTree.isEditing()) { refreshNode(parentNode); TreePath path = TreeUtil.getPathFromRoot(parentNode); if (myTree.getSelectionModel().isPathSelected(path)) { updateChangesView(); } } else { myShouldRepaint = true; } } private void refreshNode(@NotNull DefaultMutableTreeNode parentNode) { //todo should be optimized in case of start loading just edited node final DefaultTreeModel model = ((DefaultTreeModel)myTree.getModel()); model.nodeStructureChanged(parentNode); autoExpandChecked(parentNode); myShouldRepaint = false; } private void autoExpandChecked(@NotNull DefaultMutableTreeNode node) { if (node.getChildCount() <= 0) return; if (node instanceof RepositoryNode) { expandIfChecked((RepositoryNode)node); return; } for (DefaultMutableTreeNode childNode = (DefaultMutableTreeNode)node.getFirstChild(); childNode != null; childNode = (DefaultMutableTreeNode)node.getChildAfter(childNode)) { if (!(childNode instanceof RepositoryNode)) return; expandIfChecked((RepositoryNode)childNode); } } private void expandIfChecked(@NotNull RepositoryNode node) { if (node.isChecked()) { TreePath path = TreeUtil.getPathFromRoot(node); myTree.expandPath(path); } } private void setSyncText(String value) { mySyncRenderedText = value; } public void fireEditorUpdated(@NotNull String currentText) { if (mySyncStrategy) { //update ui model List<RepositoryNode> repositoryNodes = getChildNodesByType((DefaultMutableTreeNode)myTree.getModel().getRoot(), RepositoryNode.class, false); for (RepositoryNode node : repositoryNodes) { if (node.isEditableNow()) { node.forceUpdateUiModelWithTypedText(currentText); } } setSyncText(currentText); myTree.repaint(); } } private void resetEditSync() { if (mySyncStrategy) { mySyncStrategy = false; mySyncRenderedText = null; } } private class MyTreeCellRenderer extends CheckboxTree.CheckboxTreeCellRenderer
// Renderer: hides/overrides the checkbox for repository nodes (DONT_CARE while
// loading), and in sync-edit mode renders the shared typed text in every row.
{ @Override public void customizeRenderer(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) { if (!(value instanceof DefaultMutableTreeNode)) { return; } myCheckbox.setBorder(null); //checkBox may have no border by default, but insets are not null, // it depends on LaF, OS and isItRenderedPane, see com.intellij.ide.ui.laf.darcula.ui.DarculaCheckBoxBorder. // null border works as expected always. if (value instanceof RepositoryNode) { //todo simplify, remove instance of RepositoryNode valueNode = (RepositoryNode)value; myCheckbox.setVisible(valueNode.isCheckboxVisible()); if (valueNode.isChecked() && valueNode.isLoading()) { myCheckbox.setState(ThreeStateCheckBox.State.DONT_CARE); } else { myCheckbox.setSelected(valueNode.isChecked()); } } Object userObject = ((DefaultMutableTreeNode)value).getUserObject(); ColoredTreeCellRenderer renderer = getTextRenderer(); if (value instanceof CustomRenderedTreeNode) { if (tree.isEditing() && mySyncStrategy && value instanceof RepositoryNode) { //sync rendering all editable fields ((RepositoryNode)value).render(renderer, mySyncRenderedText); } else { ((CustomRenderedTreeNode)value).render(renderer); } } else { renderer.append(userObject == null ?
// Editor: a node is editable when a mouse click lands on a VcsEditableComponent tag
// (or, for keyboard starts, when the anchor node reports isEditableNow()).
// MyTreeUi invalidates row sizes on resize/move and stretches rows to viewport width.
"" : userObject.toString()); } } } private class MyTreeCellEditor extends AbstractCellEditor implements TreeCellEditor { private RepositoryWithBranchPanel myValue; @Override public Component getTreeCellEditorComponent(JTree tree, Object value, boolean isSelected, boolean expanded, boolean leaf, int row) { RepositoryWithBranchPanel panel = (RepositoryWithBranchPanel)((DefaultMutableTreeNode)value).getUserObject(); myValue = panel; myTree.firePropertyChange(PushLogTreeUtil.EDIT_MODE_PROP, false, true); return panel.getTreeCellEditorComponent(tree, value, isSelected, expanded, leaf, row, true); } @Override public boolean isCellEditable(EventObject anEvent) { if (anEvent instanceof MouseEvent) { MouseEvent me = ((MouseEvent)anEvent); final TreePath path = myTree.getClosestPathForLocation(me.getX(), me.getY()); final int row = myTree.getRowForLocation(me.getX(), me.getY()); myTree.getCellRenderer().getTreeCellRendererComponent(myTree, path.getLastPathComponent(), false, false, true, row, true); Object tag = me.getClickCount() >= 1 ? PushLogTreeUtil.getTagAtForRenderer(myTreeCellRenderer, me) : null; return tag instanceof VcsEditableComponent; } //if keyboard event - then anEvent will be null =( See BasicTreeUi TreePath treePath = myTree.getAnchorSelectionPath(); //there is no selection path if we start editing during initial validation// if (treePath == null) return true; Object treeNode = treePath.getLastPathComponent(); return treeNode instanceof EditableTreeNode && ((EditableTreeNode)treeNode).isEditableNow(); } public Object getCellEditorValue() { return myValue; } } private class MyTreeUi extends WideSelectionTreeUI { private final ComponentListener myTreeSizeListener = new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { // invalidate, revalidate etc may have no 'size' effects, you need to manually invalidateSizes before.
updateSizes(); } }; private final AncestorListener myTreeAncestorListener = new AncestorListenerAdapter() { @Override public void ancestorMoved(AncestorEvent event) { super.ancestorMoved(event); updateSizes(); } }; private void updateSizes() { treeState.invalidateSizes(); tree.repaint(); } @Override protected void installListeners() { super.installListeners(); tree.addComponentListener(myTreeSizeListener); tree.addAncestorListener(myTreeAncestorListener); } @Override protected void uninstallListeners() { tree.removeComponentListener(myTreeSizeListener); tree.removeAncestorListener(myTreeAncestorListener); super.uninstallListeners(); } @Override protected AbstractLayoutCache.NodeDimensions createNodeDimensions() { return new NodeDimensionsHandler() { @Override public Rectangle getNodeDimensions(Object value, int row, int depth, boolean expanded, Rectangle size) { Rectangle dimensions = super.getNodeDimensions(value, row, depth, expanded, size); dimensions.width = Math.max( myScrollPane != null ? myScrollPane.getViewport().getWidth() - getRowX(row, depth) : myTree.getMinimumSize().width, dimensions.width); return dimensions; } }; } } private static class MyTreeViewPort extends JBViewport { final int myHeightToReduce; public MyTreeViewPort(@Nullable Component view, int heightToReduce) { super(); setView(view); myHeightToReduce = heightToReduce; } @Override public Dimension getExtentSize() { Dimension defaultSize = super.getExtentSize(); return new Dimension(defaultSize.width, defaultSize.height - myHeightToReduce); } } }
// License header, package/imports, then class RelSet: a Volcano-planner equivalence
// set of semantically identical relational expressions, partitioned into RelSubsets
// by trait set. mergeWith() is order-sensitive (rename may re-merge this set); keep
// statement order as-is.
/* // Licensed to DynamoBI Corporation (DynamoBI) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. DynamoBI licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. */ package org.eigenbase.relopt.volcano; import java.util.*; import java.util.logging.*; import org.eigenbase.rel.*; import org.eigenbase.relopt.*; import org.eigenbase.trace.*; /** * A <code>RelSet</code> is an equivalence-set of expressions; that is, a set of * expressions which have identical semantics. We are generally interested in * using the expression which has the lowest cost. * * <p>All of the expressions in an <code>RelSet</code> have the same calling * convention.</p> * * @author jhyde * @version $Id$ * @since 16 December, 2001 */ class RelSet { //~ Static fields/initializers --------------------------------------------- private static final Logger tracer = EigenbaseTrace.getPlannerTracer(); //~ Instance fields -------------------------------------------------------- final List<RelNode> rels = new ArrayList<RelNode>(); final List<RelSubset> subsets = new ArrayList<RelSubset>(); /** * List of {@link AbstractConverter} objects which have not yet been * satisfied. */ List<AbstractConverter> abstractConverters = new ArrayList<AbstractConverter>(); /** * Set to the superseding set when this is found to be equivalent to another * set.
*/ RelSet equivalentSet; RelNode rel; /** * Names of variables which are set by relational expressions in this set * and available for use by parent and child expressions. */ Set<String> variablesPropagated; /** * Names of variables which are used by relational expressions in this set. */ Set<String> variablesUsed; int id; /** * Reentrancy flag. */ boolean inMetadataQuery; //~ Constructors ----------------------------------------------------------- RelSet() { } //~ Methods ---------------------------------------------------------------- /** * Returns all of the {@link RelNode}s which reference {@link RelNode}s in * this set. */ public List<RelNode> getParentRels() { List<RelNode> parents = new ArrayList<RelNode>(); for (RelSubset subset : subsets) { parents.addAll(subset.parents); } return parents; } /** * @return all of the {@link RelNode}s contained by any subset of this set * (does not include the subset objects themselves) */ public List<RelNode> getRelsFromAllSubsets() { return rels; } public RelSubset getSubset(RelTraitSet traits) { for (RelSubset subset : subsets) { if (subset.getTraits().equals(traits)) { return subset; } } return null; } // removes all references to a specific relnode in both the subsets // and their parent relationships void obliterateRelNode(RelNode rel) { for (RelSubset subset : subsets) { subset.parents.remove(rel); subset.rels.remove(rel); } } /** * Adds a relational expression to a set, with its results available under a * particular calling convention. An expression may be in the set several * times with different calling conventions (and hence different costs).
*/ public RelSubset add(RelNode rel) { assert equivalentSet == null : "adding to a dead set"; RelSubset subset = getOrCreateSubset( rel.getCluster(), rel.getTraits()); subset.add(rel); return subset; } RelSubset getOrCreateSubset( RelOptCluster cluster, RelTraitSet traits) { RelSubset subset = getSubset(traits); if (subset == null) { subset = new RelSubset(cluster, this, traits); subsets.add(subset); VolcanoPlanner planner = (VolcanoPlanner) cluster.getPlanner(); if (planner.listener != null) { postEquivalenceEvent(planner, subset); } } return subset; } private void postEquivalenceEvent(VolcanoPlanner planner, RelNode rel) { RelOptListener.RelEquivalenceEvent event = new RelOptListener.RelEquivalenceEvent( planner, rel, "equivalence class " + id, false); planner.listener.relEquivalenceFound(event); } /** * Adds an expression <code>rel</code> to this set, without creating a * {@link org.eigenbase.relopt.volcano.RelSubset}. (Called only from * {@link org.eigenbase.relopt.volcano.RelSubset#add}. * * @param rel Relational expression */ void addInternal(RelNode rel) { if (!rels.contains(rel)) { rels.add(rel); VolcanoPlanner planner = (VolcanoPlanner) rel.getCluster().getPlanner(); if (planner.listener != null) { postEquivalenceEvent(planner, rel); } } if (this.rel == null) { this.rel = rel; } else { assert (rel.getCorrelVariable() == null); String correl = this.rel.getCorrelVariable(); if (correl != null) { rel.setCorrelVariable(correl); } // Row types must be the same, except for field names. RelOptUtil.verifyTypeEquivalence( this.rel, rel, this); } } /** * Merges <code>otherSet</code> into this RelSet. * * <p>One generally calls this method after discovering that two relational * expressions are equivalent, and hence the <code>RelSet</code>s they * belong to are equivalent also. * * <p>After this method completes, <code>otherSet</code> is obsolete, its * {@link #equivalentSet} member points to this RelSet, and this RelSet is * still alive.
* * @param planner Planner * @param otherSet RelSet which is equivalent to this one */ void mergeWith( VolcanoPlanner planner, RelSet otherSet) { assert (this != otherSet); assert (this.equivalentSet == null); assert (otherSet.equivalentSet == null); tracer.finer("Merge set#" + otherSet.id + " into set#" + id); otherSet.equivalentSet = this; // remove from table boolean existed = planner.allSets.remove(otherSet); assert (existed) : "merging with a dead otherSet"; // merge subsets for (RelSubset otherSubset : otherSet.subsets) { planner.ruleQueue.subsetImportances.remove(otherSubset); RelSubset subset = getOrCreateSubset( otherSubset.getCluster(), otherSubset.getTraits()); if (otherSubset.bestCost.isLt(subset.bestCost)) { subset.bestCost = otherSubset.bestCost; subset.best = otherSubset.best; } for (RelNode otherRel : otherSubset.rels) { planner.reregister(this, otherRel); } } // Has another set merged with this? assert equivalentSet == null; // Update all rels which have a child in the other set, to reflect the // fact that the child has been renamed. for (RelNode parentRel : otherSet.getParentRels()) { planner.rename(parentRel); } // Renaming may have caused this set to merge with another. If so, // this set is now obsolete. There's no need to update the children // of this set - indeed, it could be dangerous. if (equivalentSet != null) { return; } // Make sure the cost changes as a result of merging are propagated. Set<RelSubset> activeSet = new HashSet<RelSubset>(); for (RelSubset relSubset : subsets) { for (RelSubset parentSubset : relSubset.getParentSubsets()) { for (RelNode parentRel : parentSubset.rels) { parentSubset.propagateCostImprovements( planner, parentRel, activeSet); } } } assert activeSet.isEmpty(); assert equivalentSet == null; // Each of the relations in the old set now has new parents, so // potentially new rules can fire. Check for rule matches, just as if // it were newly registered.
(This may cause rules which have fired // once to fire again.) for (RelNode rel : rels) { assert planner.getSet(rel) == this; planner.fireRules(rel, true); } } } // End RelSet.java
// NOTE(review): the line above begins with bare prose "(This may cause rules..." — a
// wrapped "//" comment whose continuation lost its comment marker in this copy of the
// file; it would not compile as-is. Preserved verbatim here; confirm against upstream.
package com.viesis.viescraft.common.items.upgrades;

import java.text.DecimalFormat;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.annotation.Nullable;

import com.viesis.viescraft.ViesCraft;
import com.viesis.viescraft.api.EnumsVC;
import com.viesis.viescraft.api.References;
import com.viesis.viescraft.common.items.ItemHelper;

import net.minecraft.client.Minecraft;
import net.minecraft.client.resources.I18n;
import net.minecraft.client.settings.GameSettings;
import net.minecraft.client.util.ITooltipFlag;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.NonNullList;
import net.minecraft.util.text.TextFormatting;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;

/**
 * Airship engine upgrade item. The item metadata selects the variant:
 * metadata 0 is an "Engine Fragment", metadata 1..5 are engine tiers
 * (see {@code EnumsVC.AirshipTierEngine}).
 */
public class ItemUpgradeEngine extends Item {

    public ItemUpgradeEngine() {
        ItemHelper.setItemName(this, "upgrades/upgrade_airship_engine");
        // One item per metadata value; damage is repurposed as the tier id.
        this.setHasSubtypes(true);
        this.setMaxDamage(0);
        this.setMaxStackSize(1);
        this.setCreativeTab(ViesCraft.tabViesCraftItems);
    }

    /**
     * Builds the hover tooltip. Fragments (metadata 0) get a short static
     * text; upgrades show the full detail block only while the sneak key is
     * held, otherwise a "hold shift" helper line.
     */
    @SideOnly(Side.CLIENT)
    @Override
    public void addInformation(ItemStack stack, @Nullable World worldIn, List<String> tooltip, ITooltipFlag flagIn) {
        DecimalFormat df = new DecimalFormat("###.#");
        // Fuel-per-tick of this stack's tier; formatted below for display.
        float speedModCal = EnumsVC.AirshipTierEngine.byId(this.getMetadata(stack)).getFuelPerTick();
        String speedMod = df.format(speedModCal);
        GameSettings gameSettingsIn = Minecraft.getMinecraft().gameSettings;
        TextFormatting stringColorMain = References.mainColorTooltip(stack);
        TextFormatting stringColorText = References.textColorTooltip(stack);

        tooltip.add(TextFormatting.DARK_GREEN + "================================");

        if(stack.getMetadata() == 0) {
            // Engine Fragment: static localized description.
            tooltip.add(I18n.format("vc.item.tt.upgradeengine.0.1", new Object[0]));
            tooltip.add(I18n.format("vc.item.tt.upgradeengine.0.2", new Object[0]));
            tooltip.add("");
            tooltip.add(I18n.format("vc.item.tt.upgradeengine.0.3", new Object[0]));
        }
        else if(gameSettingsIn.isKeyDown(gameSettingsIn.keyBindSneak)) {
            // Detailed tooltip; the two branches differ only in the
            // requirement line near the end (tier 1 has no predecessor).
            if(stack.getMetadata() == 1) {
                //Make your airships move faster!
                //Works in any airship version.
                tooltip.add(TextFormatting.BLACK + "-l" + stringColorText + I18n.format("vc.item.tt.upgradeengine.#.1", new Object[0]));
                tooltip.add(TextFormatting.BLACK + "---iii" + stringColorText + I18n.format("vc.item.tt.upgradeengine.#.2", new Object[0]));
                tooltip.add(TextFormatting.DARK_GREEN + "================================");
                //Airship Fuel
                tooltip.add(TextFormatting.DARK_GREEN + "||" + TextFormatting.BLACK + "---------iiiiii"
                        + TextFormatting.BLUE + I18n.format("vc.item.tt.airship.13", new Object[0]) + TextFormatting.DARK_BLUE + " : "
                        + TextFormatting.GRAY + "(" + stringColorMain + "-" + df.format(EnumsVC.AirshipTierEngine.byId(this.getMetadata(stack)).getFuelPerTick()) + TextFormatting.GRAY + ")"
                        + TextFormatting.BLACK + "---------il" + TextFormatting.DARK_GREEN + "||");
                tooltip.add(TextFormatting.DARK_GREEN + "================================");
                //tooltip.add("");
                //Used in the airship's "Upgrade" menu.
                tooltip.add(TextFormatting.BLACK + "il" + stringColorText + I18n.format("vc.item.tt.upgradeengine.#.4", new Object[0])
                        + TextFormatting.DARK_BLUE + " \"" + TextFormatting.BLUE + I18n.format("vc.item.tt.upgradeengine.#.5", new Object[0])
                        + TextFormatting.DARK_BLUE + "\" " + stringColorText + I18n.format("vc.item.tt.upgradeengine.#.6", new Object[0]));
                tooltip.add("");
                //Base Upgrade
                tooltip.add(TextFormatting.BLACK + "-------iil" + TextFormatting.DARK_BLUE + "-=" + TextFormatting.BLUE + I18n.format("vc.item.tt.upgradeengine.#.10", new Object[0]) + TextFormatting.DARK_BLUE + "=-");
                //Engine Tier can't exceed Frame Tier.
                tooltip.add(TextFormatting.BLACK + "ii" + TextFormatting.DARK_RED + I18n.format("vc.item.tt.upgradeengine.#.9", new Object[0]));
            }
            else {
                //Make your airships move faster!
                //Works in any airship version.
                tooltip.add(TextFormatting.BLACK + "-l" + stringColorText + I18n.format("vc.item.tt.upgradeengine.#.1", new Object[0]));
                tooltip.add(TextFormatting.BLACK + "---iii" + stringColorText + I18n.format("vc.item.tt.upgradeengine.#.2", new Object[0]));
                tooltip.add(TextFormatting.DARK_GREEN + "================================");
                //Airship Fuel
                tooltip.add(TextFormatting.DARK_GREEN + "||" + TextFormatting.BLACK + "---------iiiiii"
                        + TextFormatting.BLUE + I18n.format("vc.item.tt.airship.13", new Object[0]) + TextFormatting.DARK_BLUE + " : "
                        + TextFormatting.GRAY + "(" + stringColorMain + "-" + df.format(EnumsVC.AirshipTierEngine.byId(this.getMetadata(stack)).getFuelPerTick()) + TextFormatting.GRAY + ")"
                        + TextFormatting.BLACK + "---------il" + TextFormatting.DARK_GREEN + "||");
                tooltip.add(TextFormatting.DARK_GREEN + "================================");
                //tooltip.add("");
                //Used in the airship's "Upgrade" menu.
                tooltip.add(TextFormatting.BLACK + "il" + stringColorText + I18n.format("vc.item.tt.upgradeengine.#.4", new Object[0])
                        + TextFormatting.DARK_BLUE + " \"" + TextFormatting.BLUE + I18n.format("vc.item.tt.upgradeengine.#.5", new Object[0])
                        + TextFormatting.DARK_BLUE + "\" " + stringColorText + I18n.format("vc.item.tt.upgradeengine.#.6", new Object[0]));
                tooltip.add("");
                //Must upgrade from a Tier # engine.
                tooltip.add(TextFormatting.BLACK + "iiii" + TextFormatting.DARK_RED + I18n.format("vc.item.tt.upgradeengine.#.7", new Object[0]) + " " + (stack.getMetadata() - 1) + " " + I18n.format("vc.item.tt.upgradeengine.#.8", new Object[0]));
                //Engine Tier can't exceed Frame Tier.
                tooltip.add(TextFormatting.BLACK + "ii" + TextFormatting.DARK_RED + I18n.format("vc.item.tt.upgradeengine.#.9", new Object[0]));
            }
        }
        else {
            // "Hold shift for more info" helper line.
            tooltip.add(TextFormatting.GREEN + I18n.format("vc.item.tt.shifthelper.0", new Object[0]));
        }

        tooltip.add(TextFormatting.DARK_GREEN + "================================");
    }

    /**
     * Maps the tier metadata to a display rarity.
     *
     * NOTE(review): this getter also mutates the item's max stack size as a
     * side effect (fragments stack to 64, upgrades to 1) — it relies on
     * being called every frame the tooltip/render path runs; confirm before
     * refactoring.
     */
    public EnumRarity getRarity(ItemStack stack) {
        if(stack.getMetadata() == 0) {
            this.setMaxStackSize(64);
        }
        else {
            this.setMaxStackSize(1);
        }
        switch (this.getMetadata(stack)) {
        case 0:
            return EnumRarity.COMMON;
        case 1:
            return EnumRarity.COMMON;
        case 2:
            return EnumRarity.UNCOMMON;
        case 3:
            return EnumRarity.RARE;
        case 4:
            return EnumRarity.EPIC;
        case 5:
            return EnumRarity.EPIC;
        default:
            return EnumRarity.COMMON;
        }
    }

    /**
     * Returns a color-coded display name for the given stack; the color
     * tracks the rarity tier chosen in {@link #getRarity(ItemStack)}.
     */
    @Override
    public String getItemStackDisplayName(ItemStack stack) {
        String colorName = "";

        switch(stack.getMetadata()) {
        case 0:
            return colorName = TextFormatting.WHITE + "Engine Fragment";
        case 1:
            return colorName = TextFormatting.WHITE + "Engine Upgrade " + TextFormatting.GRAY + "(" + TextFormatting.WHITE +
                    EnumsVC.AirshipTierEngine.byId(this.getMetadata(stack)).getLocalizedName() + TextFormatting.GRAY + ")";
        case 2:
            return colorName = TextFormatting.YELLOW + "Engine Upgrade " + TextFormatting.GRAY + "(" + TextFormatting.YELLOW +
                    EnumsVC.AirshipTierEngine.byId(this.getMetadata(stack)).getLocalizedName() + TextFormatting.GRAY + ")";
        case 3:
            return colorName = TextFormatting.AQUA + "Engine Upgrade " + TextFormatting.GRAY + "(" + TextFormatting.AQUA +
                    EnumsVC.AirshipTierEngine.byId(this.getMetadata(stack)).getLocalizedName() + TextFormatting.GRAY + ")";
        case 4:
            return colorName = TextFormatting.LIGHT_PURPLE + "Engine Upgrade " + TextFormatting.GRAY + "(" + TextFormatting.LIGHT_PURPLE +
                    EnumsVC.AirshipTierEngine.byId(this.getMetadata(stack)).getLocalizedName() + TextFormatting.GRAY + ")";
        case 5:
            return colorName = TextFormatting.RED + "Engine Upgrade " + TextFormatting.GRAY + "(" + TextFormatting.RED +
                    EnumsVC.AirshipTierEngine.byId(this.getMetadata(stack)).getLocalizedName() + TextFormatting.GRAY + ")";
        }
        // Unreachable for known metadata values; empty fallback.
        return colorName;
    }

    /**
     * Registers one creative-menu entry per engine tier.
     */
    @SideOnly(Side.CLIENT)
    @Override
    public void getSubItems(final CreativeTabs tab, final NonNullList<ItemStack> subItems) {
        if (isInCreativeTab(tab)) {
            final List<ItemStack> items = Stream.of(EnumsVC.AirshipTierEngine.values())
                    .map(enumType -> new ItemStack(this, 1, enumType.getMetadata()))
                    .collect(Collectors.toList());
            subItems.addAll(items);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This is not the original file distributed by the Apache Software Foundation
 * It has been modified by the Hipparchus project
 */
package org.hipparchus.optim.nonlinear.scalar.noderiv;

import org.hipparchus.exception.LocalizedCoreFormats;
import org.hipparchus.exception.MathIllegalArgumentException;
import org.hipparchus.exception.MathRuntimeException;
import org.hipparchus.optim.ConvergenceChecker;
import org.hipparchus.optim.PointValuePair;
import org.hipparchus.optim.nonlinear.scalar.GoalType;
import org.hipparchus.optim.nonlinear.scalar.LineSearch;
import org.hipparchus.optim.nonlinear.scalar.MultivariateOptimizer;
import org.hipparchus.optim.univariate.UnivariatePointValuePair;
import org.hipparchus.util.FastMath;

/**
 * Powell's algorithm.
 * This code is translated and adapted from the Python version of this
 * algorithm (as implemented in module {@code optimize.py} v0.5 of
 * <em>SciPy</em>).
 * <br/>
 * The default stopping criterion is based on the differences of the
 * function value between two successive iterations. It is however possible
 * to define a custom convergence checker that might terminate the algorithm
 * earlier.
 * <br/>
 * Line search is performed by the {@link LineSearch} class.
 * <br/>
 * Constraints are not supported: the call to
 * {@link #optimize(OptimizationData[]) optimize} will throw
 * {@link MathRuntimeException} if bounds are passed to it.
 * In order to impose simple constraints, the objective function must be
 * wrapped in an adapter like
 * {@link org.hipparchus.optim.nonlinear.scalar.MultivariateFunctionMappingAdapter
 * MultivariateFunctionMappingAdapter} or
 * {@link org.hipparchus.optim.nonlinear.scalar.MultivariateFunctionPenaltyAdapter
 * MultivariateFunctionPenaltyAdapter}.
 *
 */
public class PowellOptimizer
    extends MultivariateOptimizer {
    /**
     * Minimum relative tolerance (two ulps of 1.0); smaller values would be
     * below double precision.
     */
    private static final double MIN_RELATIVE_TOLERANCE = 2 * FastMath.ulp(1d);
    /**
     * Relative threshold.
     */
    private final double relativeThreshold;
    /**
     * Absolute threshold.
     */
    private final double absoluteThreshold;
    /**
     * Line search.
     */
    private final LineSearch line;

    /**
     * This constructor allows to specify a user-defined convergence checker,
     * in addition to the parameters that control the default convergence
     * checking procedure.
     * <br/>
     * The internal line search tolerances are set to the square-root of their
     * corresponding value in the multivariate optimizer.
     *
     * @param rel Relative threshold.
     * @param abs Absolute threshold.
     * @param checker Convergence checker.
     * @throws MathIllegalArgumentException if {@code abs <= 0}.
     * @throws MathIllegalArgumentException if {@code rel < 2 * Math.ulp(1d)}.
     */
    public PowellOptimizer(double rel,
                           double abs,
                           ConvergenceChecker<PointValuePair> checker) {
        this(rel, abs, FastMath.sqrt(rel), FastMath.sqrt(abs), checker);
    }

    /**
     * This constructor allows to specify a user-defined convergence checker,
     * in addition to the parameters that control the default convergence
     * checking procedure and the line search tolerances.
     *
     * @param rel Relative threshold for this optimizer.
     * @param abs Absolute threshold for this optimizer.
     * @param lineRel Relative threshold for the internal line search optimizer.
     * @param lineAbs Absolute threshold for the internal line search optimizer.
     * @param checker Convergence checker.
     * @throws MathIllegalArgumentException if {@code abs <= 0}.
     * @throws MathIllegalArgumentException if {@code rel < 2 * Math.ulp(1d)}.
     */
    public PowellOptimizer(double rel,
                           double abs,
                           double lineRel,
                           double lineAbs,
                           ConvergenceChecker<PointValuePair> checker) {
        super(checker);

        // Validate tolerances before storing them.
        if (rel < MIN_RELATIVE_TOLERANCE) {
            throw new MathIllegalArgumentException(LocalizedCoreFormats.NUMBER_TOO_SMALL,
                                                   rel, MIN_RELATIVE_TOLERANCE);
        }
        if (abs <= 0) {
            throw new MathIllegalArgumentException(LocalizedCoreFormats.NUMBER_TOO_SMALL_BOUND_EXCLUDED,
                                                   abs, 0);
        }
        relativeThreshold = rel;
        absoluteThreshold = abs;

        // Create the line search optimizer.
        line = new LineSearch(this,
                              lineRel,
                              lineAbs,
                              1d);
    }

    /**
     * The parameters control the default convergence checking procedure.
     * <br/>
     * The internal line search tolerances are set to the square-root of their
     * corresponding value in the multivariate optimizer.
     *
     * @param rel Relative threshold.
     * @param abs Absolute threshold.
     * @throws MathIllegalArgumentException if {@code abs <= 0}.
     * @throws MathIllegalArgumentException if {@code rel < 2 * Math.ulp(1d)}.
     */
    public PowellOptimizer(double rel,
                           double abs) {
        this(rel, abs, null);
    }

    /**
     * Builds an instance with the default convergence checking procedure.
     *
     * @param rel Relative threshold.
     * @param abs Absolute threshold.
     * @param lineRel Relative threshold for the internal line search optimizer.
     * @param lineAbs Absolute threshold for the internal line search optimizer.
     * @throws MathIllegalArgumentException if {@code abs <= 0}.
     * @throws MathIllegalArgumentException if {@code rel < 2 * Math.ulp(1d)}.
     */
    public PowellOptimizer(double rel,
                           double abs,
                           double lineRel,
                           double lineAbs) {
        this(rel, abs, lineRel, lineAbs, null);
    }

    /** {@inheritDoc} */
    @Override
    protected PointValuePair doOptimize() {
        checkParameters();

        final GoalType goal = getGoalType();
        final double[] guess = getStartPoint();
        final int n = guess.length;

        // Search directions start as the identity basis vectors.
        final double[][] direc = new double[n][n];
        for (int i = 0; i < n; i++) {
            direc[i][i] = 1;
        }

        final ConvergenceChecker<PointValuePair> checker
            = getConvergenceChecker();

        double[] x = guess;
        double fVal = computeObjectiveValue(x);
        double[] x1 = x.clone();
        while (true) {
            incrementIterationCount();

            final double fX = fVal;
            double fX2 = 0;
            double delta = 0;
            int bigInd = 0;
            double alphaMin = 0;

            // Line-search along each direction in turn, remembering which
            // direction (bigInd) produced the largest decrease (delta).
            for (int i = 0; i < n; i++) {
                final double[] d = direc[i].clone();

                fX2 = fVal;

                final UnivariatePointValuePair optimum = line.search(x, d);
                fVal = optimum.getValue();
                alphaMin = optimum.getPoint();
                final double[][] result = newPointAndDirection(x, d, alphaMin);
                x = result[0];

                if ((fX2 - fVal) > delta) {
                    delta = fX2 - fVal;
                    bigInd = i;
                }
            }

            // Default convergence check.
            boolean stop = 2 * (fX - fVal) <=
                (relativeThreshold * (FastMath.abs(fX) + FastMath.abs(fVal)) +
                 absoluteThreshold);

            final PointValuePair previous = new PointValuePair(x1, fX);
            final PointValuePair current = new PointValuePair(x, fVal);
            if (!stop && checker != null) { // User-defined stopping criteria.
                stop = checker.converged(getIterations(), previous, current);
            }
            if (stop) {
                // Return whichever of the two bracketing points is better
                // for the requested goal.
                if (goal == GoalType.MINIMIZE) {
                    return (fVal < fX) ? current : previous;
                } else {
                    return (fVal > fX) ? current : previous;
                }
            }

            // d = net displacement over the iteration; x2 = extrapolation of
            // that displacement past the new point.
            final double[] d = new double[n];
            final double[] x2 = new double[n];
            for (int i = 0; i < n; i++) {
                d[i] = x[i] - x1[i];
                x2[i] = 2 * x[i] - x1[i];
            }

            x1 = x.clone();
            fX2 = computeObjectiveValue(x2);

            if (fX > fX2) {
                // Powell's test (as in the SciPy source this is translated
                // from): decide whether the direction of largest decrease
                // should be replaced by the net displacement direction.
                double t = 2 * (fX + fX2 - 2 * fVal);
                double temp = fX - fVal - delta;
                t *= temp * temp;
                temp = fX - fX2;
                t -= delta * temp * temp;

                if (t < 0.0) {
                    final UnivariatePointValuePair optimum = line.search(x, d);
                    fVal = optimum.getValue();
                    alphaMin = optimum.getPoint();
                    final double[][] result = newPointAndDirection(x, d, alphaMin);
                    x = result[0];

                    // Replace the best-decrease direction with the last basis
                    // direction, and the last with the new search direction.
                    final int lastInd = n - 1;
                    direc[bigInd] = direc[lastInd];
                    direc[lastInd] = result[1];
                }
            }
        }
    }

    /**
     * Compute a new point (in the original space) and a new direction
     * vector, resulting from the line search.
     *
     * @param p Point used in the line search.
     * @param d Direction used in the line search.
     * @param optimum Optimum found by the line search.
     * @return a 2-element array containing the new point (at index 0) and
     * the new direction (at index 1).
     */
    private double[][] newPointAndDirection(double[] p,
                                            double[] d,
                                            double optimum) {
        final int n = p.length;
        final double[] nP = new double[n];
        final double[] nD = new double[n];
        for (int i = 0; i < n; i++) {
            nD[i] = d[i] * optimum;
            nP[i] = p[i] + nD[i];
        }

        final double[][] result = new double[2][];
        result[0] = nP;
        result[1] = nD;

        return result;
    }

    /**
     * @throws MathRuntimeException if bounds were passed to the
     * {@link #optimize(OptimizationData[]) optimize} method.
     */
    private void checkParameters() {
        if (getLowerBound() != null ||
            getUpperBound() != null) {
            throw new MathRuntimeException(LocalizedCoreFormats.CONSTRAINT);
        }
    }
}
/* Derby - Class org.apache.derby.impl.services.reflect.UpdateLoader Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.services.reflect; import org.apache.derby.iapi.services.context.ContextService; import org.apache.derby.iapi.services.monitor.Monitor; import org.apache.derby.iapi.services.stream.HeaderPrintWriter; import org.apache.derby.iapi.util.IdUtil; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.services.locks.ShExLockable; import org.apache.derby.iapi.services.locks.ShExQual; import org.apache.derby.iapi.services.locks.LockFactory; import org.apache.derby.iapi.services.locks.Latch; import org.apache.derby.iapi.services.locks.C_LockFactory; import org.apache.derby.iapi.services.loader.ClassFactoryContext; import org.apache.derby.iapi.services.loader.JarReader; import org.apache.derby.iapi.services.property.PersistentSet; import org.apache.derby.iapi.services.property.PropertyUtil; import org.apache.derby.iapi.reference.Property; import java.io.InputStream; import java.security.AccessController; import org.apache.derby.iapi.reference.MessageId; import org.apache.derby.iapi.reference.Module; import org.apache.derby.iapi.services.i18n.MessageService; import 
org.apache.derby.iapi.services.locks.CompatibilitySpace;
import org.apache.derby.iapi.services.locks.LockOwner;

/**
 * UpdateLoader implements the functionality of
 * derby.database.classpath. It manages the ClassLoaders
 * (instances of JarLoader) for each installed jar file.
 * Jar files are installed through the sqlj.install_jar procedure.
 * <BR>
 * Each JarLoader delegates any request through standard mechanisms
 * to load a class to this object, which will then ask each jarLoader in order of
 * derby.database.classpath to load the class through an internal api.
 * This means if the third jar in derby.database.classpath tries to load
 * a class, say from the class for a procedure's method making some
 * reference to it, then the request is delegated to UpdateLoader.
 * UpdateLoader will then try to load the class from each of the jars
 * in order of derby.database.classpath using the jar's installed JarLoader.
 */
final class UpdateLoader implements LockOwner {

	/**
	 * List of packages that Derby will not support being loaded
	 * from an installed jar file.
	 */
	private static final String[] RESTRICTED_PACKAGES = {
		// While loading java. classes is blocked by the standard
		// class loading mechanism, javax. ones are not. However
		// allowing database applications to override jvm classes
		// seems a bad idea.
		"javax.",

		// Allowing an application to possible override the engine's
		// own classes also seems dangerous.
		"org.apache.derby.",
	};

	// Loaders for the installed jars, in derby.database.classpath order.
	private JarLoader[] jarList;
	// Verbose stream; null unless verbose logging was requested.
	private HeaderPrintWriter vs;
	private final ClassLoader myLoader;
	private boolean initDone;
	private String thisClasspath;
	private final LockFactory lf;
	private final ShExLockable classLoaderLock;
	private int version;
    private boolean normalizeToUpper;
	private DatabaseClasses parent;
	private final CompatibilitySpace compat;

	// Set by needReload(); cleared after reload() rebuilds the loaders.
	private boolean needReload;
	private JarReader jarReader;

	UpdateLoader(String classpath, DatabaseClasses parent, boolean verbose, boolean normalizeToUpper)
		throws StandardException {

        this.normalizeToUpper = normalizeToUpper;
		this.parent = parent;
		lf = (LockFactory) Monitor.getServiceModule(parent, Module.LockFactory);
		compat = (lf != null) ? lf.createCompatibilitySpace(this) : null;

		if (verbose) {
			vs = Monitor.getStream();
		}

		myLoader = getClass().getClassLoader();

		this.classLoaderLock = new ClassLoaderLock(this);

		initializeFromClassPath(classpath);
	}

	// Parses the classpath and creates one JarLoader per element.
	// Loader creation happens inside a privileged block since creating
	// class loaders is a restricted operation.
	private void initializeFromClassPath(String classpath) throws StandardException {

		final String[][] elements = IdUtil.parseDbClassPath(classpath);

		final int jarCount = elements.length;
		jarList = new JarLoader[jarCount];

        if (jarCount != 0) {
            // Creating class loaders is a restricted operation
            // so we need to use a privileged block.
            AccessController.doPrivileged
            (new java.security.PrivilegedAction<Object>(){

                public Object run(){
    		      for (int i = 0; i < jarCount; i++) {
    			     jarList[i] = new JarLoader(UpdateLoader.this, elements[i], vs);
    		      }
                  return null;
                }
            });
        }
		if (vs != null) {
			vs.println(MessageService.getTextMessage(MessageId.CM_CLASS_LOADER_START, classpath));
		}

		thisClasspath = classpath;
		initDone = false;
	}

	/**
		Load the class from the class path. Called by JarLoader
		when it has a request to load a class to fulfill
		the semantics of derby.database.classpath.
		<P>
		Enforces two restrictions:
		<UL>
		<LI> Do not allow classes in certain name spaces to be loaded
		from installed jars, see RESTRICTED_PACKAGES for the list.
		<LI> Referencing Derby's internal classes (those outside the
		public api) from installed is disallowed. This is to stop
		user defined routines bypassing security or taking advantage
		of security holes in Derby. E.g. allowing a routine to
		call a public method in derby would allow such routines
		to call public static methods for system procedures without
		having been granted permission on them, such as setting database
		properties.
		</UL>

		@exception ClassNotFoundException Class can not be found or
		the installed jar is restricted from loading it.
	*/
	Class loadClass(String className, boolean resolve)
		throws ClassNotFoundException {

		JarLoader jl = null;

		boolean unlockLoader = false;
		try {
			// Shared lock: many readers may load concurrently; an EX lock
			// held by modifyClasspath/modifyJar blocks us during changes.
			unlockLoader = lockClassLoader(ShExQual.SH);

			synchronized (this) {

				if (needReload) {
					reload();
				}

				Class clazz = checkLoaded(className, resolve);
				if (clazz != null)
					return clazz;

                // Refuse to load classes from restricted name spaces
                // That is classes in those name spaces can be not
                // loaded from installed jar files.
                for (int i = 0; i < RESTRICTED_PACKAGES.length; i++)
                {
                    if (className.startsWith(RESTRICTED_PACKAGES[i]))
                        throw new ClassNotFoundException(className);
                }

				String jvmClassName = className.replace('.', '/').concat(".class");

				if (!initDone)
					initLoaders();

				// Ask each installed jar in classpath order.
				for (int i = 0; i < jarList.length; i++) {

					jl = jarList[i];
					Class c = jl.loadClassData(className, jvmClassName, resolve);
					if (c != null) {
						if (vs != null)
							vs.println(MessageService.getTextMessage(MessageId.CM_CLASS_LOAD, className, jl.getJarName()));

						return c;
					}
				}
			}

			return null;

		} catch (StandardException se) {
			throw new ClassNotFoundException(MessageService.getTextMessage(MessageId.CM_CLASS_LOAD_EXCEPTION, className, jl == null ? null : jl.getJarName(), se));
		} finally {
			if (unlockLoader) {
				lf.unlock(compat, this, classLoaderLock, ShExQual.SH);
			}
		}
	}

	// Resolve a resource: first through the engine's own loader, then
	// through the installed jars (holding a shared class-loader lock).
	InputStream getResourceAsStream(String name) {

		InputStream is = (myLoader == null) ?
			ClassLoader.getSystemResourceAsStream(name) :
			myLoader.getResourceAsStream(name);

		if (is != null)
			return is;

		// match behaviour of standard class loaders.
		if (name.endsWith(".class"))
			return null;

		boolean unlockLoader = false;
		try {
			unlockLoader = lockClassLoader(ShExQual.SH);

			synchronized (this) {

				if (needReload) {
					reload();
				}

				if (!initDone)
					initLoaders();

				for (int i = 0; i < jarList.length; i++) {

					JarLoader jl = jarList[i];

					is = jl.getStream(name);
					if (is != null) {
						return is;
					}
				}
			}

			return null;

		} catch (StandardException se) {
			// Best-effort lookup: lock failures simply mean "not found".
			return null;
		} finally {
			if (unlockLoader) {
				lf.unlock(compat, this, classLoaderLock, ShExQual.SH);
			}
		}
	}

	synchronized void modifyClasspath(String classpath)
		throws StandardException {

		// lock transaction classloader exclusively
		lockClassLoader(ShExQual.EX);
		version++;

		modifyJar(false);
		initializeFromClassPath(classpath);
	}

	synchronized void modifyJar(boolean reload) throws StandardException {

		// lock transaction classloader exclusively
		lockClassLoader(ShExQual.EX);
		version++;

		if (!initDone)
			return;
        
        // first close the existing jar file opens
        close();

		if (reload) {
			initializeFromClassPath(thisClasspath);
		}
	}

	// Acquires the class-loader lock in the caller's lock space if one is
	// available, else in this loader's own compatibility space. Returns
	// true iff the lock was taken in our own space (so the caller must
	// release it).
	private boolean lockClassLoader(ShExQual qualifier)
		throws StandardException {

		if (lf == null)
			return false;

		ClassFactoryContext cfc = (ClassFactoryContext) ContextService.getContextOrNull(ClassFactoryContext.CONTEXT_ID);

		// This method can be called from outside of the database
		// engine, in which case tc will be null. In that case
		// we lock the class loader only for the duration of
		// the loadClass().
		CompatibilitySpace lockSpace = null;

		if (cfc != null) {
			lockSpace = cfc.getLockSpace();
		}
		if (lockSpace == null)
			lockSpace = compat;

		Object lockGroup = lockSpace.getOwner();

		lf.lockObject(lockSpace, lockGroup, classLoaderLock, qualifier,
					  C_LockFactory.TIMED_WAIT);

		return (lockGroup == this);
	}

	Class checkLoaded(String className, boolean resolve) {

		for (int i = 0; i < jarList.length; i++) {
			Class c = jarList[i].checkLoaded(className, resolve);
			if (c != null)
				return c;
		}
		return null;
	}

	void close() {

		for (int i = 0; i < jarList.length; i++) {
			jarList[i].setInvalid();
		}

	}

	private void initLoaders() {

		if (initDone)
			return;

		for (int i = 0; i < jarList.length; i++) {
			jarList[i].initialize();
		}
		initDone = true;
	}

	int getClassLoaderVersion() {
		return version;
	}

	synchronized void needReload() {
		version++;
		needReload = true;
	}

	// Rebuilds the jar loaders from the current persisted classpath.
	// Caller must hold the monitor (called from synchronized blocks).
	private void reload() throws StandardException {
		thisClasspath = getClasspath();
		// first close the existing jar file opens
		close();
		initializeFromClassPath(thisClasspath);
		needReload = false;
	}

	private String getClasspath()
		throws StandardException {

		ClassFactoryContext cfc = (ClassFactoryContext) ContextService.getContextOrNull(ClassFactoryContext.CONTEXT_ID);

		PersistentSet ps = cfc.getPersistentSet();
		
		String classpath = PropertyUtil.getServiceProperty(ps, Property.DATABASE_CLASSPATH);

		//
		//In per database mode we must always have a classpath. If we do not
		//yet have one we make one up.
		if (classpath==null)
			classpath="";

		return classpath;
	}

	JarReader getJarReader() {
		if (jarReader == null) {

			ClassFactoryContext cfc = (ClassFactoryContext) ContextService.getContextOrNull(ClassFactoryContext.CONTEXT_ID);

			jarReader = cfc.getJarReader();
		}
		return jarReader;
	}

	/**
	 * Tell the lock manager that we don't want timed waits to time out
	 * immediately.
	 *
	 * @return {@code false}
	 */
	public boolean noWait() {
		return false;
	}
	
	public boolean isNestedOwner() {
		return false;
	}

	public boolean nestsUnder( LockOwner other ) {
		return false;
	}
}

/**
 * Lockable guarding the UpdateLoader: releasing an exclusive lock on it
 * signals the loader that its classpath may have changed.
 */
class ClassLoaderLock extends ShExLockable {

	private UpdateLoader myLoader;

	ClassLoaderLock(UpdateLoader myLoader) {
		this.myLoader = myLoader;
	}

	public void unlockEvent(Latch lockInfo)
	{
		super.unlockEvent(lockInfo);

		if (lockInfo.getQualifier().equals(ShExQual.EX)) {
			// how do we tell if we are reverting or not
			myLoader.needReload();
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.extensions.yui.calendar;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Locale;

import org.apache.wicket.Session;
import org.apache.wicket.datetime.StyleDateConverter;
import org.apache.wicket.datetime.markup.html.form.DateTextField;
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.markup.html.form.ChoiceRenderer;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.link.Link;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.PropertyModel;

/**
 * Demonstrates components from the wicket-date project and a bunch of locale fiddling.
 */
public class DatesPage1 extends WebPage
{
	private static final long serialVersionUID = 1L;

	/**
	 * Renders a locale as its name in the currently selected locale, with the
	 * English name appended in parentheses when the two differ.
	 */
	private final class LocaleChoiceRenderer extends ChoiceRenderer<Locale>
	{
		private static final long serialVersionUID = 1L;

		/**
		 * Constructor.
		 */
		public LocaleChoiceRenderer()
		{
		}

		/**
		 * @see org.apache.wicket.markup.html.form.IChoiceRenderer#getDisplayValue(Object)
		 */
		@Override
		public Object getDisplayValue(Locale locale)
		{
			final String inEnglish = locale.getDisplayName(LOCALE_EN);
			final String inSelected = locale.getDisplayName(selectedLocale);
			if (inEnglish.equals(inSelected))
			{
				return inSelected;
			}
			return inSelected + (" (" + inEnglish + ")");
		}
	}

	/**
	 * Dropdown offering the known locales, sorted by their display name in the
	 * currently selected locale.
	 */
	private final class LocaleDropDownChoice extends DropDownChoice<Locale>
	{
		private static final long serialVersionUID = 1L;

		/**
		 * Construct.
		 *
		 * @param id
		 *            component id
		 */
		public LocaleDropDownChoice(String id)
		{
			super(id);

			// sort locales on strings of selected locale
			setChoices(new AbstractReadOnlyModel<List<? extends Locale>>()
			{
				private static final long serialVersionUID = 1L;

				@Override
				public List<Locale> getObject()
				{
					getSelectedLocale();
					final List<Locale> sorted = new ArrayList<Locale>(LOCALES);
					Collections.sort(sorted, new Comparator<Locale>()
					{
						public int compare(Locale left, Locale right)
						{
							final String leftName = left.getDisplayName(selectedLocale);
							final String rightName = right.getDisplayName(selectedLocale);
							return leftName.compareTo(rightName);
						}
					});
					return sorted;
				}
			});
			setChoiceRenderer(new LocaleChoiceRenderer());
			setDefaultModel(new PropertyModel<Locale>(DatesPage1.this, "selectedLocale"));
		}

		/**
		 * @see org.apache.wicket.markup.html.form.DropDownChoice#onSelectionChanged(java.lang.Object)
		 */
		@Override
		public void onSelectionChanged(Locale newSelection)
		{
			// the property model already updated selectedLocale; nothing to do
		}

		/**
		 * @see org.apache.wicket.markup.html.form.DropDownChoice#wantOnSelectionChangedNotifications()
		 */
		@Override
		protected boolean wantOnSelectionChangedNotifications()
		{
			// round-trip immediately on selection so the page re-renders
			return true;
		}
	}

	private static final Locale LOCALE_EN = new Locale("en");

	// Fixed set of locales offered by the dropdown.
	private static final List<Locale> LOCALES;
	static
	{
		LOCALES = Arrays.asList(Locale.CANADA, Locale.CANADA_FRENCH, Locale.CHINA,
			Locale.ENGLISH, Locale.FRANCE, Locale.FRENCH, Locale.GERMAN, Locale.GERMANY,
			Locale.ITALIAN, Locale.ITALY, Locale.JAPAN, Locale.JAPANESE, Locale.KOREA,
			Locale.KOREAN, Locale.PRC, Locale.SIMPLIFIED_CHINESE, Locale.TAIWAN,
			Locale.TRADITIONAL_CHINESE, Locale.UK, Locale.US);
	}

	private final Date date = new Date();

	private Locale selectedLocale = LOCALE_EN;

	/**
	 * Constructor
	 */
	public DatesPage1()
	{
		selectedLocale = Session.get().getLocale();

		// locale selection form: dropdown plus a reset-to-English link
		final Form<?> localeSelectionForm = new Form<Void>("localeForm");
		localeSelectionForm.add(new LocaleDropDownChoice("localeSelect"));
		localeSelectionForm.add(new Link<Void>("localeUSLink")
		{
			private static final long serialVersionUID = 1L;

			@Override
			public void onClick()
			{
				selectedLocale = LOCALE_EN;
			}
		});
		add(localeSelectionForm);

		// date form: text field with a style converter and a popup picker,
		// rendered in whatever locale is currently selected
		final DateTextField dateField = new DateTextField("dateTextField",
			new PropertyModel<Date>(this, "date"), new StyleDateConverter("S-", true))
		{
			private static final long serialVersionUID = 1L;

			@Override
			public Locale getLocale()
			{
				return selectedLocale;
			}
		};
		dateField.add(new DatePicker());

		final Form<?> dateForm = new Form<Void>("form")
		{
			private static final long serialVersionUID = 1L;

			@Override
			protected void onSubmit()
			{
				info("set date to " + date);
			}
		};
		dateForm.add(dateField);
		add(dateForm);

		add(new FeedbackPanel("feedback"));
	}

	/**
	 * @return the selected locale
	 */
	public final Locale getSelectedLocale()
	{
		return selectedLocale;
	}

	/**
	 * @param selectedLocale
	 */
	public final void setSelectedLocale(Locale selectedLocale)
	{
		this.selectedLocale = selectedLocale;
	}
}
package org.robolectric.shadows;

import static android.os.Build.VERSION_CODES.M;
import static android.os.Build.VERSION_CODES.O;

import android.annotation.TargetApi;
import android.media.AudioAttributes;
import android.media.AudioManager;
import android.media.AudioPlaybackConfiguration;
import android.os.Build.VERSION_CODES;
import android.os.Parcel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.util.ReflectionHelpers;

/**
 * Shadow of {@link AudioManager} that keeps all audio state (per-stream volumes, ringer mode,
 * audio mode, focus requests, parameters, mute state) in memory so tests can set it up and
 * inspect it without a real audio service.
 */
@SuppressWarnings({"UnusedDeclaration"})
@Implements(AudioManager.class)
public class ShadowAudioManager {
  public static final int MAX_VOLUME_MUSIC_DTMF = 15;
  public static final int DEFAULT_MAX_VOLUME = 7;
  public static final int DEFAULT_VOLUME = 7;
  public static final int INVALID_VOLUME = 0;
  public static final int FLAG_NO_ACTION = 0;

  /** All stream types tracked by this shadow. */
  public static final int[] ALL_STREAMS = {
    AudioManager.STREAM_MUSIC,
    AudioManager.STREAM_ALARM,
    AudioManager.STREAM_NOTIFICATION,
    AudioManager.STREAM_RING,
    AudioManager.STREAM_SYSTEM,
    AudioManager.STREAM_VOICE_CALL,
    AudioManager.STREAM_DTMF
  };

  private AudioFocusRequest lastAudioFocusRequest;
  private int nextResponseValue = AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
  private AudioManager.OnAudioFocusChangeListener lastAbandonedAudioFocusListener;
  private android.media.AudioFocusRequest lastAbandonedAudioFocusRequest;
  // Program to the Map interface; the reference never changes, only the contents do.
  private final Map<Integer, AudioStream> streamStatus = new HashMap<>();
  private List<AudioPlaybackConfiguration> activePlaybackConfigurations = Collections.emptyList();
  private int ringerMode = AudioManager.RINGER_MODE_NORMAL;
  private int mode = AudioManager.MODE_NORMAL;
  private boolean bluetoothA2dpOn;
  private boolean isBluetoothScoOn;
  private boolean isSpeakerphoneOn;
  private boolean isMicrophoneMuted = false;
  private boolean isMusicActive;
  private boolean wiredHeadsetOn;
  private final Map<String, String> parameters = new HashMap<>();
  private final Map<Integer, Boolean> streamsMuteState = new HashMap<>();

  public ShadowAudioManager() {
    for (int stream : ALL_STREAMS) {
      streamStatus.put(stream, new AudioStream(DEFAULT_VOLUME, DEFAULT_MAX_VOLUME, FLAG_NO_ACTION));
    }
    // Music and DTMF streams get a higher maximum volume than the other streams.
    streamStatus.get(AudioManager.STREAM_MUSIC).setMaxVolume(MAX_VOLUME_MUSIC_DTMF);
    streamStatus.get(AudioManager.STREAM_DTMF).setMaxVolume(MAX_VOLUME_MUSIC_DTMF);
  }

  /** Returns the maximum volume for {@code streamType}, or {@link #INVALID_VOLUME} if unknown. */
  @Implementation
  protected int getStreamMaxVolume(int streamType) {
    AudioStream stream = streamStatus.get(streamType);
    return (stream != null) ? stream.getMaxVolume() : INVALID_VOLUME;
  }

  /** Returns the current volume for {@code streamType}, or {@link #INVALID_VOLUME} if unknown. */
  @Implementation
  protected int getStreamVolume(int streamType) {
    AudioStream stream = streamStatus.get(streamType);
    return (stream != null) ? stream.getCurrentVolume() : INVALID_VOLUME;
  }

  /** Sets the volume and last flags for {@code streamType}; unknown stream types are ignored. */
  @Implementation
  protected void setStreamVolume(int streamType, int index, int flags) {
    AudioStream stream = streamStatus.get(streamType);
    if (stream != null) {
      stream.setCurrentVolume(index);
      stream.setFlag(flags);
    }
  }

  /**
   * Records the focus request for inspection via {@link #getLastAudioFocusRequest()} and returns
   * the value configured with {@link #setNextFocusRequestResponse(int)}.
   */
  @Implementation
  protected int requestAudioFocus(
      android.media.AudioManager.OnAudioFocusChangeListener l, int streamType, int durationHint) {
    lastAudioFocusRequest = new AudioFocusRequest(l, streamType, durationHint);
    return nextResponseValue;
  }

  /**
   * Provides a mock like interface for the requestAudioFocus method by storing the request object
   * for later inspection and returning the value specified in setNextFocusRequestResponse.
   */
  @Implementation(minSdk = O)
  protected int requestAudioFocus(android.media.AudioFocusRequest audioFocusRequest) {
    lastAudioFocusRequest = new AudioFocusRequest(audioFocusRequest);
    return nextResponseValue;
  }

  /**
   * Records the abandoning listener for inspection via
   * {@link #getLastAbandonedAudioFocusListener()} and returns the configured response value.
   */
  @Implementation
  protected int abandonAudioFocus(AudioManager.OnAudioFocusChangeListener l) {
    lastAbandonedAudioFocusListener = l;
    return nextResponseValue;
  }

  /**
   * Provides a mock like interface for the abandonAudioFocusRequest method by storing the request
   * object for later inspection and returning the value specified in setNextFocusRequestResponse.
   */
  @Implementation(minSdk = O)
  protected int abandonAudioFocusRequest(android.media.AudioFocusRequest audioFocusRequest) {
    lastAbandonedAudioFocusRequest = audioFocusRequest;
    return nextResponseValue;
  }

  @Implementation
  protected int getRingerMode() {
    return ringerMode;
  }

  /** Updates the ringer mode; values rejected by {@link AudioManager#isValidRingerMode} are ignored. */
  @Implementation
  protected void setRingerMode(int ringerMode) {
    if (!AudioManager.isValidRingerMode(ringerMode)) {
      return;
    }
    this.ringerMode = ringerMode;
  }

  /**
   * Returns true for ringer modes in {@code [0, RINGER_MODE_MAX]}; the upper bound is read
   * reflectively because the field is not part of the public SDK.
   */
  public static boolean isValidRingerMode(int ringerMode) {
    return ringerMode >= 0
        && ringerMode
            <= (int) ReflectionHelpers.getStaticField(AudioManager.class, "RINGER_MODE_MAX");
  }

  @Implementation
  protected void setMode(int mode) {
    this.mode = mode;
  }

  @Implementation
  protected int getMode() {
    return this.mode;
  }

  /** Test helper: sets the maximum volume of every tracked stream at once. */
  public void setStreamMaxVolume(int streamMaxVolume) {
    for (Map.Entry<Integer, AudioStream> entry : streamStatus.entrySet()) {
      entry.getValue().setMaxVolume(streamMaxVolume);
    }
  }

  /** Test helper: sets the current volume of every tracked stream at once. */
  public void setStreamVolume(int streamVolume) {
    for (Map.Entry<Integer, AudioStream> entry : streamStatus.entrySet()) {
      entry.getValue().setCurrentVolume(streamVolume);
    }
  }

  @Implementation
  protected void setWiredHeadsetOn(boolean on) {
    wiredHeadsetOn = on;
  }

  @Implementation
  protected boolean isWiredHeadsetOn() {
    return wiredHeadsetOn;
  }

  @Implementation
  protected void setBluetoothA2dpOn(boolean on) {
    bluetoothA2dpOn = on;
  }

  @Implementation
  protected boolean isBluetoothA2dpOn() {
    return bluetoothA2dpOn;
  }

  @Implementation
  protected void setSpeakerphoneOn(boolean on) {
    isSpeakerphoneOn = on;
  }

  @Implementation
  protected boolean isSpeakerphoneOn() {
    return isSpeakerphoneOn;
  }

  @Implementation
  protected void setMicrophoneMute(boolean on) {
    isMicrophoneMuted = on;
  }

  @Implementation
  protected boolean isMicrophoneMute() {
    return isMicrophoneMuted;
  }

  @Implementation
  protected boolean isBluetoothScoOn() {
    return isBluetoothScoOn;
  }

  @Implementation
  protected void setBluetoothScoOn(boolean isBluetoothScoOn) {
    this.isBluetoothScoOn = isBluetoothScoOn;
  }

  @Implementation
  protected boolean isMusicActive() {
    return isMusicActive;
  }

  /** Returns a defensive copy so callers cannot mutate the shadow's internal list. */
  @Implementation(minSdk = O)
  protected List<AudioPlaybackConfiguration> getActivePlaybackConfigurations() {
    return new ArrayList<>(activePlaybackConfigurations);
  }

  /**
   * Parses {@code keyValuePairs} of the form {@code key1=value1;key2=value2;} and stores each pair
   * for retrieval via {@link #getParameter(String)}.
   *
   * @throws IllegalArgumentException if the string is empty, does not end with {@code ';'}, or
   *     contains a pair that is not exactly {@code key=value}.
   */
  @Implementation
  protected void setParameters(String keyValuePairs) {
    if (keyValuePairs.isEmpty()) {
      throw new IllegalArgumentException("keyValuePairs should not be empty");
    }

    if (keyValuePairs.charAt(keyValuePairs.length() - 1) != ';') {
      throw new IllegalArgumentException("keyValuePairs should end with a ';'");
    }

    String[] pairs = keyValuePairs.split(";", 0);

    for (String pair : pairs) {
      if (pair.isEmpty()) {
        continue;
      }

      String[] splittedPair = pair.split("=", 0);
      if (splittedPair.length != 2) {
        throw new IllegalArgumentException(
            "keyValuePairs: each pair should be in the format of key=value;");
      }
      parameters.put(splittedPair[0], splittedPair[1]);
    }
  }

  /**
   * The expected composition for keys is not well defined.
   *
   * <p>For testing purposes this method call always returns null.
   */
  @Implementation
  protected String getParameters(String keys) {
    return null;
  }

  /** Returns a single parameter that was set via {@link #setParameters(String)}. */
  public String getParameter(String key) {
    return parameters.get(key);
  }

  /**
   * Implements {@link AudioManager#adjustStreamVolume(int, int, int)}.
   *
   * <p>Currently supports only the directions {@link AudioManager#ADJUST_MUTE} and {@link
   * AudioManager#ADJUST_UNMUTE}; any other direction is a no-op.
   */
  @Implementation
  protected void adjustStreamVolume(int streamType, int direction, int flags) {
    switch (direction) {
      case AudioManager.ADJUST_MUTE:
        streamsMuteState.put(streamType, true);
        break;
      case AudioManager.ADJUST_UNMUTE:
        streamsMuteState.put(streamType, false);
        break;
      default:
        break;
    }
  }

  /** Returns the mute state recorded for {@code streamType}; unknown streams are not muted. */
  @Implementation(minSdk = M)
  protected boolean isStreamMute(int streamType) {
    // Single lookup instead of containsKey() + get(); only Boolean values are ever stored.
    return streamsMuteState.getOrDefault(streamType, false);
  }

  /** Test helper: forces the mute state reported by {@link #isStreamMute(int)}. */
  public void setIsStreamMute(int streamType, boolean isMuted) {
    streamsMuteState.put(streamType, isMuted);
  }

  /**
   * Sets active playback configurations that will be served by {@link
   * AudioManager#getActivePlaybackConfigurations}.
   *
   * <p>Note that there is no public {@link AudioPlaybackConfiguration} constructor, so the
   * configurations returned are specified by their audio attributes only.
   */
  @TargetApi(VERSION_CODES.O)
  public void setActivePlaybackConfigurationsFor(List<AudioAttributes> audioAttributes) {
    activePlaybackConfigurations = new ArrayList<>(audioAttributes.size());
    for (AudioAttributes audioAttribute : audioAttributes) {
      // Marshal the fields AudioPlaybackConfiguration expects, then unmarshal through its
      // CREATOR -- the only way to build an instance without a public constructor.
      Parcel p = Parcel.obtain();
      p.writeInt(0); // mPlayerIId
      p.writeInt(0); // mPlayerType
      p.writeInt(0); // mClientUid
      p.writeInt(0); // mClientPid
      p.writeInt(AudioPlaybackConfiguration.PLAYER_STATE_STARTED); // mPlayerState
      audioAttribute.writeToParcel(p, 0);
      p.writeStrongInterface(null);
      byte[] bytes = p.marshall();
      p.recycle();
      p = Parcel.obtain();
      p.unmarshall(bytes, 0, bytes.length);
      p.setDataPosition(0);
      AudioPlaybackConfiguration configuration =
          AudioPlaybackConfiguration.CREATOR.createFromParcel(p);
      p.recycle();
      activePlaybackConfigurations.add(configuration);
    }
  }

  /** Test helper: forces the value reported by {@link #isMusicActive()}. */
  public void setIsMusicActive(boolean isMusicActive) {
    this.isMusicActive = isMusicActive;
  }

  public AudioFocusRequest getLastAudioFocusRequest() {
    return lastAudioFocusRequest;
  }

  /** Sets the value returned by subsequent focus request/abandon calls. */
  public void setNextFocusRequestResponse(int nextResponseValue) {
    this.nextResponseValue = nextResponseValue;
  }

  public AudioManager.OnAudioFocusChangeListener getLastAbandonedAudioFocusListener() {
    return lastAbandonedAudioFocusListener;
  }

  public android.media.AudioFocusRequest getLastAbandonedAudioFocusRequest() {
    return lastAbandonedAudioFocusRequest;
  }

  /**
   * Value object capturing either a legacy (listener/stream/duration) focus request or a
   * platform {@link android.media.AudioFocusRequest}; the unused representation is null/-1.
   */
  public static class AudioFocusRequest {
    public final AudioManager.OnAudioFocusChangeListener listener;
    public final int streamType;
    public final int durationHint;
    public final android.media.AudioFocusRequest audioFocusRequest;

    private AudioFocusRequest(
        AudioManager.OnAudioFocusChangeListener listener, int streamType, int durationHint) {
      this.listener = listener;
      this.streamType = streamType;
      this.durationHint = durationHint;
      this.audioFocusRequest = null;
    }

    private AudioFocusRequest(android.media.AudioFocusRequest audioFocusRequest) {
      this.listener = null;
      this.streamType = this.durationHint = -1;
      this.audioFocusRequest = audioFocusRequest;
    }
  }

  /** Mutable per-stream state: current volume (clamped to [0, max]), max volume, last flags. */
  private static class AudioStream {
    private int currentVolume;
    private int maxVolume;
    private int flag;

    public AudioStream(int currVol, int maxVol, int flag) {
      // Order matters: maxVolume must be set before clamping would make sense, but the
      // original construction sets current first -- setCurrentVolume clamps against the
      // (still zero-initialized) max only on later calls, matching prior behavior... so keep
      // the original order with setMaxVolume after.
      setCurrentVolume(currVol);
      setMaxVolume(maxVol);
      setFlag(flag);
    }

    public int getCurrentVolume() {
      return currentVolume;
    }

    public int getMaxVolume() {
      return maxVolume;
    }

    public int getFlag() {
      return flag;
    }

    /** Clamps {@code vol} into {@code [0, maxVolume]} before storing it. */
    public void setCurrentVolume(int vol) {
      if (vol > maxVolume) {
        vol = maxVolume;
      } else if (vol < 0) {
        vol = 0;
      }
      currentVolume = vol;
    }

    public void setMaxVolume(int vol) {
      maxVolume = vol;
    }

    public void setFlag(int flag) {
      this.flag = flag;
    }
  }
}
/* * The MIT License * * Copyright 2013 Jesse Glick. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package hudson.model;

import hudson.Extension;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.BuildTrigger;
import hudson.tasks.BuildWrapper;
import hudson.tasks.BuildWrapperDescriptor;
import hudson.tasks.Builder;
import hudson.tasks.Publisher;
import hudson.triggers.Trigger;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import org.apache.commons.io.FileUtils;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.*;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.MockFolder;
import org.jvnet.hudson.test.TestExtension;
import org.jvnet.hudson.test.recipes.LocalData;

/**
 * Tests that item groups (folders) keep loading their remaining children when one child's
 * {@code config.xml} is broken or its extensions misbehave, and that project creation from XML
 * does not resolve external entities.
 */
public class ItemGroupMixInTest {

    @Rule public JenkinsRule r = new JenkinsRule();

    // Loads a @LocalData folder "d" containing one valid job and (per JENKINS-20951) data that
    // triggers a CannotResolveClassException; only the valid job should survive the load.
    @Issue("JENKINS-20951")
    @LocalData
    @Test public void xmlFileReadCannotResolveClassException() throws Exception {
        MockFolder d = r.jenkins.getItemByFullName("d", MockFolder.class);
        assertNotNull(d);
        Collection<TopLevelItem> items = d.getItems();
        assertEquals(1, items.size());
        assertEquals("valid", items.iterator().next().getName());
    }

  /**
   * This test unit makes sure that if part of the config.xml file is
   * deleted it will still load everything else inside the folder.
   * The test unit expects an IOException is thrown, and the one failed
   * job fails to load.
   */
  @Issue("JENKINS-22811")
  @Test
  public void xmlFileFailsToLoad() throws Exception {
    MockFolder folder = r.createFolder("folder");
    assertNotNull(folder);

    AbstractProject project = folder.createProject(FreeStyleProject.class, "job1");
    AbstractProject project2 = folder.createProject(FreeStyleProject.class, "job2");
    AbstractProject project3 = folder.createProject(FreeStyleProject.class, "job3");

    // Keep only the first 5 lines of job1's config.xml before corrupting it.
    File configFile = project.getConfigFile().getFile();

    List<String> lines = FileUtils.readLines(configFile).subList(0, 5);
    // NOTE(review): delete() return value is ignored -- presumably acceptable in a test, but
    // a failed delete would make the corruption step less deterministic; verify.
    configFile.delete();

    // Remove half of the config.xml file to make "invalid" or fail to load
    // NOTE(review): this writes the List#toString() rendering (with brackets/commas) followed
    // by the second half of the lines -- deliberately unparseable XML, though the exact shape
    // looks accidental; confirm the intent is simply "any malformed config.xml".
    FileUtils.writeByteArrayToFile(configFile, lines.toString().getBytes());
    for (int i = lines.size() / 2; i < lines.size(); i++) {
      FileUtils.writeStringToFile(configFile, lines.get(i), true);
    }

    // Reload Jenkins.
    r.jenkins.reload();

    // Folder
    assertNotNull("Folder failed to load.", r.jenkins.getItemByFullName("folder"));
    assertNull("Job should have failed to load.", r.jenkins.getItemByFullName("folder/job1"));
    assertNotNull("Other job in folder should have loaded.", r.jenkins.getItemByFullName("folder/job2"));
    assertNotNull("Other job in folder should have loaded.", r.jenkins.getItemByFullName("folder/job3"));
  }

  /**
   * This test unit makes sure that jobs that contain bad get*Action methods will continue to
   * load the project.
   */
  @LocalData
  @Issue("JENKINS-22811")
  @Test
  public void xmlFileReadExceptionOnLoad() throws Exception {
    // The @LocalData folder "d" holds 5 jobs configured with the Mock*ThrowsError extensions
    // below; all of them must still be present after load despite the throwing extensions.
    MockFolder d = r.jenkins.getItemByFullName("d", MockFolder.class);
    assertNotNull(d);
    Collection<TopLevelItem> items = d.getItems();
    assertEquals(5, items.size());
  }

  // Build wrapper whose project actions always throw, exercising resilience to bad
  // get*Action methods during load (see xmlFileReadExceptionOnLoad).
  @TestExtension
  public static class MockBuildWrapperThrowsError extends BuildWrapper {
    @Override
    public Collection<? extends Action> getProjectActions(AbstractProject project){
      throw new NullPointerException();
    }

    @Extension
    public static class DescriptorImpl extends BuildWrapperDescriptor {
      @Override
      public boolean isApplicable(AbstractProject<?, ?> item) {
        return true;
      }
    }
  }

  // Builder variant of the throwing extension above.
  @TestExtension
  public static class MockBuilderThrowsError extends Builder {
    @Override
    public Collection<? extends Action> getProjectActions(AbstractProject project){
      throw new NullPointerException();
    }

    @Extension public static final Descriptor DESCRIPTOR = new DescriptorImpl();

    public static class DescriptorImpl extends BuildStepDescriptor {
      @Override
      public boolean isApplicable(Class jobType) {
        return false;
      }
    }
  }

  // Trigger variant of the throwing extension above.
  @TestExtension
  public static class MockBuildTriggerThrowsError extends Trigger {
    @Override
    public Collection<? extends Action> getProjectActions() {
      throw new NullPointerException();
    }

    @Extension public static final Descriptor DESCRIPTOR = new BuildTrigger.DescriptorImpl();
  }

  // Publisher variant of the throwing extension above.
  @TestExtension
  public static class MockPublisherThrowsError extends Publisher {
    @Override
    public Collection<? extends Action> getProjectActions(AbstractProject project) {
      throw new NullPointerException();
    }

    @Override
    public BuildStepMonitor getRequiredMonitorService() {
      return null;
    }

    @Extension public static final Descriptor DESCRIPTOR = new DescriptorImpl();

    public static class DescriptorImpl extends BuildStepDescriptor {
      @Override
      public boolean isApplicable(Class jobType) {
        return false;
      }
    }
  }

  // XXE guard: the XML declares an external entity (&foo;); a safe parser must not expand it,
  // leaving the description empty in the stored config.
  @Test
  public void createProjectFromXMLShouldNoCreateEntities() throws IOException {

    final String xml = "<?xml version='1.0' encoding='UTF-8'?>\n" +
            "<!DOCTYPE project[\n" +
            "  <!ENTITY foo SYSTEM \"file:///\">\n" +
            "]>\n" +
            "<project>\n" +
            "  <actions/>\n" +
            "  <description>&foo;</description>\n" +
            "  <keepDependencies>false</keepDependencies>\n" +
            "  <properties/>\n" +
            "  <scm class=\"hudson.scm.NullSCM\"/>\n" +
            "  <canRoam>true</canRoam>\n" +
            "  <triggers/>\n" +
            "  <builders/>\n" +
            "  <publishers/>\n" +
            "  <buildWrappers/>\n" +
            "</project>";

    Item foo = r.jenkins.createProjectFromXML("foo", new ByteArrayInputStream(xml.getBytes()));
    // if no exception then JAXP is swallowing these - so there should be no entity in the description.
    assertThat(Items.getConfigFile(foo).asString(), containsString("<description/>"));
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cache.query.*; import org.apache.ignite.cache.query.annotations.*; import org.apache.ignite.configuration.*; import org.apache.ignite.internal.*; import org.apache.ignite.spi.discovery.tcp.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*; import org.apache.ignite.testframework.junits.common.*; import javax.cache.*; import java.io.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import static org.apache.ignite.cache.CacheAtomicityMode.*; import static org.apache.ignite.cache.CacheMode.*; /** * Based on Yardstick benchmark. 
*/
public class IgniteCacheOffheapTieredMultithreadedSelfTest extends GridCommonAbstractTest {
    /** Shared IP finder so all grids in the test discover each other. */
    private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** Upper bound (exclusive) for person IDs / cache keys used by the workload. */
    private static final int RANGE = 1_000_000;

    /** Cache under test, taken from grid 0 once the grids are started. */
    private static IgniteCache<Integer, Object> cache;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(ipFinder);

        cfg.setDiscoverySpi(disco);

        // Partitioned atomic cache with one backup, stored in OFFHEAP_TIERED memory mode
        // (the configuration this self-test targets); Person is indexed for SQL queries.
        CacheConfiguration<?,?> cacheCfg = new CacheConfiguration<>();

        cacheCfg.setCacheMode(PARTITIONED);
        cacheCfg.setAtomicityMode(ATOMIC);
        cacheCfg.setSwapEnabled(false);
        cacheCfg.setBackups(1);
        cacheCfg.setMemoryMode(CacheMemoryMode.OFFHEAP_TIERED);

        cacheCfg.setIndexedTypes(
            Integer.class, Person.class
        );

        cfg.setCacheConfiguration(cacheCfg);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        startGridsMultiThreaded(3);

        cache = grid(0).cache(null);
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /**
     * Mixed query/put stress test: 64 threads run for 3 minutes, each iteration doing a
     * salary-range SQL query with a 1-in-5 probability and a random put otherwise. A query
     * result outside the requested salary range fails the test.
     *
     * @throws Exception If failed.
     */
    public void testQueryPut() throws Exception {
        final AtomicBoolean end = new AtomicBoolean();

        IgniteInternalFuture<?> fut = multithreadedAsync(new Callable<Void>() {
            @Override public Void call() throws Exception {
                ThreadLocalRandom rnd = ThreadLocalRandom.current();

                while(!end.get()) {
                    if (rnd.nextInt(5) == 0) { // ~20% of iterations run a range query.
                        double salary = rnd.nextDouble() * RANGE * 1000;

                        double maxSalary = salary + 1000;

                        Collection<Cache.Entry<Integer, Object>> entries = executeQuery(salary, maxSalary);

                        // Every returned person must actually fall inside the queried range.
                        for (Cache.Entry<Integer, Object> entry : entries) {
                            Person p = (Person)entry.getValue();

                            if (p.getSalary() < salary || p.getSalary() > maxSalary)
                                throw new Exception("Invalid person retrieved [min=" + salary + ", max=" + maxSalary
                                    + ", person=" + p + ']');
                        }
                    }
                    else { // Remaining iterations overwrite a random key.
                        int i = rnd.nextInt(RANGE);

                        cache.put(i, new Person(i, "firstName" + i, "lastName" + i, i * 1000));
                    }
                }

                return null;
            }
        }, 64);

        // Let the workload run for 3 minutes, then signal the workers to stop.
        Thread.sleep(3 * 60 * 1000);

        end.set(true);

        fut.get();
    }

    /**
     * Runs the salary-range SQL query against the cache.
     *
     * @param minSalary Min salary.
     * @param maxSalary Max salary.
     * @return Query result.
     * @throws Exception If failed.
     */
    private Collection<Cache.Entry<Integer, Object>> executeQuery(double minSalary, double maxSalary) throws Exception {
        SqlQuery qry = new SqlQuery(Person.class, "salary >= ? and salary <= ?");

        qry.setArgs(minSalary, maxSalary);

        return cache.query(qry).getAll();
    }

    /**
     * Person record used for query test.
     */
    public static class Person implements Externalizable {
        /** Person ID. */
        @QuerySqlField(index = true)
        private int id;

        /** Organization ID. */
        @QuerySqlField(index = true)
        private int orgId;

        /** First name (not-indexed). */
        @QuerySqlField
        private String firstName;

        /** Last name (not indexed). */
        @QuerySqlField
        private String lastName;

        /** Salary. */
        @QuerySqlField(index = true)
        private double salary;

        /**
         * Constructs empty person.
         */
        public Person() {
            // No-op.
        }

        /**
         * Constructs person record that is not linked to any organization.
         *
         * @param id Person ID.
         * @param firstName First name.
         * @param lastName Last name.
         * @param salary Salary.
         */
        public Person(int id, String firstName, String lastName, double salary) {
            this(id, 0, firstName, lastName, salary);
        }

        /**
         * Constructs person record.
         *
         * @param id Person ID.
         * @param orgId Organization ID.
         * @param firstName First name.
         * @param lastName Last name.
         * @param salary Salary.
         */
        public Person(int id, int orgId, String firstName, String lastName, double salary) {
            this.id = id;
            this.orgId = orgId;
            this.firstName = firstName;
            this.lastName = lastName;
            this.salary = salary;
        }

        /**
         * @return Person id.
         */
        public int getId() {
            return id;
        }

        /**
         * @param id Person id.
         */
        public void setId(int id) {
            this.id = id;
        }

        /**
         * @return Organization id.
         */
        public int getOrganizationId() {
            return orgId;
        }

        /**
         * @param orgId Organization id.
         */
        public void setOrganizationId(int orgId) {
            this.orgId = orgId;
        }

        /**
         * @return Person first name.
         */
        public String getFirstName() {
            return firstName;
        }

        /**
         * @param firstName Person first name.
         */
        public void setFirstName(String firstName) {
            this.firstName = firstName;
        }

        /**
         * @return Person last name.
         */
        public String getLastName() {
            return lastName;
        }

        /**
         * @param lastName Person last name.
         */
        public void setLastName(String lastName) {
            this.lastName = lastName;
        }

        /**
         * @return Salary.
         */
        public double getSalary() {
            return salary;
        }

        /**
         * @param salary Salary.
         */
        public void setSalary(double salary) {
            this.salary = salary;
        }

        /** {@inheritDoc} */
        @Override public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt(id);
            out.writeInt(orgId);
            out.writeUTF(firstName);
            out.writeUTF(lastName);
            out.writeDouble(salary);
        }

        /** {@inheritDoc} */
        @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            id = in.readInt();
            orgId = in.readInt();
            firstName = in.readUTF();
            lastName = in.readUTF();
            salary = in.readDouble();
        }

        /** {@inheritDoc} */
        // Identity is based on id alone, consistent with hashCode() below.
        @Override public boolean equals(Object o) {
            return this == o || (o instanceof Person) && id == ((Person)o).id;
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            return id;
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return "Person [firstName=" + firstName +
                ", id=" + id +
                ", orgId=" + orgId +
                ", lastName=" + lastName +
                ", salary=" + salary +
                ']';
        }
    }
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. */ package com.microsoft.azure.management.compute; import com.microsoft.azure.management.resources.ResourceGroup; import com.microsoft.azure.management.resources.fluentcore.arm.Region; import com.microsoft.azure.management.resources.fluentcore.model.Creatable; import com.microsoft.rest.RestClient; import org.junit.Assert; import org.junit.Test; import java.util.Map; public class VirtualMachineManagedDiskOperationsTests extends ComputeManagementTest { private static String RG_NAME = ""; private static Region region = Region.US_EAST; private static KnownLinuxVirtualMachineImage linuxImage = KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS; @Override protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) { RG_NAME = generateRandomResourceName("javacsmrg", 15); super.initializeClients(restClient, defaultSubscription, domain); } @Override protected void cleanUpResources() { resourceManager.resourceGroups().beginDeleteByName(RG_NAME); } @Test public void canCreateVirtualMachineFromPIRImageWithManagedOsDisk() { final String vmName1 = "myvm1"; final String publicIpDnsLabel = generateRandomResourceName("pip", 20); final String uname = "juser"; final String password = "123tEst!@|ac"; VirtualMachine virtualMachine = computeManager.virtualMachines() .define(vmName1) .withRegion(region) .withNewResourceGroup(RG_NAME) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withNewPrimaryPublicIPAddress(publicIpDnsLabel) .withPopularLinuxImage(linuxImage) .withRootUsername(uname) .withRootPassword(password) .withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); // Ensure default to managed disk // Assert.assertTrue(virtualMachine.isManagedDiskEnabled()); // Validate caching, size and the default storage 
account type set for the managed disk // backing os disk // Assert.assertNotNull(virtualMachine.osDiskStorageAccountType()); Assert.assertEquals(virtualMachine.osDiskCachingType(), CachingTypes.READ_WRITE); Assert.assertEquals(virtualMachine.size(), VirtualMachineSizeTypes.STANDARD_D5_V2); // Validate the implicit managed disk created by CRP to back the os disk // Assert.assertNotNull(virtualMachine.osDiskId()); Disk osDisk = computeManager.disks().getById(virtualMachine.osDiskId()); Assert.assertTrue(osDisk.isAttachedToVirtualMachine()); Assert.assertEquals(osDisk.osType(), OperatingSystemTypes.LINUX); // Check the auto created public ip // String publicIpId = virtualMachine.getPrimaryPublicIPAddressId(); Assert.assertNotNull(publicIpId); // Validates the options which are valid only for native disks // Assert.assertNull(virtualMachine.osUnmanagedDiskVhdUri()); Assert.assertNotNull(virtualMachine.unmanagedDataDisks()); Assert.assertTrue(virtualMachine.unmanagedDataDisks().size() == 0); } @Test public void canCreateUpdateVirtualMachineWithEmptyManagedDataDisks() { final String publicIpDnsLabel = generateRandomResourceName("pip", 20); final String uname = "juser"; final String password = "123tEst!@|ac"; // Create with implicit + explicit empty disks, check default and override // final String vmName1 = "myvm1"; final String explicitlyCreatedEmptyDiskName1 = generateRandomResourceName(vmName1 + "_mdisk_", 25); final String explicitlyCreatedEmptyDiskName2 = generateRandomResourceName(vmName1 + "_mdisk_", 25); final String explicitlyCreatedEmptyDiskName3 = generateRandomResourceName(vmName1 + "_mdisk_", 25); ResourceGroup resourceGroup = resourceManager.resourceGroups() .define(RG_NAME) .withRegion(region) .create(); Creatable<Disk> creatableEmptyDisk1 = computeManager.disks() .define(explicitlyCreatedEmptyDiskName1) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); Creatable<Disk> creatableEmptyDisk2 = computeManager.disks() 
.define(explicitlyCreatedEmptyDiskName2) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); Creatable<Disk> creatableEmptyDisk3 = computeManager.disks() .define(explicitlyCreatedEmptyDiskName3) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); VirtualMachine virtualMachine = computeManager.virtualMachines() .define(vmName1) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withNewPrimaryPublicIPAddress(publicIpDnsLabel) .withPopularLinuxImage(linuxImage) .withRootUsername(uname) .withRootPassword(password) // Start: Add 5 empty managed disks .withNewDataDisk(100) // CreateOption: EMPTY .withNewDataDisk(100, 1, CachingTypes.READ_ONLY) // CreateOption: EMPTY .withNewDataDisk(creatableEmptyDisk1) // CreateOption: ATTACH .withNewDataDisk(creatableEmptyDisk2, 2, CachingTypes.NONE) // CreateOption: ATTACH .withNewDataDisk(creatableEmptyDisk3, 3, CachingTypes.NONE) // CreateOption: ATTACH // End : Add 5 empty managed disks .withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); Assert.assertTrue(virtualMachine.isManagedDiskEnabled()); // There should not be any un-managed data disks // Assert.assertNotNull(virtualMachine.unmanagedDataDisks()); Assert.assertEquals(virtualMachine.unmanagedDataDisks().size(), 0); // Validate the managed data disks // Map<Integer, VirtualMachineDataDisk> dataDisks = virtualMachine.dataDisks(); Assert.assertNotNull(dataDisks); Assert.assertTrue(dataDisks.size() == 5); Assert.assertTrue(dataDisks.containsKey(1)); VirtualMachineDataDisk dataDiskLun1 = dataDisks.get(1); Assert.assertNotNull(dataDiskLun1.id()); Assert.assertEquals(dataDiskLun1.cachingType(), CachingTypes.READ_ONLY); Assert.assertEquals(dataDiskLun1.size(), 100); Assert.assertTrue(dataDisks.containsKey(2)); VirtualMachineDataDisk dataDiskLun2 = 
dataDisks.get(2); Assert.assertNotNull(dataDiskLun2.id()); Assert.assertEquals(dataDiskLun2.cachingType(), CachingTypes.NONE); Assert.assertEquals(dataDiskLun2.size(), 150); Assert.assertTrue(dataDisks.containsKey(3)); VirtualMachineDataDisk dataDiskLun3 = dataDisks.get(3); Assert.assertNotNull(dataDiskLun3.id()); Assert.assertEquals(dataDiskLun3.cachingType(), CachingTypes.NONE); Assert.assertEquals(dataDiskLun3.size(), 150); // Validate the defaults assigned // for (VirtualMachineDataDisk dataDisk : dataDisks.values()) { if (dataDisk.lun() != 1 && dataDisk.lun() != 2 && dataDisk.lun() != 3) { Assert.assertEquals(dataDisk.cachingType(), CachingTypes.READ_WRITE); Assert.assertEquals(dataDisk.storageAccountType(), StorageAccountTypes.STANDARD_LRS); } } // Updating and adding disk as part of VM Update seems consistency failing, CRP is aware of // this, hence until it is fixed comment-out the test // // { // "startTime": "2017-01-26T05:48:59.9290573+00:00", // "endTime": "2017-01-26T05:49:02.2884052+00:00", // "status": "Failed", // "error": { // "code": "InternalExecutionError", // "message": "An internal execution error occurred." 
// }, // "name": "bc8072a7-38bb-445b-ae59-f16cf125342c" // } // // virtualMachine.deallocate(); // // virtualMachine.update() // .withDataDiskUpdated(1, 200) // .withDataDiskUpdated(2, 200, CachingTypes.READ_WRITE) // .withNewDataDisk(60) // .apply(); // // Assert.assertTrue(virtualMachine.isManagedDiskEnabled()); // // There should not be any un-managed data disks // // // Assert.assertNotNull(virtualMachine.unmanagedDataDisks()); // Assert.assertEquals(virtualMachine.unmanagedDataDisks().size(), 0); // // // Validate the managed data disks // // // dataDisks = virtualMachine.dataDisks(); // Assert.assertNotNull(dataDisks); // Assert.assertTrue(dataDisks.size() == 6); // Assert.assertTrue(dataDisks.containsKey(1)); // dataDiskLun1 = dataDisks.get(1); // Assert.assertNotNull(dataDiskLun1.id()); // Assert.assertEquals(dataDiskLun1.cachingType(), CachingTypes.READ_ONLY); // Assert.assertEquals(dataDiskLun1.size(), 200); // 100 -> 200 // // Assert.assertTrue(dataDisks.containsKey(2)); // dataDiskLun2 = dataDisks.get(2); // Assert.assertNotNull(dataDiskLun2.id()); // Assert.assertEquals(dataDiskLun2.cachingType(), CachingTypes.READ_WRITE); // NONE -> READ_WRITE // Assert.assertEquals(dataDiskLun2.size(), 200); // 150 -> 200 // // Assert.assertTrue(dataDisks.containsKey(3)); // dataDiskLun3 = dataDisks.get(3); // Assert.assertNotNull(dataDiskLun3.id()); // Assert.assertEquals(dataDiskLun3.cachingType(), CachingTypes.NONE); // Assert.assertEquals(dataDiskLun3.size(), 150); // // // Ensure defaults of other disks are not affected // for (VirtualMachineDataDisk dataDisk : dataDisks.values()) { // if (dataDisk.lun() != 1 && dataDisk.lun() != 3) { // Assert.assertEquals(dataDisk.cachingType(), CachingTypes.READ_WRITE); // Assert.assertEquals(dataDisk.storageAccountType(), StorageAccountTypes.STANDARD_LRS); // } // } } @Test public void canCreateVirtualMachineFromCustomImageWithManagedDisks() { final String publicIpDnsLabel = generateRandomResourceName("pip", 20); final 
String uname = "juser"; final String password = "123tEst!@|ac"; // Create with implicit + explicit empty disks, check default and override // final String vmName1 = "myvm1"; final String explicitlyCreatedEmptyDiskName1 = generateRandomResourceName(vmName1 + "_mdisk_", 25); final String explicitlyCreatedEmptyDiskName2 = generateRandomResourceName(vmName1 + "_mdisk_", 25); final String explicitlyCreatedEmptyDiskName3 = generateRandomResourceName(vmName1 + "_mdisk_", 25); ResourceGroup resourceGroup = resourceManager.resourceGroups() .define(RG_NAME) .withRegion(region) .create(); Creatable<Disk> creatableEmptyDisk1 = computeManager.disks() .define(explicitlyCreatedEmptyDiskName1) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); Creatable<Disk> creatableEmptyDisk2 = computeManager.disks() .define(explicitlyCreatedEmptyDiskName2) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); Creatable<Disk> creatableEmptyDisk3 = computeManager.disks() .define(explicitlyCreatedEmptyDiskName3) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); VirtualMachine virtualMachine1 = computeManager.virtualMachines() .define(vmName1) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withNewPrimaryPublicIPAddress(publicIpDnsLabel) .withPopularLinuxImage(linuxImage) .withRootUsername(uname) .withRootPassword(password) // Start: Add bunch of empty managed disks .withNewDataDisk(100) // CreateOption: EMPTY .withNewDataDisk(100, 1, CachingTypes.READ_ONLY) // CreateOption: EMPTY .withNewDataDisk(creatableEmptyDisk1) // CreateOption: ATTACH .withNewDataDisk(creatableEmptyDisk2, 2, CachingTypes.NONE) // CreateOption: ATTACH .withNewDataDisk(creatableEmptyDisk3, 3, CachingTypes.NONE) // CreateOption: ATTACH // End : Add bunch of empty managed disks 
.withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); System.out.println("Waiting for some time before de-provision"); sleep(60 * 1000); // Wait for some time to ensure vm is publicly accessible deprovisionAgentInLinuxVM(virtualMachine1.getPrimaryPublicIPAddress().fqdn(), 22, uname, password); virtualMachine1.deallocate(); virtualMachine1.generalize(); final String customImageName = generateRandomResourceName("img-", 10); VirtualMachineCustomImage customImage = computeManager.virtualMachineCustomImages().define(customImageName) .withRegion(region) .withExistingResourceGroup(resourceGroup) .fromVirtualMachine(virtualMachine1) .create(); Assert.assertNotNull(customImage); Assert.assertNotNull(customImage.sourceVirtualMachineId()); Assert.assertTrue(customImage.sourceVirtualMachineId().equalsIgnoreCase(virtualMachine1.id().toLowerCase())); Assert.assertNotNull(customImage.osDiskImage()); Assert.assertEquals(customImage.osDiskImage().osState(), OperatingSystemStateTypes.GENERALIZED); Assert.assertEquals(customImage.osDiskImage().osType(), OperatingSystemTypes.LINUX); Assert.assertNotNull(customImage.dataDiskImages()); Assert.assertEquals(customImage.dataDiskImages().size(), 5); for (ImageDataDisk imageDataDisk : customImage.dataDiskImages().values()) { Assert.assertNull(imageDataDisk.blobUri()); Assert.assertNotNull(imageDataDisk.managedDisk().id()); } // Create virtual machine from the custom image // This one relies on CRP's capability to create implicit data disks from the virtual machine // image data disk images. 
// final String vmName2 = "myvm2"; VirtualMachine virtualMachine2 = computeManager.virtualMachines() .define(vmName2) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withLinuxCustomImage(customImage.id()) .withRootUsername(uname) .withRootPassword(password) // No explicit data disks, let CRP create it from the image's data disk images .withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); Map<Integer, VirtualMachineDataDisk> dataDisks = virtualMachine2.dataDisks(); Assert.assertNotNull(dataDisks); Assert.assertEquals(dataDisks.size(), customImage.dataDiskImages().size()); for (ImageDataDisk imageDataDisk : customImage.dataDiskImages().values()) { Assert.assertTrue(dataDisks.containsKey(imageDataDisk.lun())); VirtualMachineDataDisk dataDisk = dataDisks.get(imageDataDisk.lun()); Assert.assertEquals(dataDisk.cachingType(), imageDataDisk.caching()); // Fails due to CRP bug: Managed disk size is not returned on gets. 
// Assert.assertEquals(dataDisk.size(), (long) imageDataDisk.diskSizeGB()); } // Create virtual machine from the custom image // This one override the size and caching type of data disks from data disk images and // adds one additional disk // final String vmName3 = "myvm3"; VirtualMachine.DefinitionStages.WithManagedCreate creatableVirtualMachine3 = computeManager.virtualMachines() .define(vmName3) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withLinuxCustomImage(customImage.id()) .withRootUsername(uname) .withRootPassword(password); for (ImageDataDisk dataDiskImage : customImage.dataDiskImages().values()) { // Explicitly override the properties of the data disks created from disk image // // CreateOption: FROM_IMAGE VirtualMachineDataDisk dataDisk = dataDisks.get(dataDiskImage.lun()); creatableVirtualMachine3.withNewDataDiskFromImage(dataDiskImage.lun(), dataDisk.size() + 10, // increase size by 10 GB CachingTypes.READ_ONLY); } VirtualMachine virtualMachine3 = creatableVirtualMachine3 .withNewDataDisk(200) // CreateOption: EMPTY .withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); dataDisks = virtualMachine3.dataDisks(); Assert.assertNotNull(dataDisks); Assert.assertEquals(dataDisks.size(), customImage.dataDiskImages().size() + 1 /* count one extra empty disk */); for (ImageDataDisk imageDataDisk : customImage.dataDiskImages().values()) { Assert.assertTrue(dataDisks.containsKey(imageDataDisk.lun())); VirtualMachineDataDisk dataDisk = dataDisks.get(imageDataDisk.lun()); Assert.assertEquals(dataDisk.cachingType(), CachingTypes.READ_ONLY); // Fails due to CRP bug: Managed disk size is not returned on gets. 
// Assert.assertEquals(dataDisk.size(), (long) imageDataDisk.diskSizeGB() + 10); } } @Test public void canUpdateVirtualMachineByAddingAndRemovingManagedDisks() { final String publicIpDnsLabel = generateRandomResourceName("pip", 20); final String uname = "juser"; final String password = "123tEst!@|ac"; // Create with implicit + explicit empty disks, check default and override // final String vmName1 = "myvm1"; final String explicitlyCreatedEmptyDiskName1 = generateRandomResourceName(vmName1 + "_mdisk_", 25); final String explicitlyCreatedEmptyDiskName2 = generateRandomResourceName(vmName1 + "_mdisk_", 25); final String explicitlyCreatedEmptyDiskName3 = generateRandomResourceName(vmName1 + "_mdisk_", 25); ResourceGroup resourceGroup = resourceManager.resourceGroups() .define(RG_NAME) .withRegion(region) .create(); Creatable<Disk> creatableEmptyDisk1 = computeManager.disks() .define(explicitlyCreatedEmptyDiskName1) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); Creatable<Disk> creatableEmptyDisk2 = computeManager.disks() .define(explicitlyCreatedEmptyDiskName2) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); Creatable<Disk> creatableEmptyDisk3 = computeManager.disks() .define(explicitlyCreatedEmptyDiskName3) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withData() .withSizeInGB(150); VirtualMachine virtualMachine1 = computeManager.virtualMachines().define(vmName1) .withRegion(region) .withExistingResourceGroup(resourceGroup) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withNewPrimaryPublicIPAddress(publicIpDnsLabel) .withPopularLinuxImage(linuxImage) .withRootUsername(uname) .withRootPassword(password) // Start: Add bunch of empty managed disks .withNewDataDisk(100) // CreateOption: EMPTY .withNewDataDisk(100, 1, CachingTypes.READ_WRITE) // CreateOption: EMPTY .withNewDataDisk(creatableEmptyDisk1) // CreateOption: ATTACH 
.withNewDataDisk(creatableEmptyDisk2, 2, CachingTypes.NONE) // CreateOption: ATTACH .withNewDataDisk(creatableEmptyDisk3, 3, CachingTypes.NONE) // CreateOption: ATTACH // End : Add bunch of empty managed disks .withDataDiskDefaultCachingType(CachingTypes.READ_ONLY) .withDataDiskDefaultStorageAccountType(StorageAccountTypes.STANDARD_LRS) .withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); virtualMachine1.update() .withoutDataDisk(1) .withNewDataDisk(100, 6, CachingTypes.READ_WRITE) // CreateOption: EMPTY .apply(); Map<Integer, VirtualMachineDataDisk> dataDisks = virtualMachine1.dataDisks(); Assert.assertNotNull(dataDisks); Assert.assertEquals(dataDisks.size(), 5); // Removed one added another Assert.assertTrue(dataDisks.containsKey(6)); Assert.assertFalse(dataDisks.containsKey(1)); } @Test public void canCreateVirtualMachineByAttachingManagedOsDisk() { final String uname = "juser"; final String password = "123tEst!@|ac"; final String vmName = "myvm6"; // Creates a native virtual machine // VirtualMachine nativeVm = computeManager.virtualMachines() .define(vmName) .withRegion(region) .withNewResourceGroup(RG_NAME) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withLatestLinuxImage("Canonical", "UbuntuServer", "14.04.2-LTS") .withRootUsername(uname) .withRootPassword(password) .withUnmanagedDisks() /* UN-MANAGED OS and DATA DISKS */ .withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withNewStorageAccount(generateRandomResourceName("stg", 17)) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); Assert.assertFalse(nativeVm.isManagedDiskEnabled()); String osVhdUri = nativeVm.osUnmanagedDiskVhdUri(); Assert.assertNotNull(osVhdUri); computeManager.virtualMachines().deleteById(nativeVm.id()); final String diskName = generateRandomResourceName("dsk-", 15); Disk osDisk = computeManager.disks().define(diskName) .withRegion(region) 
.withExistingResourceGroup(RG_NAME) .withLinuxFromVhd(osVhdUri) .create(); // Creates a managed virtual machine // VirtualMachine managedVm = computeManager.virtualMachines() .define(vmName) .withRegion(region) .withExistingResourceGroup(RG_NAME) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withSpecializedOSDisk(osDisk, OperatingSystemTypes.LINUX) .withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); Assert.assertTrue(managedVm.isManagedDiskEnabled()); Assert.assertTrue(managedVm.osDiskId().equalsIgnoreCase(osDisk.id().toLowerCase())); } @Test public void canCreateVirtualMachineWithManagedDiskInManagedAvailabilitySet() { final String availSetName = generateRandomResourceName("av-", 15); final String uname = "juser"; final String password = "123tEst!@|ac"; final String vmName = "myvm6"; VirtualMachine managedVm = computeManager.virtualMachines() .define(vmName) .withRegion(region) .withNewResourceGroup(RG_NAME) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withPopularLinuxImage(linuxImage) .withRootUsername(uname) .withRootPassword(password) .withNewDataDisk(100) .withNewDataDisk(100, 1, CachingTypes.READ_ONLY) .withNewDataDisk(100, 2, CachingTypes.READ_WRITE, StorageAccountTypes.STANDARD_LRS) .withNewAvailabilitySet(availSetName) // Default to managed availability set .withSize(VirtualMachineSizeTypes.STANDARD_D5_V2) .withOSDiskCaching(CachingTypes.READ_WRITE) .create(); Assert.assertNotNull(managedVm.availabilitySetId()); AvailabilitySet availabilitySet = computeManager.availabilitySets().getById(managedVm.availabilitySetId()); Assert.assertTrue(availabilitySet.virtualMachineIds().size() > 0); Assert.assertEquals(availabilitySet.sku(), AvailabilitySetSkuTypes.MANAGED); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.jexl3.internal.introspection;

import org.apache.commons.jexl3.JexlArithmetic;
import org.apache.commons.jexl3.JexlEngine;
import org.apache.commons.jexl3.JexlOperator;
import org.apache.commons.jexl3.introspection.JexlMethod;
import org.apache.commons.jexl3.introspection.JexlPermissions;
import org.apache.commons.jexl3.introspection.JexlPropertyGet;
import org.apache.commons.jexl3.introspection.JexlPropertySet;
import org.apache.commons.jexl3.introspection.JexlUberspect;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.EnumSet;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.ConcurrentHashMap;
import java.lang.ref.Reference;
import java.lang.ref.SoftReference;
import java.util.List;

/**
 * Implementation of Uberspect to provide the default introspective
 * functionality of JEXL.
 * <p>
 * This is the class to derive to customize introspection.</p>
 *
 * @since 1.0
 */
public class Uberspect implements JexlUberspect {
    /** Publicly exposed special failure object returned by tryInvoke. */
    public static final Object TRY_FAILED = JexlEngine.TRY_FAILED;
    /** The logger to use for all warnings and errors. */
    protected final Log logger;
    /** The resolver strategy; maps (operator, object) pairs to an ordered list of property resolvers. */
    private final JexlUberspect.ResolverStrategy strategy;
    /** The permissions that restrict which classes/members introspection may expose. */
    private final JexlPermissions permissions;
    /** The introspector version; bumped each time a new Introspector instance is created. */
    private final AtomicInteger version;
    /** The soft reference to the introspector currently in use. */
    private volatile Reference<Introspector> ref;
    /** The class loader reference; used to recreate the introspector when necessary. */
    private volatile Reference<ClassLoader> loader;
    /**
     * The map from arithmetic classes to overloaded operator sets.
     * <p>
     * This keeps track of which operator methods are overloaded per JexlArithmetic class,
     * allowing a fail-fast test during interpretation by avoiding seeking a method when there is none.
     */
    private final Map<Class<? extends JexlArithmetic>, Set<JexlOperator>> operatorMap;

    /**
     * Creates a new Uberspect with default (null) permissions.
     * @param runtimeLogger the logger used for all logging needs
     * @param sty the resolver strategy
     */
    public Uberspect(final Log runtimeLogger, final JexlUberspect.ResolverStrategy sty) {
        this(runtimeLogger, sty, null);
    }

    /**
     * Creates a new Uberspect.
     * <p>Null arguments fall back to defaults: the JexlEngine log, the JEXL strategy
     * and the default permissions.</p>
     * @param runtimeLogger the logger used for all logging needs
     * @param sty the resolver strategy
     * @param perms the introspector permissions
     */
    public Uberspect(final Log runtimeLogger, final JexlUberspect.ResolverStrategy sty, final JexlPermissions perms) {
        logger = runtimeLogger == null? LogFactory.getLog(JexlEngine.class) : runtimeLogger;
        strategy = sty == null? JexlUberspect.JEXL_STRATEGY : sty;
        permissions = perms == null? Permissions.DEFAULT : perms;
        // start with an empty soft reference; base() lazily creates the introspector on first use
        ref = new SoftReference<Introspector>(null);
        loader = new SoftReference<ClassLoader>(getClass().getClassLoader());
        operatorMap = new ConcurrentHashMap<Class<? extends JexlArithmetic>, Set<JexlOperator>>();
        version = new AtomicInteger(0);
    }

    /**
     * Gets the current introspector base.
     * <p>
     * If the reference has been collected, this method will recreate the underlying introspector.</p>
     * @return the introspector
     */
    // CSOFF: DoubleCheckedLocking
    protected final Introspector base() {
        Introspector intro = ref.get();
        if (intro == null) {
            // double checked locking is ok (fixed by Java 5 memory model).
            synchronized (this) {
                intro = ref.get();
                if (intro == null) {
                    intro = new Introspector(logger, loader.get(), permissions);
                    ref = new SoftReference<Introspector>(intro);
                    loader = new SoftReference<ClassLoader>(intro.getLoader());
                    // a fresh introspector invalidates any previously cached reflective data
                    version.incrementAndGet();
                }
            }
        }
        return intro;
    }
    // CSON: DoubleCheckedLocking

    @Override
    public void setClassLoader(final ClassLoader nloader) {
        synchronized (this) {
            Introspector intro = ref.get();
            if (intro != null) {
                intro.setLoader(nloader);
            } else {
                intro = new Introspector(logger, nloader, permissions);
                ref = new SoftReference<Introspector>(intro);
            }
            loader = new SoftReference<ClassLoader>(intro.getLoader());
            // operator overload discovery is class-loader dependent; drop the cache
            operatorMap.clear();
            version.incrementAndGet();
        }
    }

    @Override
    public ClassLoader getClassLoader() {
        return loader.get();
    }

    @Override
    public int getVersion() {
        return version.intValue();
    }

    /**
     * Gets a class by name through this introspector class loader.
     * @param className the class name
     * @return the class instance or null if it could not be found
     */
    public final Class<?> getClassByName(final String className) {
        return base().getClassByName(className);
    }

    /**
     * Gets the field named by
     * <code>key</code> for the class
     * <code>c</code>.
     *
     * @param c   Class in which the field search is taking place
     * @param key Name of the field being searched for
     * @return a {@link java.lang.reflect.Field} or null if it does not exist or is not accessible
     */
    public final Field getField(final Class<?> c, final String key) {
        return base().getField(c, key);
    }

    /**
     * Gets the accessible field names known for a given class.
     * @param c the class
     * @return the class field names
     */
    public final String[] getFieldNames(final Class<?> c) {
        return base().getFieldNames(c);
    }

    /**
     * Gets the method defined by
     * <code>name</code> and
     * <code>params</code> for the Class
     * <code>c</code>.
     *
     * @param c      Class in which the method search is taking place
     * @param name   Name of the method being searched for
     * @param params An array of Objects (not Classes) that describe the
     *               the parameters
     *
     * @return a {@link java.lang.reflect.Method}
     *         or null if no unambiguous method could be found through introspection.
     */
    public final Method getMethod(final Class<?> c, final String name, final Object[] params) {
        return base().getMethod(c, new MethodKey(name, params));
    }

    /**
     * Gets the method defined by
     * <code>key</code> and for the Class
     * <code>c</code>.
     *
     * @param c   Class in which the method search is taking place
     * @param key MethodKey of the method being searched for
     *
     * @return a {@link java.lang.reflect.Method}
     *         or null if no unambiguous method could be found through introspection.
     */
    public final Method getMethod(final Class<?> c, final MethodKey key) {
        return base().getMethod(c, key);
    }

    /**
     * Gets the accessible methods names known for a given class.
     * @param c the class
     * @return the class method names
     */
    public final String[] getMethodNames(final Class<?> c) {
        return base().getMethodNames(c);
    }

    /**
     * Gets all the methods with a given name from this map.
     * @param c          the class
     * @param methodName the seeked methods name
     * @return the array of methods
     */
    public final Method[] getMethods(final Class<?> c, final String methodName) {
        return base().getMethods(c, methodName);
    }

    @Override
    public JexlMethod getMethod(final Object obj, final String method, final Object... args) {
        return MethodExecutor.discover(base(), obj, method, args);
    }

    @Override
    public List<PropertyResolver> getResolvers(final JexlOperator op, final Object obj) {
        return strategy.apply(op, obj);
    }

    @Override
    public JexlPropertyGet getPropertyGet(final Object obj, final Object identifier) {
        return getPropertyGet(null, obj, identifier);
    }

    @Override
    public JexlPropertyGet getPropertyGet(
            final List<PropertyResolver> resolvers, final Object obj, final Object identifier
    ) {
        final Class<?> claz = obj.getClass();
        final String property = AbstractExecutor.castString(identifier);
        final Introspector is = base();
        // when no explicit resolver list is given, ask the strategy (null operator = property access)
        final List<PropertyResolver> r = resolvers == null? strategy.apply(null, obj) : resolvers;
        JexlPropertyGet executor = null;
        // resolvers are tried in order; the first non-null executor wins
        for (final PropertyResolver resolver : r) {
            if (resolver instanceof JexlResolver) {
                switch ((JexlResolver) resolver) {
                    case PROPERTY:
                        // first try for a getFoo() type of property (also getfoo() )
                        executor = PropertyGetExecutor.discover(is, claz, property);
                        if (executor == null) {
                            // then try the isFoo() boolean-getter convention
                            executor = BooleanGetExecutor.discover(is, claz, property);
                        }
                        break;
                    case MAP:
                        // let's see if we are a map...
                        executor = MapGetExecutor.discover(is, claz, identifier);
                        break;
                    case LIST:
                        // let's see if this is a list or array
                        final Integer index = AbstractExecutor.castInteger(identifier);
                        if (index != null) {
                            executor = ListGetExecutor.discover(is, claz, index);
                        }
                        break;
                    case DUCK:
                        // if that didn't work, look for get(foo)
                        executor = DuckGetExecutor.discover(is, claz, identifier);
                        // NOTE: reference (!=) comparison is deliberate here - it detects whether
                        // castString produced a different object than the raw identifier
                        if (executor == null && property != null && property != identifier) {
                            // look for get("foo") if we did not try yet (just above)
                            executor = DuckGetExecutor.discover(is, claz, property);
                        }
                        break;
                    case FIELD:
                        // a field may be? (can not be a number)
                        executor = FieldGetExecutor.discover(is, claz, property);
                        // static class fields (enums included); overrides the instance-field lookup
                        if (obj instanceof Class<?>) {
                            executor = FieldGetExecutor.discover(is, (Class<?>) obj, property);
                        }
                        break;
                    case CONTAINER:
                        // or an indexed property?
                        executor = IndexedType.discover(is, obj, property);
                        break;
                    default:
                        continue; // in case we add new ones in enum
                }
            } else {
                // user-supplied resolver: delegate entirely
                executor = resolver.getPropertyGet(this, obj, identifier);
            }
            if (executor != null) {
                return executor;
            }
        }
        return null;
    }

    @Override
    public JexlPropertySet getPropertySet(final Object obj, final Object identifier, final Object arg) {
        return getPropertySet(null, obj, identifier, arg);
    }

    @Override
    public JexlPropertySet getPropertySet(
            final List<PropertyResolver> resolvers, final Object obj, final Object identifier, final Object arg
    ) {
        final Class<?> claz = obj.getClass();
        final String property = AbstractExecutor.castString(identifier);
        final Introspector is = base();
        // when no explicit resolver list is given, ask the strategy (null operator = property access)
        final List<PropertyResolver> actual = resolvers == null? strategy.apply(null, obj) : resolvers;
        JexlPropertySet executor = null;
        // resolvers are tried in order; the first non-null executor wins
        for (final PropertyResolver resolver : actual) {
            if (resolver instanceof JexlResolver) {
                switch ((JexlResolver) resolver) {
                    case PROPERTY:
                        // first try for a setFoo() type of property (also setfoo() )
                        executor = PropertySetExecutor.discover(is, claz, property, arg);
                        break;
                    case MAP:
                        // let's see if we are a map...
                        executor = MapSetExecutor.discover(is, claz, identifier, arg);
                        break;
                    case LIST:
                        // let's see if we can convert the identifier to an int,
                        // if obj is an array or a list, we can still do something
                        final Integer index = AbstractExecutor.castInteger(identifier);
                        if (index != null) {
                            executor = ListSetExecutor.discover(is, claz, identifier, arg);
                        }
                        break;
                    case DUCK:
                        // if that didn't work, look for set(foo)
                        executor = DuckSetExecutor.discover(is, claz, identifier, arg);
                        // NOTE: reference (!=) comparison is deliberate; see getPropertyGet
                        if (executor == null && property != null && property != identifier) {
                            executor = DuckSetExecutor.discover(is, claz, property, arg);
                        }
                        break;
                    case FIELD:
                        // a field may be?
                        executor = FieldSetExecutor.discover(is, claz, property, arg);
                        break;
                    case CONTAINER:
                        // CONTAINER has no set counterpart; fall through to default
                    default:
                        continue; // in case we add new ones in enum
                }
            } else {
                // user-supplied resolver: delegate entirely
                executor = resolver.getPropertySet(this, obj, identifier, arg);
            }
            if (executor != null) {
                return executor;
            }
        }
        return null;
    }

    @Override
    @SuppressWarnings("unchecked")
    public Iterator<?> getIterator(final Object obj) {
        // fast paths for the common iterable shapes, in order of check
        if (obj instanceof Iterator<?>) {
            return ((Iterator<?>) obj);
        }
        if (obj.getClass().isArray()) {
            return new ArrayIterator(obj);
        }
        if (obj instanceof Map<?, ?>) {
            // maps iterate over their values, not their entries
            return ((Map<?, ?>) obj).values().iterator();
        }
        if (obj instanceof Enumeration<?>) {
            return new EnumerationIterator<Object>((Enumeration<Object>) obj);
        }
        if (obj instanceof Iterable<?>) {
            return ((Iterable<?>) obj).iterator();
        }
        try {
            // look for an iterator() method to support the JDK5 Iterable
            // interface or any user tools/DTOs that want to work in
            // foreach without implementing the Collection interface
            final JexlMethod it = getMethod(obj, "iterator", (Object[]) null);
            if (it != null && Iterator.class.isAssignableFrom(it.getReturnType())) {
                return (Iterator<Object>) it.invoke(obj, (Object[]) null);
            }
        } catch (final Exception xany) {
            // NOTE(review): guard checks isDebugEnabled() but logs at info level;
            // likely should be logger.debug - confirm intent before changing
            if (logger != null && logger.isDebugEnabled()) {
                logger.info("unable to solve iterator()", xany);
            }
        }
        return null;
    }

    @Override
    public JexlMethod getConstructor(final Object ctorHandle, final Object... args) {
        return ConstructorMethod.discover(base(), ctorHandle, args);
    }

    /**
     * The concrete uberspect Arithmetic class.
     * <p>Binds a JexlArithmetic instance to its pre-computed set of overloaded
     * operators so interpretation can fail fast when no overload exists.</p>
     */
    protected class ArithmeticUberspect implements JexlArithmetic.Uberspect {
        /** The arithmetic instance being analyzed. */
        private final JexlArithmetic arithmetic;
        /** The set of overloaded operators. */
        private final Set<JexlOperator> overloads;

        /**
         * Creates an instance.
         * @param theArithmetic the arithmetic instance
         * @param theOverloads  the overloaded operators
         */
        ArithmeticUberspect(final JexlArithmetic theArithmetic, final Set<JexlOperator> theOverloads) {
            this.arithmetic = theArithmetic;
            this.overloads = theOverloads;
        }

        @Override
        public JexlMethod getOperator(final JexlOperator operator, final Object... args) {
            // only attempt (reflective) method discovery when the operator is known to be overloaded
            return overloads.contains(operator) && args != null
                    ? getMethod(arithmetic, operator.getMethodName(), args)
                    : null;
        }

        @Override
        public boolean overloads(final JexlOperator operator) {
            return overloads.contains(operator);
        }
    }

    @Override
    public JexlArithmetic.Uberspect getArithmetic(final JexlArithmetic arithmetic) {
        JexlArithmetic.Uberspect jau = null;
        if (arithmetic != null) {
            final Class<? extends JexlArithmetic> aclass = arithmetic.getClass();
            // overload discovery is cached per arithmetic class (cleared on class-loader change)
            Set<JexlOperator> ops = operatorMap.get(aclass);
            if (ops == null) {
                ops = EnumSet.noneOf(JexlOperator.class);
                // deal only with derived classes
                if (!JexlArithmetic.class.equals(aclass)) {
                    for (final JexlOperator op : JexlOperator.values()) {
                        final Method[] methods = getMethods(arithmetic.getClass(), op.getMethodName());
                        if (methods != null) {
                            // NOTE(review): the mloop label is unused (no 'continue mloop')
                            mloop:
                            for (final Method method : methods) {
                                final Class<?>[] parms = method.getParameterTypes();
                                if (parms.length != op.getArity()) {
                                    continue;
                                }
                                // filter method that is an actual overload:
                                // - not inherited (not declared by base class)
                                // - nor overridden (not present in base class)
                                if (!JexlArithmetic.class.equals(method.getDeclaringClass())) {
                                    try {
                                        JexlArithmetic.class.getMethod(method.getName(), method.getParameterTypes());
                                    } catch (final NoSuchMethodException xmethod) {
                                        // method was not found in JexlArithmetic; this is an operator definition
                                        ops.add(op);
                                    }
                                }
                            }
                        }
                    }
                }
                // register this arithmetic class in the operator map
                operatorMap.put(aclass, ops);
            }
            jau = new ArithmeticUberspect(arithmetic, ops);
        }
        return jau;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements.See the NOTICE file * distributed with this work for additional information * regarding copyright ownership.The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License.You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.lang3.tuple.MutablePair; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.persistence.RowContainer; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.HiveInputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.BucketMapJoinContext; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.MapredLocalWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.SMBJoinDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; import 
org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.PriorityQueue;
import org.apache.hive.common.util.ReflectionUtil;

/**
 * Sorted Merge Map Join Operator.
 *
 * Joins a "big" table streamed by the mapper against one or more "small"
 * tables whose bucket files are read on demand through {@link MergeQueue}s.
 * All inputs are assumed sorted on the join key, so the join proceeds by
 * advancing each side one key-group at a time and merging the groups.
 */
public class SMBMapJoinOperator extends AbstractMapJoinOperator<SMBJoinDesc> implements Serializable {

  private static final long serialVersionUID = 1L;
  private static final Logger LOG = LoggerFactory.getLogger(SMBMapJoinOperator.class.getName());

  private MapredLocalWork localWork = null;
  // One merge queue per small-table alias; populated in initializeMapredLocalWork.
  private Map<String, MergeQueue> aliasToMergeQueue = Collections.emptyMap();

  // Per-alias state, indexed by tag/position:
  //  - keyWritables[pos]      key of the current group being joined
  //  - nextKeyWritables[pos]  key of the group fetched ahead (not yet current)
  //  - candidateStorage[pos]  rows of the current group
  //  - nextGroupStorage[pos]  rows of the look-ahead group
  transient List<Object>[] keyWritables;
  transient List<Object>[] nextKeyWritables;
  RowContainer<List<Object>>[] nextGroupStorage;
  RowContainer<List<Object>>[] candidateStorage;

  transient String[] tagToAlias;
  private transient boolean[] fetchDone;
  private transient boolean[] foundNextKeyGroup;
  transient boolean firstFetchHappened = false;
  private transient boolean inputFileChanged = false;
  transient boolean localWorkInited = false;
  transient boolean initDone = false;

  // This join has been converted to a SMB join by the hive optimizer. The user did not
  // give a mapjoin hint in the query. The hive optimizer figured out that the join can be
  // performed as a smb join, based on all the tables/partitions being joined.
  private transient boolean convertedAutomaticallySMBJoin = false;

  /** Kryo ctor. */
  protected SMBMapJoinOperator() {
    super();
  }

  public SMBMapJoinOperator(CompilationOpContext ctx) {
    super(ctx);
  }

  /** Copy-style constructor used when converting an existing map join into an SMB join. */
  public SMBMapJoinOperator(AbstractMapJoinOperator<? extends MapJoinDesc> mapJoinOp) {
    super(mapJoinOp);
  }

  /**
   * Allocates the per-alias group storage and resets the per-input-file flags.
   * Sizing is based on the number of join aliases ({@code order.length}).
   */
  @Override
  protected void initializeOp(Configuration hconf) throws HiveException {
    // If there is a sort-merge join followed by a regular join, the SMBJoinOperator may not
    // get initialized at all. Consider the following query:
    // A SMB B JOIN C
    // For the mapper processing C, The SMJ is not initialized, no need to close it either.
    initDone = true;
    super.initializeOp(hconf);
    closeCalled = false;
    this.firstFetchHappened = false;
    this.inputFileChanged = false;

    // get the largest table alias from order
    int maxAlias = 0;
    for (byte pos = 0; pos < order.length; pos++) {
      if (pos > maxAlias) {
        maxAlias = pos;
      }
    }
    maxAlias += 1;

    nextGroupStorage = new RowContainer[maxAlias];
    candidateStorage = new RowContainer[maxAlias];
    keyWritables = new ArrayList[maxAlias];
    nextKeyWritables = new ArrayList[maxAlias];
    fetchDone = new boolean[maxAlias];
    foundNextKeyGroup = new boolean[maxAlias];

    int bucketSize;

    // For backwards compatibility reasons we honor the older
    // HIVEMAPJOINBUCKETCACHESIZE if set different from default.
    // By hive 0.13 we should remove this code.
    int oldVar = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEMAPJOINBUCKETCACHESIZE);
    if (oldVar != 100) {
      bucketSize = oldVar;
    } else {
      bucketSize = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVESMBJOINCACHEROWS);
    }

    for (byte pos = 0; pos < order.length; pos++) {
      // Two containers per alias: one for the current group, one for the look-ahead group.
      RowContainer<List<Object>> rc = JoinUtil.getRowContainer(hconf,
          rowContainerStandardObjectInspectors[pos],
          pos, bucketSize,spillTableDesc, conf, !hasFilter(pos), reporter);
      nextGroupStorage[pos] = rc;
      RowContainer<List<Object>> candidateRC = JoinUtil.getRowContainer(hconf,
          rowContainerStandardObjectInspectors[pos],
          pos, bucketSize,spillTableDesc, conf, !hasFilter(pos), reporter);
      candidateStorage[pos] = candidateRC;
    }

    tagToAlias = conf.convertToArray(conf.getTagToAlias(), String.class);

    for (byte pos = 0; pos < order.length; pos++) {
      if (pos != posBigTable) {
        fetchDone[pos] = false;
      }
      foundNextKeyGroup[pos] = false;
    }
  }

  @Override
  public void initializeLocalWork(Configuration hconf) throws HiveException {
    initializeMapredLocalWork(this.getConf(), hconf, this.getConf().getLocalWork(), LOG);
    super.initializeLocalWork(hconf);
  }

  /**
   * Builds one {@link MergeQueue} per small-table alias from the local work's
   * fetch descriptors. Idempotent: returns immediately when local work is
   * absent or already initialized.
   */
  public void initializeMapredLocalWork(MapJoinDesc mjConf, Configuration hconf,
      MapredLocalWork localWork, Logger l4j) throws HiveException {
    if (localWork == null || localWorkInited) {
      return;
    }
    localWorkInited = true;
    this.localWork = localWork;
    aliasToMergeQueue = new HashMap<String, MergeQueue>();

    // create map local operators
    Map<String,FetchWork> aliasToFetchWork = localWork.getAliasToFetchWork();
    Map<String, Operator<? extends OperatorDesc>> aliasToWork = localWork.getAliasToWork();
    Map<String, DummyStoreOperator> aliasToSinkWork = conf.getAliasToSink();

    // The operator tree till the sink operator needs to be processed while
    // fetching the next row to fetch from the priority queue (possibly containing
    // multiple files in the small table given a file in the big table). The remaining
    // tree will be processed while processing the join.
    // Look at comments in DummyStoreOperator for additional explanation.
    for (Map.Entry<String, FetchWork> entry : aliasToFetchWork.entrySet()) {
      String alias = entry.getKey();
      FetchWork fetchWork = entry.getValue();

      // Clone the job conf so per-alias settings (credentials, projections,
      // filters) do not leak between aliases.
      JobConf jobClone = new JobConf(hconf);

      if (UserGroupInformation.isSecurityEnabled()) {
        String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        if(hadoopAuthToken != null){
          jobClone.set("mapreduce.job.credentials.binary", hadoopAuthToken);
        }
      }

      TableScanOperator ts = (TableScanOperator)aliasToWork.get(alias);

      // push down projections
      ColumnProjectionUtils.appendReadColumns(
          jobClone, ts.getNeededColumnIDs(), ts.getNeededColumns(), ts.getNeededNestedColumnPaths());

      // push down filters
      HiveInputFormat.pushFilters(jobClone, ts, null);

      AcidUtils.setAcidOperationalProperties(jobClone, ts.getConf().isTranscationalTable(),
          ts.getConf().getAcidOperationalProperties());
      AcidUtils.setValidWriteIdList(jobClone, ts.getConf());

      ts.passExecContext(getExecContext());

      // A throwaway FetchOperator is created only to obtain the output object
      // inspector for initializing the table scan; its context is cleared right after.
      FetchOperator fetchOp = new FetchOperator(fetchWork, jobClone);
      ts.initialize(jobClone, new ObjectInspector[]{fetchOp.getOutputObjectInspector()});
      fetchOp.clearFetchContext();

      DummyStoreOperator sinkOp = aliasToSinkWork.get(alias);

      MergeQueue mergeQueue = new MergeQueue(alias, fetchWork, jobClone, ts, sinkOp);

      aliasToMergeQueue.put(alias, mergeQueue);
      l4j.info("fetch operators for " + alias + " initialized");
    }
  }

  /** Returns the tag whose alias equals {@code alias}, or -1 when not found. */
  private byte tagForAlias(String alias) {
    for (byte tag = 0; tag < tagToAlias.length; tag++) {
      if (alias.equals(tagToAlias[tag])) {
        return tag;
      }
    }
    return -1;
  }

  // The input file has changed - load the correct hash bucket
  @Override
  public void cleanUpInputFileChangedOp() throws HiveException {
    inputFileChanged = true;
  }

  /** Computes the join key of {@code row} for the given alias as standard objects. */
  protected List<Object> smbJoinComputeKeys(Object row, byte alias) throws HiveException {
    return JoinUtil.computeKeys(row, joinKeys[alias], joinKeysObjectInspectors[alias]);
  }

  /**
   * Main per-row entry point. Big-table rows drive the merge: when the big
   * table reaches a new key group, the small tables are advanced until they
   * catch up; small-table rows are buffered into their group storage.
   */
  @Override
  public void process(Object row, int tag) throws HiveException {
    if (tag == posBigTable) {
      if (inputFileChanged) {
        if (firstFetchHappened) {
          // we need to first join and flush out data left by the previous file.
          joinFinalLeftData();
        }
        // set up the fetch operator for the new input file.
        for (Map.Entry<String, MergeQueue> entry : aliasToMergeQueue.entrySet()) {
          String alias = entry.getKey();
          MergeQueue mergeQueue = entry.getValue();
          setUpFetchContexts(alias, mergeQueue);
        }
        firstFetchHappened = false;
        inputFileChanged = false;
      }
    }

    if (!firstFetchHappened) {
      firstFetchHappened = true;
      // fetch the first group for all small table aliases
      for (byte pos = 0; pos < order.length; pos++) {
        if (pos != posBigTable) {
          fetchNextGroup(pos);
        }
      }
    }

    byte alias = (byte) tag;

    // compute keys and values as StandardObjects
    List<Object> key = smbJoinComputeKeys(row, alias);

    List<Object> value = getFilteredValue(alias, row);

    //have we reached a new key group?
    boolean nextKeyGroup = processKey(alias, key);
    if (nextKeyGroup) {
      //assert this.nextGroupStorage[alias].size() == 0;
      this.nextGroupStorage[alias].addRow(value);
      foundNextKeyGroup[tag] = true;
      if (tag != posBigTable) {
        return;
      }
    }

    reportProgress();
    numMapRowsRead++;

    // the big table has reached a new key group. try to let the small tables
    // catch up with the big table.
    if (nextKeyGroup) {
      assert tag == posBigTable;
      List<Byte> smallestPos = null;
      do {
        smallestPos = joinOneGroup();
        //jump out the loop if we need input from the big table
      } while (smallestPos != null && smallestPos.size() > 0
          && !smallestPos.contains(this.posBigTable));

      return;
    }

    assert !nextKeyGroup;
    candidateStorage[tag].addRow(value);
  }

  /*
   * this happens either when the input file of the big table is changed or in
   * closeop. It needs to fetch all the left data from the small tables and try
   * to join them.
   */
  private void joinFinalLeftData() throws HiveException {
    RowContainer bigTblRowContainer = this.candidateStorage[this.posBigTable];

    boolean allFetchDone = allFetchDone();
    // if all left data in small tables are less than and equal to the left data
    // in big table, let's them catch up
    while (bigTblRowContainer != null && bigTblRowContainer.rowCount() > 0
        && !allFetchDone) {
      joinOneGroup();
      bigTblRowContainer = this.candidateStorage[this.posBigTable];
      allFetchDone = allFetchDone();
    }

    // Drain remaining small-table groups until no side needs more input.
    while (!allFetchDone) {
      List<Byte> ret = joinOneGroup();
      if (ret == null || ret.size() == 0) {
        break;
      }
      reportProgress();
      numMapRowsRead++;
      allFetchDone = allFetchDone();
    }

    // Finally flush whatever is still cached: promote look-ahead groups to
    // candidates and join until every candidate container is empty.
    boolean dataInCache = true;
    while (dataInCache) {
      for (byte pos = 0; pos < order.length; pos++) {
        if (this.foundNextKeyGroup[pos] && this.nextKeyWritables[pos] != null) {
          promoteNextGroupToCandidate(pos);
        }
      }
      joinOneGroup();
      dataInCache = false;
      for (byte pos = 0; pos < order.length; pos++) {
        if (this.candidateStorage[pos] != null && this.candidateStorage[pos].hasRows()) {
          dataInCache = true;
          break;
        }
      }
    }
  }

  /** True when every small-table alias (all except posBigTable) has finished fetching. */
  private boolean allFetchDone() {
    boolean allFetchDone = true;
    for (byte pos = 0; pos < order.length; pos++) {
      if (pos == posBigTable) {
        continue;
      }
      allFetchDone = allFetchDone && fetchDone[pos];
    }
    return allFetchDone;
  }

  /**
   * Joins the group(s) holding the currently smallest key and advances the
   * joined aliases to their next group. Returns the aliases that were joined
   * (and hence refetched), or null when no alias has a current key.
   */
  private List<Byte> joinOneGroup() throws HiveException {
    int[] smallestPos = findSmallestKey();
    List<Byte> listOfNeedFetchNext = null;
    if(smallestPos != null) {
      listOfNeedFetchNext = joinObject(smallestPos);
      if (listOfNeedFetchNext.size() > 0) {
        // listOfNeedFetchNext contains all tables that we have joined data in their
        // candidateStorage, and we need to clear candidate storage and promote their
        // nextGroupStorage to candidateStorage and fetch data until we reach a
        // new group.
        for (Byte b : listOfNeedFetchNext) {
          fetchNextGroup(b);
        }
      }
    }
    return listOfNeedFetchNext;
  }

  /**
   * Emits join output for the aliases whose comparison result in
   * {@code smallestPos} is <= 0 (i.e. they hold the smallest key); all other
   * aliases contribute a dummy/empty side. Returns the aliases that consumed
   * their candidate group.
   */
  private List<Byte> joinObject(int[] smallestPos) throws HiveException {
    List<Byte> needFetchList = new ArrayList<Byte>();
    byte index = (byte) (smallestPos.length - 1);
    for (; index >= 0; index--) {
      if (smallestPos[index] > 0 || keyWritables[index] == null) {
        putDummyOrEmpty(index);
        continue;
      }
      storage[index] = candidateStorage[index];
      needFetchList.add(index);
      if (smallestPos[index] < 0) {
        // This alias strictly holds the smallest key; everything below it
        // cannot match, so fill the rest with dummy/empty and stop scanning.
        break;
      }
    }
    for (index--; index >= 0; index--) {
      putDummyOrEmpty(index);
    }
    checkAndGenObject();
    for (Byte pos : needFetchList) {
      this.candidateStorage[pos].clearRows();
      this.keyWritables[pos] = null;
    }
    return needFetchList;
  }

  /**
   * Makes the look-ahead group current for alias {@code t} and, for small
   * tables, keeps fetching rows until the next key-group boundary (or end of
   * input) is found.
   */
  private void fetchNextGroup(Byte t) throws HiveException {
    if (foundNextKeyGroup[t]) {
      // first promote the next group to be the current group if we reached a
      // new group in the previous fetch
      if (this.nextKeyWritables[t] != null) {
        promoteNextGroupToCandidate(t);
      } else {
        this.keyWritables[t] = null;
        this.candidateStorage[t] = null;
        this.nextGroupStorage[t] = null;
      }
      foundNextKeyGroup[t] = false;
    }
    //for the big table, we only need to promote the next group to the current group.
    if(t == posBigTable) {
      return;
    }

    //for tables other than the big table, we need to fetch more data until reach a new group or done.
    while (!foundNextKeyGroup[t]) {
      if (fetchDone[t]) {
        break;
      }
      fetchOneRow(t);
    }
    if (!foundNextKeyGroup[t] && fetchDone[t]) {
      this.nextKeyWritables[t] = null;
    }
  }

  /** Swaps the look-ahead group into the candidate slot, recycling the old container. */
  private void promoteNextGroupToCandidate(Byte t) throws HiveException {
    this.keyWritables[t] = this.nextKeyWritables[t];
    this.nextKeyWritables[t] = null;
    RowContainer<List<Object>> oldRowContainer = this.candidateStorage[t];
    oldRowContainer.clearRows();
    this.candidateStorage[t] = this.nextGroupStorage[t];
    this.nextGroupStorage[t] = oldRowContainer;
  }

  /**
   * Lexicographic comparison of two join keys. NOTE: when both components are
   * null and the position is not null-safe, -1 is returned (k1 deliberately
   * treated as smaller so null keys never match).
   */
  private int compareKeys (List<Object> k1, List<Object> k2) {
    int ret = 0;

    // join keys have difference sizes?
    ret = k1.size() - k2.size();
    if (ret != 0) {
      return ret;
    }

    for (int i = 0; i < k1.size(); i++) {
      WritableComparable key_1 = (WritableComparable) k1.get(i);
      WritableComparable key_2 = (WritableComparable) k2.get(i);
      if (key_1 == null && key_2 == null) {
        return nullsafes != null && nullsafes[i] ? 0 : -1; // just return k1 is smaller than k2
      } else if (key_1 == null) {
        return -1;
      } else if (key_2 == null) {
        return 1;
      }
      ret = WritableComparator.get(key_1.getClass()).compare(key_1, key_2);
      if(ret != 0) {
        return ret;
      }
    }
    return ret;
  }

  /** Fills the output slot for alias {@code i} with an empty row set (inner join) or null-row dummy (outer join). */
  private void putDummyOrEmpty(Byte i) {
    // put a empty list or null
    if (noOuterJoin) {
      storage[i] = emptyList;
    } else {
      storage[i] = dummyObjVectors[i];
    }
  }

  /**
   * Compares each alias' current key against the running minimum.
   * result[pos] < 0 means pos held the smallest key when visited; returns
   * null when no alias has a current key at all.
   */
  private int[] findSmallestKey() {
    int[] result = new int[order.length];
    List<Object> smallestOne = null;

    for (byte pos = 0; pos < order.length; pos++) {
      List<Object> key = keyWritables[pos];
      if (key == null) {
        continue;
      }
      if (smallestOne == null) {
        smallestOne = key;
        result[pos] = -1;
        continue;
      }
      result[pos] = compareKeys(key, smallestOne);
      if (result[pos] < 0) {
        smallestOne = key;
      }
    }
    return smallestOne == null ? null : result;
  }

  /**
   * Records {@code key} for {@code alias}; returns true when the key differs
   * from the current group key, i.e. a new key group has started.
   */
  private boolean processKey(byte alias, List<Object> key) throws HiveException {
    List<Object> keyWritable = keyWritables[alias];
    if (keyWritable == null) {
      //the first group.
      keyWritables[alias] = key;
      return false;
    } else {
      int cmp = compareKeys(key, keyWritable);
      if (cmp != 0) {
        nextKeyWritables[alias] = key;
        return true;
      }
      return false;
    }
  }

  /**
   * Points the merge queue of {@code alias} at the small-table bucket files
   * that correspond to the big table's current input file, resolved via the
   * configured BucketMatcher.
   */
  private void setUpFetchContexts(String alias, MergeQueue mergeQueue) throws HiveException {
    mergeQueue.clearFetchContext();

    Path currentInputPath = getExecContext().getCurrentInputPath();

    BucketMapJoinContext bucketMatcherCxt = localWork.getBucketMapjoinContext();
    Class<? extends BucketMatcher> bucketMatcherCls = bucketMatcherCxt.getBucketMatcherClass();
    BucketMatcher bucketMatcher = ReflectionUtil.newInstance(bucketMatcherCls, null);

    getExecContext().setFileId(bucketMatcherCxt.createFileId(currentInputPath.toString()));
    if (LOG.isInfoEnabled()) {
      LOG.info("set task id: " + getExecContext().getFileId());
    }

    bucketMatcher.setAliasBucketFileNameMapping(bucketMatcherCxt.getAliasBucketFileNameMapping());

    List<Path> aliasFiles = bucketMatcher.getAliasBucketFiles(currentInputPath.toString(),
        bucketMatcherCxt.getMapJoinBigTableAlias(), alias);

    mergeQueue.setupContext(aliasFiles);
  }

  /**
   * Pulls one row for the given small-table tag from its merge queue and pushes
   * it through the rest of the operator tree; marks the tag done at end of input.
   */
  private void fetchOneRow(byte tag) {
    String table = tagToAlias[tag];
    MergeQueue mergeQueue = aliasToMergeQueue.get(table);

    // The operator tree till the sink operator has already been processed while
    // fetching the next row to fetch from the priority queue (possibly containing
    // multiple files in the small table given a file in the big table). Now, process
    // the remaining tree. Look at comments in DummyStoreOperator for additional
    // explanation.
    Operator<? extends OperatorDesc> forwardOp =
        conf.getAliasToSink().get(table).getChildOperators().get(0);
    try {
      InspectableObject row = mergeQueue.getNextRow();
      if (row == null) {
        fetchDone[tag] = true;
        return;
      }
      forwardOp.process(row.o, tag);
      // check if any operator had a fatal error or early exit during
      // execution
      if (forwardOp.getDone()) {
        fetchDone[tag] = true;
      }
    } catch (Throwable e) {
      if (e instanceof OutOfMemoryError) {
        // Don't create a new object if we are already out of memory
        throw (OutOfMemoryError) e;
      } else {
        throw new RuntimeException("Map local work failed", e);
      }
    }
  }

  // Guards closeOp against being invoked more than once.
  transient boolean closeCalled = false;

  /**
   * Flushes all remaining buffered groups, resets per-file state and closes
   * the local-work operator trees and merge queues. Idempotent via closeCalled.
   */
  @Override
  public void closeOp(boolean abort) throws HiveException {
    if(closeCalled) {
      return;
    }
    closeCalled = true;

    // If there is a sort-merge join followed by a regular join, the SMBJoinOperator may not
    // get initialized at all. Consider the following query:
    // A SMB B JOIN C
    // For the mapper processing C, The SMJ is not initialized, no need to close it either.
    if (!initDone) {
      return;
    }

    if (inputFileChanged || !firstFetchHappened) {
      //set up the fetch operator for the new input file.
      for (Map.Entry<String, MergeQueue> entry : aliasToMergeQueue.entrySet()) {
        String alias = entry.getKey();
        MergeQueue mergeQueue = entry.getValue();
        setUpFetchContexts(alias, mergeQueue);
      }
      firstFetchHappened = true;
      for (byte pos = 0; pos < order.length; pos++) {
        if (pos != posBigTable) {
          fetchNextGroup(pos);
        }
      }
      inputFileChanged = false;
    }

    joinFinalLeftData();

    //clean up
    for (int pos = 0; pos < order.length; pos++) {
      if (pos != posBigTable) {
        fetchDone[pos] = false;
      }
      foundNextKeyGroup[pos] = false;
    }

    localWorkInited = false;

    super.closeOp(abort);
    for (Map.Entry<String, MergeQueue> entry : aliasToMergeQueue.entrySet()) {
      String alias = entry.getKey();
      MergeQueue mergeQueue = entry.getValue();
      Operator forwardOp = localWork.getAliasToWork().get(alias);
      forwardOp.close(abort);
      mergeQueue.clearFetchContext();
    }
  }

  @Override
  protected boolean allInitializedParentsAreClosed() {
    return true;
  }

  /**
   * Implements the getName function for the Node Interface.
   *
   * @return the name of the operator
   */
  @Override
  public String getName() {
    return getOperatorName();
  }

  static public String getOperatorName() {
    return "MAPJOIN";
  }

  @Override
  public OperatorType getType() {
    return OperatorType.MAPJOIN;
  }

  public boolean isConvertedAutomaticallySMBJoin() {
    return convertedAutomaticallySMBJoin;
  }

  public void setConvertedAutomaticallySMBJoin(boolean convertedAutomaticallySMBJoin) {
    this.convertedAutomaticallySMBJoin = convertedAutomaticallySMBJoin;
  }

  // returns rows from possibly multiple bucket files of small table in ascending order
  // by utilizing primary queue (borrowed from hadoop)
  // elements of queue (Integer) are index to FetchOperator[] (segments)
  private class MergeQueue extends PriorityQueue<Integer> {

    private final String alias;
    private final FetchWork fetchWork;
    private final JobConf jobConf;

    // for keeping track of the number of elements read. just for debugging
    transient int counter;

    transient FetchOperator[] segments;
    transient List<ExprNodeEvaluator> keyFields;
    transient List<ObjectInspector> keyFieldOIs;
    transient Operator<? extends OperatorDesc> forwardOp;
    transient DummyStoreOperator sinkOp;

    // index of FetchOperator which is providing smallest one
    transient Integer currentMinSegment;

    // Per-segment (key, row) pair of the most recently fetched record.
    transient MutablePair<List<Object>, InspectableObject>[] keys;

    public MergeQueue(String alias, FetchWork fetchWork, JobConf jobConf,
        Operator<? extends OperatorDesc> forwardOp, DummyStoreOperator sinkOp) {
      this.alias = alias;
      this.fetchWork = fetchWork;
      this.jobConf = jobConf;
      this.forwardOp = forwardOp;
      this.sinkOp = sinkOp;
    }

    // paths = bucket files of small table for current bucket file of big table
    // initializes a FetchOperator for each file in paths, reuses FetchOperator if possible
    // currently, number of paths is always the same (bucket numbers are all the same over
    // all partitions in a table). But if hive supports assigning bucket number
    // for each partition, this can be vary
    public void setupContext(List<Path> paths) throws HiveException {
      int segmentLen = paths.size();
      FetchOperator.setFetchOperatorContext(jobConf, fetchWork.getPartDir());
      FetchOperator[] segments = segmentsForSize(segmentLen);
      for (int i = 0 ; i < segmentLen; i++) {
        Path path = paths.get(i);
        if (segments[i] == null) {
          segments[i] = new FetchOperator(fetchWork, new JobConf(jobConf));
        }
        segments[i].setupContext(Arrays.asList(path));
      }
      initialize(segmentLen);
      // Seed the priority queue with every segment that has at least one row.
      for (int i = 0; i < segmentLen; i++) {
        if (nextHive(i)) {
          put(i);
        }
      }
      counter = 0;
    }

    // Grows (never shrinks) the segment and key arrays to hold segmentLen entries,
    // preserving existing FetchOperators so they can be reused across input files.
    @SuppressWarnings("unchecked")
    private FetchOperator[] segmentsForSize(int segmentLen) {
      if (segments == null || segments.length < segmentLen) {
        FetchOperator[] newSegments = new FetchOperator[segmentLen];
        MutablePair<List<Object>, InspectableObject>[] newKeys = new MutablePair[segmentLen];
        if (segments != null) {
          System.arraycopy(segments, 0, newSegments, 0, segments.length);
          System.arraycopy(keys, 0, newKeys, 0, keys.length);
        }
        segments = newSegments;
        keys = newKeys;
      }
      return segments;
    }

    public void clearFetchContext() throws HiveException {
      if (segments != null) {
        for (FetchOperator op : segments) {
          if (op != null) {
            op.clearFetchContext();
          }
        }
      }
    }

    @Override
    protected boolean lessThan(Object a, Object b) {
      return compareKeys(keys[(Integer) a].getLeft(), keys[(Integer)b].getLeft()) < 0;
    }

    /**
     * Returns the globally smallest next row across all segments, or null
     * when every segment is exhausted.
     */
    public final InspectableObject getNextRow() throws IOException {
      if (currentMinSegment != null) {
        adjustPriorityQueue(currentMinSegment);
      }
      Integer current = top();
      if (current == null) {
        if (LOG.isInfoEnabled()) {
          LOG.info("MergeQueue forwarded " + counter + " rows");
        }
        return null;
      }
      counter++;
      currentMinSegment = current;
      return keys[currentMinSegment].getRight();
    }

    private void adjustPriorityQueue(Integer current) throws IOException {
      if (nextIO(current)) {
        adjustTop();  // sort
      } else {
        pop();
      }
    }

    // wrapping for exception handling
    private boolean nextHive(Integer current) throws HiveException {
      try {
        return next(current);
      } catch (IOException e) {
        throw new HiveException(e);
      }
    }

    // wrapping for exception handling
    private boolean nextIO(Integer current) throws IOException {
      try {
        return next(current);
      } catch (HiveException e) {
        throw new IOException(e);
      }
    }

    // return true if current min segment(FetchOperator) has next row
    private boolean next(Integer current) throws IOException, HiveException {
      if (keyFields == null) {
        byte tag = tagForAlias(alias);
        // joinKeys/joinKeysOI are initialized after making merge queue, so setup lazily at runtime
        keyFields = joinKeys[tag];
        keyFieldOIs = joinKeysObjectInspectors[tag];
      }
      InspectableObject nextRow = segments[current].getNextRow();
      while (nextRow != null) {
        sinkOp.reset();
        if (keys[current] == null) {
          keys[current] = new MutablePair<List<Object>, InspectableObject>();
        }

        // Pass the row though the operator tree. It is guaranteed that not more than 1 row can
        // be produced from a input row.
        forwardOp.process(nextRow.o, 0);
        nextRow = sinkOp.getResult();

        // It is possible that the row got absorbed in the operator tree.
        if (nextRow.o != null) {
          // todo this should be changed to be evaluated lazily, especially for single segment case
          keys[current].setLeft(JoinUtil.computeKeys(nextRow.o, keyFields, keyFieldOIs));
          keys[current].setRight(nextRow);
          return true;
        }
        nextRow = segments[current].getNextRow();
      }
      keys[current] = null;
      return false;
    }
  }

  @Override
  public boolean opAllowedConvertMapJoin() {
    return false;
  }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.capacity; import java.net.URI; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.inject.Inject; import javax.naming.ConfigurationException; import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreDriver; import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider; import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProviderManager; import org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreDriver; import org.apache.cloudstack.framework.config.ConfigKey; import org.apache.cloudstack.framework.config.Configurable; import org.apache.cloudstack.framework.config.dao.ConfigurationDao; import org.apache.cloudstack.framework.messagebus.MessageBus; import org.apache.cloudstack.framework.messagebus.PublishScope; import org.apache.cloudstack.storage.datastore.db.StoragePoolVO; import org.apache.log4j.Logger; import com.cloud.agent.AgentManager; import com.cloud.agent.Listener; import com.cloud.agent.api.AgentControlAnswer; import com.cloud.agent.api.AgentControlCommand; import com.cloud.agent.api.Answer; import com.cloud.agent.api.Command; import com.cloud.agent.api.StartupCommand; import 
com.cloud.agent.api.StartupRoutingCommand; import com.cloud.capacity.dao.CapacityDao; import com.cloud.configuration.Config; import com.cloud.dc.ClusterDetailsDao; import com.cloud.dc.ClusterDetailsVO; import com.cloud.dc.ClusterVO; import com.cloud.dc.dao.ClusterDao; import com.cloud.deploy.DeploymentClusterPlanner; import com.cloud.event.UsageEventVO; import com.cloud.exception.ConnectionException; import com.cloud.host.Host; import com.cloud.host.HostVO; import com.cloud.host.Status; import com.cloud.host.dao.HostDao; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao; import com.cloud.offering.ServiceOffering; import com.cloud.resource.ResourceListener; import com.cloud.resource.ResourceManager; import com.cloud.resource.ResourceState; import com.cloud.resource.ServerResource; import com.cloud.service.ServiceOfferingVO; import com.cloud.service.dao.ServiceOfferingDao; import com.cloud.storage.StorageManager; import com.cloud.storage.VMTemplateStoragePoolVO; import com.cloud.storage.VMTemplateVO; import com.cloud.storage.dao.VMTemplatePoolDao; import com.cloud.storage.dao.VolumeDao; import com.cloud.utils.DateUtil; import com.cloud.utils.NumbersUtil; import com.cloud.utils.Pair; import com.cloud.utils.component.ManagerBase; import com.cloud.utils.db.DB; import com.cloud.utils.db.SearchCriteria; import com.cloud.utils.db.Transaction; import com.cloud.utils.db.TransactionCallbackNoReturn; import com.cloud.utils.db.TransactionStatus; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.fsm.StateListener; import com.cloud.utils.fsm.StateMachine2; import com.cloud.vm.UserVmDetailVO; import com.cloud.vm.UserVmVO; import com.cloud.vm.VMInstanceVO; import com.cloud.vm.VirtualMachine; import com.cloud.vm.VirtualMachine.Event; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.VmDetailConstants; import com.cloud.vm.dao.UserVmDao; import com.cloud.vm.dao.UserVmDetailsDao; 
import com.cloud.vm.dao.VMInstanceDao;
import com.cloud.vm.snapshot.dao.VMSnapshotDao;

/**
 * Tracks and updates per-host CPU / memory / CPU-core capacity records as VMs
 * are allocated and released. Capacity rows are updated inside DB transactions
 * with row locks to keep used/reserved counters consistent.
 */
public class CapacityManagerImpl extends ManagerBase implements CapacityManager, StateListener<State, VirtualMachine.Event, VirtualMachine>, Listener,
    ResourceListener, Configurable {
  private static final Logger s_logger = Logger.getLogger(CapacityManagerImpl.class);
  @Inject
  CapacityDao _capacityDao;
  @Inject
  ConfigurationDao _configDao;
  @Inject
  ServiceOfferingDao _offeringsDao;
  @Inject
  HostDao _hostDao;
  @Inject
  VMInstanceDao _vmDao;
  @Inject
  VolumeDao _volumeDao;
  @Inject
  VMTemplatePoolDao _templatePoolDao;
  @Inject
  AgentManager _agentManager;
  @Inject
  ResourceManager _resourceMgr;
  @Inject
  StorageManager _storageMgr;
  @Inject
  HypervisorCapabilitiesDao _hypervisorCapabilitiesDao;
  @Inject
  protected VMSnapshotDao _vmSnapshotDao;
  @Inject
  protected UserVmDao _userVMDao;
  @Inject
  protected UserVmDetailsDao _userVmDetailsDao;
  @Inject
  ClusterDao _clusterDao;
  @Inject
  DataStoreProviderManager _dataStoreProviderMgr;
  @Inject
  ClusterDetailsDao _clusterDetailsDao;

  // Interval read from Config.CapacitySkipcountingHours (default 3600) in configure().
  private int _vmCapacityReleaseInterval;
  long _extraBytesPerVolume = 0;

  @Inject
  MessageBus _messageBus;

  /**
   * Wires this manager into the VM state machine and registers the storage and
   * compute capacity listeners with the agent manager.
   */
  @Override
  public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
    _vmCapacityReleaseInterval = NumbersUtil.parseInt(_configDao.getValue(Config.CapacitySkipcountingHours.key()), 3600);

    VirtualMachine.State.getStateMachine().registerListener(this);

    _agentManager.registerForHostEvents(new StorageCapacityListener(_capacityDao, _storageMgr), true, false, false);
    _agentManager.registerForHostEvents(new ComputeCapacityListener(_capacityDao, this), true, false, false);

    return true;
  }

  /** Subscribes to host-maintenance resource events. */
  @Override
  public boolean start() {
    _resourceMgr.registerResourceEvent(ResourceListener.EVENT_PREPARE_MAINTENANCE_AFTER, this);
    _resourceMgr.registerResourceEvent(ResourceListener.EVENT_CANCEL_MAINTENANCE_AFTER, this);
    return true;
  }

  @Override
  public boolean stop() {
    return true;
  }

  /**
   * Releases the CPU/memory/core capacity a VM occupies on {@code hostId}.
   * Depending on the flags, capacity is moved out of the used counters, out of
   * the reserved counters, or from used into reserved. All counter updates run
   * in a single transaction with the three capacity rows locked.
   *
   * @return true when nothing needed doing or the release succeeded;
   *         false when capacity rows / offering are missing or the transaction failed
   */
  @DB
  @Override
  public boolean releaseVmCapacity(VirtualMachine vm, final boolean moveFromReserved, final boolean moveToReservered, final Long hostId) {
    if (hostId == null) {
      return true;
    }

    final ServiceOfferingVO svo = _offeringsDao.findById(vm.getId(), vm.getServiceOfferingId());
    CapacityVO capacityCpu = _capacityDao.findByHostIdType(hostId, Capacity.CAPACITY_TYPE_CPU);
    CapacityVO capacityMemory = _capacityDao.findByHostIdType(hostId, Capacity.CAPACITY_TYPE_MEMORY);
    CapacityVO capacityCpuCore = _capacityDao.findByHostIdType(hostId, Capacity.CAPACITY_TYPE_CPU_CORE);
    Long clusterId = null;
    if (hostId != null) {
      HostVO host = _hostDao.findById(hostId);
      if (host == null) {
        s_logger.warn("Host " + hostId + " no long exist anymore!");
        return true;
      }
      clusterId = host.getClusterId();
    }
    if (capacityCpu == null || capacityMemory == null || svo == null || capacityCpuCore == null) {
      return false;
    }

    try {
      final Long clusterIdFinal = clusterId;
      final long capacityCpuId = capacityCpu.getId();
      final long capacityMemoryId = capacityMemory.getId();
      final long capacityCpuCoreId = capacityCpuCore.getId();
      Transaction.execute(new TransactionCallbackNoReturn() {
        @Override
        public void doInTransactionWithoutResult(TransactionStatus status) {
          // Re-read the capacity rows under row locks inside the transaction.
          CapacityVO capacityCpu = _capacityDao.lockRow(capacityCpuId, true);
          CapacityVO capacityMemory = _capacityDao.lockRow(capacityMemoryId, true);
          CapacityVO capacityCpuCore = _capacityDao.lockRow(capacityCpuCoreId, true);

          long usedCpu = capacityCpu.getUsedCapacity();
          long usedMem = capacityMemory.getUsedCapacity();
          long usedCpuCore = capacityCpuCore.getUsedCapacity();
          long reservedCpu = capacityCpu.getReservedCapacity();
          long reservedMem = capacityMemory.getReservedCapacity();
          long reservedCpuCore = capacityCpuCore.getReservedCapacity();
          long actualTotalCpu = capacityCpu.getTotalCapacity();

          float cpuOvercommitRatio = Float.parseFloat(_clusterDetailsDao.findDetail(clusterIdFinal, "cpuOvercommitRatio").getValue());
          float memoryOvercommitRatio = Float.parseFloat(_clusterDetailsDao.findDetail(clusterIdFinal, "memoryOvercommitRatio").getValue());

          // Footprint of this VM according to its service offering.
          int vmCPU = svo.getCpu() * svo.getSpeed();
          int vmCPUCore = svo.getCpu();
          long vmMem = svo.getRamSize() * 1024L * 1024L;

          long actualTotalMem = capacityMemory.getTotalCapacity();
          long totalMem = (long)(actualTotalMem * memoryOvercommitRatio);
          long totalCpu = (long)(actualTotalCpu * cpuOvercommitRatio);
          if (s_logger.isDebugEnabled()) {
            s_logger.debug("Hosts's actual total CPU: " + actualTotalCpu + " and CPU after applying overprovisioning: " + totalCpu);
            s_logger.debug("Hosts's actual total RAM: " + actualTotalMem + " and RAM after applying overprovisioning: " + totalMem);
          }

          if (!moveFromReserved) {
            /* move resource from used */
            if (usedCpu >= vmCPU) {
              capacityCpu.setUsedCapacity(usedCpu - vmCPU);
            }
            if (usedMem >= vmMem) {
              capacityMemory.setUsedCapacity(usedMem - vmMem);
            }
            if (usedCpuCore >= vmCPUCore) {
              capacityCpuCore.setUsedCapacity(usedCpuCore - vmCPUCore);
            }

            if (moveToReservered) {
              // Reservations are only grown while they fit under the
              // overcommitted totals (except cores, which are always added).
              if (reservedCpu + vmCPU <= totalCpu) {
                capacityCpu.setReservedCapacity(reservedCpu + vmCPU);
              }
              if (reservedMem + vmMem <= totalMem) {
                capacityMemory.setReservedCapacity(reservedMem + vmMem);
              }
              capacityCpuCore.setReservedCapacity(reservedCpuCore + vmCPUCore);
            }
          } else {
            // Release out of the reserved counters instead of the used ones.
            if (reservedCpu >= vmCPU) {
              capacityCpu.setReservedCapacity(reservedCpu - vmCPU);
            }
            if (reservedMem >= vmMem) {
              capacityMemory.setReservedCapacity(reservedMem - vmMem);
            }
            if (reservedCpuCore >= vmCPUCore) {
              capacityCpuCore.setReservedCapacity(reservedCpuCore - vmCPUCore);
            }
          }

          s_logger.debug("release cpu from host: " + hostId + ", old used: " + usedCpu + ",reserved: " + reservedCpu + ", actual total: " + actualTotalCpu +
            ", total with overprovisioning: " + totalCpu + "; new used: " + capacityCpu.getUsedCapacity() + ",reserved:" + capacityCpu.getReservedCapacity() +
            "; movedfromreserved: " + moveFromReserved + ",moveToReservered" + moveToReservered);

          s_logger.debug("release mem from host: " + hostId + ", old used: " + usedMem + ",reserved: " + reservedMem + ", total: " + totalMem + "; new used: " +
            capacityMemory.getUsedCapacity() + ",reserved:" + capacityMemory.getReservedCapacity() + "; movedfromreserved: " + moveFromReserved +
            ",moveToReservered" + moveToReservered);

          _capacityDao.update(capacityCpu.getId(), capacityCpu);
          _capacityDao.update(capacityMemory.getId(), capacityMemory);
          _capacityDao.update(capacityCpuCore.getId(), capacityCpuCore);
        }
      });

      return true;
    } catch (Exception e) {
      s_logger.debug("Failed to transit vm's state, due to " + e.getMessage());
      return false;
    }
  }

  /**
   * Charges a VM's CPU/memory/core footprint against its host's capacity rows.
   * When {@code fromLastHost} the footprint is taken out of the host's reserved
   * capacity; otherwise it is added to used capacity even if the host appears
   * short on free capacity (the VM is already starting there). Throws
   * CloudRuntimeException from within the transaction when the host lacks
   * capability or capacity.
   */
  @DB
  @Override
  public void allocateVmCapacity(VirtualMachine vm, final boolean fromLastHost) {
    final long vmId = vm.getId();
    final long hostId = vm.getHostId();
    final HostVO host = _hostDao.findById(hostId);
    final long clusterId = host.getClusterId();
    final float cpuOvercommitRatio = Float.parseFloat(_clusterDetailsDao.findDetail(clusterId, "cpuOvercommitRatio").getValue());
    final float memoryOvercommitRatio = Float.parseFloat(_clusterDetailsDao.findDetail(clusterId, "memoryOvercommitRatio").getValue());

    final ServiceOfferingVO svo = _offeringsDao.findById(vm.getId(), vm.getServiceOfferingId());

    CapacityVO capacityCpu = _capacityDao.findByHostIdType(hostId, Capacity.CAPACITY_TYPE_CPU);
    CapacityVO capacityMem = _capacityDao.findByHostIdType(hostId, Capacity.CAPACITY_TYPE_MEMORY);
    CapacityVO capacityCpuCore = _capacityDao.findByHostIdType(hostId, Capacity.CAPACITY_TYPE_CPU_CORE);

    if (capacityCpu == null || capacityMem == null || svo == null || capacityCpuCore == null) {
      return;
    }

    // Requested footprint derived from the service offering.
    final int cpu = svo.getCpu() * svo.getSpeed();
    final int cpucore = svo.getCpu();
    final int cpuspeed = svo.getSpeed();
    final long ram = svo.getRamSize() * 1024L * 1024L;

    try {
      final long capacityCpuId = capacityCpu.getId();
      final long capacityMemId = capacityMem.getId();
      final long capacityCpuCoreId = capacityCpuCore.getId();

      Transaction.execute(new TransactionCallbackNoReturn() {
        @Override
        public void doInTransactionWithoutResult(TransactionStatus status) {
          // Re-read the capacity rows under row locks inside the transaction.
          CapacityVO capacityCpu = _capacityDao.lockRow(capacityCpuId, true);
          CapacityVO capacityMem = _capacityDao.lockRow(capacityMemId, true);
          CapacityVO capacityCpuCore = _capacityDao.lockRow(capacityCpuCoreId, true);

          long usedCpu = capacityCpu.getUsedCapacity();
          long usedMem = capacityMem.getUsedCapacity();
          long usedCpuCore = capacityCpuCore.getUsedCapacity();
          long reservedCpu = capacityCpu.getReservedCapacity();
          long reservedMem = capacityMem.getReservedCapacity();
          long reservedCpuCore = capacityCpuCore.getReservedCapacity();
          long actualTotalCpu = capacityCpu.getTotalCapacity();
          long actualTotalMem = capacityMem.getTotalCapacity();
          long totalCpu = (long)(actualTotalCpu * cpuOvercommitRatio);
          long totalMem = (long)(actualTotalMem * memoryOvercommitRatio);
          if (s_logger.isDebugEnabled()) {
            s_logger.debug("Hosts's actual total CPU: " + actualTotalCpu + " and CPU after applying overprovisioning: " + totalCpu);
          }

          long freeCpu = totalCpu - (reservedCpu + usedCpu);
          long freeMem = totalMem - (reservedMem + usedMem);

          if (s_logger.isDebugEnabled()) {
            s_logger.debug("We are allocating VM, increasing the used capacity of this host:" + hostId);
            s_logger.debug("Current Used CPU: " + usedCpu + " , Free CPU:" + freeCpu + " ,Requested CPU: " + cpu);
            s_logger.debug("Current Used RAM: " + usedMem + " , Free RAM:" + freeMem + " ,Requested RAM: " + ram);
          }
          capacityCpu.setUsedCapacity(usedCpu + cpu);
          capacityMem.setUsedCapacity(usedMem + ram);
          capacityCpuCore.setUsedCapacity(usedCpuCore + cpucore);

          if (fromLastHost) {
            /* alloc from reserved */
            if (s_logger.isDebugEnabled()) {
              s_logger.debug("We are allocating VM to the last host again, so adjusting the reserved capacity if it is not less than required");
              s_logger.debug("Reserved CPU: " + reservedCpu + " , Requested CPU: " + cpu);
              s_logger.debug("Reserved RAM: " + reservedMem + " , Requested RAM: " + ram);
            }
            if (reservedCpu >= cpu && reservedMem >= ram) {
              capacityCpu.setReservedCapacity(reservedCpu - cpu);
              capacityMem.setReservedCapacity(reservedMem - ram);
              capacityCpuCore.setReservedCapacity(reservedCpuCore - cpucore);
            }
          } else {
            /* alloc from free resource */
            if (!((reservedCpu + usedCpu + cpu <= totalCpu) && (reservedMem + usedMem + ram <= totalMem))) {
              if (s_logger.isDebugEnabled()) {
                s_logger.debug("Host doesnt seem to have enough free capacity, but increasing the used capacity anyways, " +
                  "since the VM is already starting on this host ");
              }
            }
          }

          s_logger.debug("CPU STATS after allocation: for host: " + hostId + ", old used: " + usedCpu + ", old reserved: " + reservedCpu + ", actual total: " +
            actualTotalCpu + ", total with overprovisioning: " + totalCpu + "; new used:" + capacityCpu.getUsedCapacity() + ", reserved:" +
            capacityCpu.getReservedCapacity() + "; requested cpu:" + cpu + ",alloc_from_last:" + fromLastHost);

          s_logger.debug("RAM STATS after allocation: for host: " + hostId + ", old used: " + usedMem + ", old reserved: " + reservedMem + ", total: " +
            totalMem + "; new used: " + capacityMem.getUsedCapacity() + ", reserved: " + capacityMem.getReservedCapacity() + "; requested mem: " + ram +
            ",alloc_from_last:" + fromLastHost);

          // NOTE(review): the overcommit ratios are re-read here, shadowing the
          // captured outer values with freshly parsed ones before the capacity check.
          long cluster_id = host.getClusterId();
          ClusterDetailsVO cluster_detail_cpu = _clusterDetailsDao.findDetail(cluster_id, "cpuOvercommitRatio");
          ClusterDetailsVO cluster_detail_ram = _clusterDetailsDao.findDetail(cluster_id, "memoryOvercommitRatio");
          Float cpuOvercommitRatio = Float.parseFloat(cluster_detail_cpu.getValue());
          Float memoryOvercommitRatio = Float.parseFloat(cluster_detail_ram.getValue());

          boolean hostHasCpuCapability, hostHasCapacity = false;
          hostHasCpuCapability = checkIfHostHasCpuCapability(host.getId(), cpucore, cpuspeed);

          if (hostHasCpuCapability) {
            // first check from reserved capacity
            hostHasCapacity = checkIfHostHasCapacity(host.getId(), cpu, ram, true, cpuOvercommitRatio, memoryOvercommitRatio, true);

            // if not reserved, check the free capacity
            if (!hostHasCapacity)
              hostHasCapacity = checkIfHostHasCapacity(host.getId(), cpu, ram, false, cpuOvercommitRatio, memoryOvercommitRatio, true);
          }

          if (!hostHasCapacity || !hostHasCpuCapability) {
            throw new CloudRuntimeException("Host does not have enough capacity for vm " + vmId);
          }

          _capacityDao.update(capacityCpu.getId(), capacityCpu);
          _capacityDao.update(capacityMem.getId(), capacityMem);
          _capacityDao.update(capacityCpuCore.getId(), capacityCpuCore);
        }
      });
    } catch (Exception e) {
      s_logger.error("Exception allocating VM capacity", e);
      if (e instanceof CloudRuntimeException) {
        throw e;
      }
      return;
    }
  }

  /**
   * Checks whether the host's advertised CPU count and speed can satisfy the
   * requested values (simple >= comparison on both).
   */
  @Override
  public boolean checkIfHostHasCpuCapability(long hostId, Integer cpuNum, Integer cpuSpeed) {

    // Check host can support the Cpu Number and Speed.
    Host host = _hostDao.findById(hostId);
    boolean isCpuNumGood = host.getCpus().intValue() >= cpuNum;
    boolean isCpuSpeedGood = host.getSpeed().intValue() >= cpuSpeed;
    if (isCpuNumGood && isCpuSpeedGood) {
      if (s_logger.isDebugEnabled()) {
        s_logger.debug("Host: " + hostId + " has cpu capability (cpu:" + host.getCpus() + ", speed:" + host.getSpeed() +
          ") to support requested CPU: " + cpuNum + " and requested speed: " + cpuSpeed);
      }
      return true;
    } else {
      if (s_logger.isDebugEnabled()) {
        s_logger.debug("Host: " + hostId + " doesn't have cpu capability (cpu:" + host.getCpus() + ", speed:" + host.getSpeed() +
          ") to support requested CPU: " + cpuNum + " and requested speed: " + cpuSpeed);
      }
      return false;
    }
  }

  @Override
  public boolean checkIfHostHasCapacity(long hostId, Integer cpu, long ram, boolean checkFromReservedCapacity, float cpuOvercommitRatio,
      float memoryOvercommitRatio, boolean considerReservedCapacity) {
    boolean hasCapacity = false;

    if (s_logger.isDebugEnabled()) {
      s_logger.debug("Checking if host: " + hostId + " has enough capacity for requested CPU: " + cpu + " and requested RAM: " + ram +
        " , cpuOverprovisioningFactor: " + cpuOvercommitRatio);
    }

    CapacityVO capacityCpu = _capacityDao.findByHostIdType(hostId,
Capacity.CAPACITY_TYPE_CPU);
    CapacityVO capacityMem = _capacityDao.findByHostIdType(hostId, Capacity.CAPACITY_TYPE_MEMORY);

    // Without both capacity rows no meaningful check is possible — report "no capacity".
    if (capacityCpu == null || capacityMem == null) {
        if (capacityCpu == null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Cannot checkIfHostHasCapacity, Capacity entry for CPU not found in Db, for hostId: " + hostId);
            }
        }
        if (capacityMem == null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Cannot checkIfHostHasCapacity, Capacity entry for RAM not found in Db, for hostId: " + hostId);
            }
        }
        return false;
    }

    long usedCpu = capacityCpu.getUsedCapacity();
    long usedMem = capacityMem.getUsedCapacity();
    long reservedCpu = capacityCpu.getReservedCapacity();
    long reservedMem = capacityMem.getReservedCapacity();
    long actualTotalCpu = capacityCpu.getTotalCapacity();
    long actualTotalMem = capacityMem.getTotalCapacity();
    // Effective totals after applying the cluster overcommit ratios.
    long totalCpu = (long)(actualTotalCpu * cpuOvercommitRatio);
    long totalMem = (long)(actualTotalMem * memoryOvercommitRatio);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Hosts's actual total CPU: " + actualTotalCpu + " and CPU after applying overprovisioning: " + totalCpu);
    }

    String failureReason = "";
    if (checkFromReservedCapacity) {
        // Last-host path: the request must fit entirely inside the reserved amounts.
        long freeCpu = reservedCpu;
        long freeMem = reservedMem;

        if (s_logger.isDebugEnabled()) {
            s_logger.debug("We need to allocate to the last host again, so checking if there is enough reserved capacity");
            s_logger.debug("Reserved CPU: " + freeCpu + " , Requested CPU: " + cpu);
            s_logger.debug("Reserved RAM: " + freeMem + " , Requested RAM: " + ram);
        }
        /* alloc from reserved */
        if (reservedCpu >= cpu) {
            if (reservedMem >= ram) {
                hasCapacity = true;
            } else {
                failureReason = "Host does not have enough reserved RAM available";
            }
        } else {
            failureReason = "Host does not have enough reserved CPU available";
        }
    } else {
        long reservedCpuValueToUse = reservedCpu;
        long reservedMemValueToUse = reservedMem;
        // When reserved capacity is ignored, treat it as zero in the free-space math.
        if (!considerReservedCapacity) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("considerReservedCapacity is" + considerReservedCapacity + " , not considering reserved capacity for calculating free capacity");
            }
            reservedCpuValueToUse = 0;
            reservedMemValueToUse = 0;
        }
        long freeCpu = totalCpu - (reservedCpuValueToUse + usedCpu);
        long freeMem = totalMem - (reservedMemValueToUse + usedMem);

        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Free CPU: " + freeCpu + " , Requested CPU: " + cpu);
            s_logger.debug("Free RAM: " + freeMem + " , Requested RAM: " + ram);
        }
        /* alloc from free resource */
        if ((reservedCpuValueToUse + usedCpu + cpu <= totalCpu)) {
            if ((reservedMemValueToUse + usedMem + ram <= totalMem)) {
                hasCapacity = true;
            } else {
                failureReason = "Host does not have enough RAM available";
            }
        } else {
            failureReason = "Host does not have enough CPU available";
        }
    }

    if (hasCapacity) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Host has enough CPU and RAM available");
        }

        s_logger.debug("STATS: Can alloc CPU from host: " + hostId + ", used: " + usedCpu + ", reserved: " + reservedCpu + ", actual total: " + actualTotalCpu +
            ", total with overprovisioning: " + totalCpu + "; requested cpu:" + cpu + ",alloc_from_last_host?:" + checkFromReservedCapacity +
            " ,considerReservedCapacity?: " + considerReservedCapacity);

        s_logger.debug("STATS: Can alloc MEM from host: " + hostId + ", used: " + usedMem + ", reserved: " + reservedMem + ", total: " + totalMem +
            "; requested mem: " + ram + ",alloc_from_last_host?:" + checkFromReservedCapacity + " ,considerReservedCapacity?: " + considerReservedCapacity);
    } else {

        if (checkFromReservedCapacity) {
            s_logger.debug("STATS: Failed to alloc resource from host: " + hostId + " reservedCpu: " + reservedCpu + ", requested cpu: " + cpu + ", reservedMem: " +
                reservedMem + ", requested mem: " + ram);
        } else {
            s_logger.debug("STATS: Failed to alloc resource from host: " + hostId + " reservedCpu: " + reservedCpu + ", used cpu: " + usedCpu + ", requested cpu: " +
                cpu + ", actual total cpu: " + actualTotalCpu + ", total cpu with overprovisioning: " + totalCpu + ", reservedMem: " + reservedMem + ", used Mem: " +
                usedMem + ", requested mem: " + ram + ", total Mem:" + totalMem + " ,considerReservedCapacity?: " + considerReservedCapacity);
        }

        if (s_logger.isDebugEnabled()) {
            s_logger.debug(failureReason + ", cannot allocate to this host.");
        }
    }

    return hasCapacity;
}

/**
 * Returns the bytes used on the pool as reported by its primary data-store driver.
 * Throws if the pool's driver is not a PrimaryDataStoreDriver.
 */
@Override
public long getUsedBytes(StoragePoolVO pool) {
    DataStoreProvider storeProvider = _dataStoreProviderMgr.getDataStoreProvider(pool.getStorageProviderName());
    DataStoreDriver storeDriver = storeProvider.getDataStoreDriver();

    if (storeDriver instanceof PrimaryDataStoreDriver) {
        PrimaryDataStoreDriver primaryStoreDriver = (PrimaryDataStoreDriver)storeDriver;

        return primaryStoreDriver.getUsedBytes(pool);
    }

    throw new CloudRuntimeException("Storage driver in CapacityManagerImpl.getUsedBytes(StoragePoolVO) is not a PrimaryDataStoreDriver.");
}

/**
 * Returns the IOPS used on the pool as reported by its primary data-store driver.
 * Throws if the pool's driver is not a PrimaryDataStoreDriver.
 */
@Override
public long getUsedIops(StoragePoolVO pool) {
    DataStoreProvider storeProvider = _dataStoreProviderMgr.getDataStoreProvider(pool.getStorageProviderName());
    DataStoreDriver storeDriver = storeProvider.getDataStoreDriver();

    if (storeDriver instanceof PrimaryDataStoreDriver) {
        PrimaryDataStoreDriver primaryStoreDriver = (PrimaryDataStoreDriver)storeDriver;

        return primaryStoreDriver.getUsedIops(pool);
    }

    throw new CloudRuntimeException("Storage driver in CapacityManagerImpl.getUsedIops(StoragePoolVO) is not a PrimaryDataStoreDriver.");
}

@Override
public long getAllocatedPoolCapacity(StoragePoolVO pool, VMTemplateVO templateForVmCreation) {

    long totalAllocatedSize = 0;

    // if the storage pool is managed, the used bytes can be larger than the sum of the sizes of all of the non-destroyed volumes
    // in this case, call getUsedBytes(StoragePoolVO)
    if (pool.isManaged()) {
        return getUsedBytes(pool);
    } else {
        // Get size for all the non-destroyed volumes.
Pair<Long, Long> sizes = _volumeDao.getNonDestroyedCountAndTotalByPool(pool.getId()); totalAllocatedSize = sizes.second() + sizes.first() * _extraBytesPerVolume; } // Get size for VM Snapshots. totalAllocatedSize += _volumeDao.getVMSnapshotSizeByPool(pool.getId()); boolean tmpInstalled = false; // Iterate through all templates on this storage pool. List<VMTemplateStoragePoolVO> templatePoolVOs = _templatePoolDao.listByPoolId(pool.getId()); for (VMTemplateStoragePoolVO templatePoolVO : templatePoolVOs) { if ((templateForVmCreation != null) && !tmpInstalled && (templatePoolVO.getTemplateId() == templateForVmCreation.getId())) { tmpInstalled = true; } long templateSize = templatePoolVO.getTemplateSize(); totalAllocatedSize += templateSize + _extraBytesPerVolume; } if ((templateForVmCreation != null) && !tmpInstalled) { long templateForVmCreationSize = templateForVmCreation.getSize() != null ? templateForVmCreation.getSize() : 0; totalAllocatedSize += templateForVmCreationSize + _extraBytesPerVolume; } return totalAllocatedSize; } @DB @Override public void updateCapacityForHost(final Host host) { // prepare the service offerings List<ServiceOfferingVO> offerings = _offeringsDao.listAllIncludingRemoved(); Map<Long, ServiceOfferingVO> offeringsMap = new HashMap<Long, ServiceOfferingVO>(); for (ServiceOfferingVO offering : offerings) { offeringsMap.put(offering.getId(), offering); } long usedCpuCore = 0; long reservedCpuCore = 0; long usedCpu = 0; long usedMemory = 0; long reservedMemory = 0; long reservedCpu = 0; final CapacityState capacityState = (host.getResourceState() == ResourceState.Enabled) ? 
CapacityState.Enabled : CapacityState.Disabled; List<VMInstanceVO> vms = _vmDao.listUpByHostId(host.getId()); if (s_logger.isDebugEnabled()) { s_logger.debug("Found " + vms.size() + " VMs on host " + host.getId()); } final List<VMInstanceVO> vosMigrating = _vmDao.listVmsMigratingFromHost(host.getId()); if (s_logger.isDebugEnabled()) { s_logger.debug("Found " + vosMigrating.size() + " VMs are Migrating from host " + host.getId()); } vms.addAll(vosMigrating); ClusterVO cluster = _clusterDao.findById(host.getClusterId()); ClusterDetailsVO clusterDetailCpu = _clusterDetailsDao.findDetail(cluster.getId(), "cpuOvercommitRatio"); ClusterDetailsVO clusterDetailRam = _clusterDetailsDao.findDetail(cluster.getId(), "memoryOvercommitRatio"); Float clusterCpuOvercommitRatio = Float.parseFloat(clusterDetailCpu.getValue()); Float clusterRamOvercommitRatio = Float.parseFloat(clusterDetailRam.getValue()); for (VMInstanceVO vm : vms) { Float cpuOvercommitRatio = 1.0f; Float ramOvercommitRatio = 1.0f; Map<String, String> vmDetails = _userVmDetailsDao.listDetailsKeyPairs(vm.getId()); String vmDetailCpu = vmDetails.get("cpuOvercommitRatio"); String vmDetailRam = vmDetails.get("memoryOvercommitRatio"); if (vmDetailCpu != null) { //if vmDetail_cpu is not null it means it is running in a overcommited cluster. 
cpuOvercommitRatio = Float.parseFloat(vmDetailCpu); ramOvercommitRatio = Float.parseFloat(vmDetailRam); } ServiceOffering so = offeringsMap.get(vm.getServiceOfferingId()); if (so.isDynamic()) { usedMemory += ((Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.memory.name())) * 1024L * 1024L) / ramOvercommitRatio) * clusterRamOvercommitRatio; if(vmDetails.containsKey(UsageEventVO.DynamicParameters.cpuSpeed.name())) { usedCpu += ((Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.cpuNumber.name())) * Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.cpuSpeed.name()))) / cpuOvercommitRatio) * clusterCpuOvercommitRatio; } else { usedCpu += ((Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.cpuNumber.name())) * so.getSpeed()) / cpuOvercommitRatio) * clusterCpuOvercommitRatio; } usedCpuCore += Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.cpuNumber.name())); } else { usedMemory += ((so.getRamSize() * 1024L * 1024L) / ramOvercommitRatio) * clusterRamOvercommitRatio; usedCpu += ((so.getCpu() * so.getSpeed()) / cpuOvercommitRatio) * clusterCpuOvercommitRatio; usedCpuCore += so.getCpu(); } } List<VMInstanceVO> vmsByLastHostId = _vmDao.listByLastHostId(host.getId()); if (s_logger.isDebugEnabled()) { s_logger.debug("Found " + vmsByLastHostId.size() + " VM, not running on host " + host.getId()); } for (VMInstanceVO vm : vmsByLastHostId) { Float cpuOvercommitRatio = 1.0f; Float ramOvercommitRatio = 1.0f; long secondsSinceLastUpdate = (DateUtil.currentGMTTime().getTime() - vm.getUpdateTime().getTime()) / 1000; if (secondsSinceLastUpdate < _vmCapacityReleaseInterval) { UserVmDetailVO vmDetailCpu = _userVmDetailsDao.findDetail(vm.getId(), VmDetailConstants.CPU_OVER_COMMIT_RATIO); UserVmDetailVO vmDetailRam = _userVmDetailsDao.findDetail(vm.getId(), VmDetailConstants.MEMORY_OVER_COMMIT_RATIO); if (vmDetailCpu != null) { //if vmDetail_cpu is not null it means it is running in a overcommited cluster. 
cpuOvercommitRatio = Float.parseFloat(vmDetailCpu.getValue()); ramOvercommitRatio = Float.parseFloat(vmDetailRam.getValue()); } ServiceOffering so = offeringsMap.get(vm.getServiceOfferingId()); Map<String, String> vmDetails = _userVmDetailsDao.listDetailsKeyPairs(vm.getId()); if (so.isDynamic()) { reservedMemory += ((Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.memory.name())) * 1024L * 1024L) / ramOvercommitRatio) * clusterRamOvercommitRatio; if(vmDetails.containsKey(UsageEventVO.DynamicParameters.cpuSpeed.name())) { reservedCpu += ((Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.cpuNumber.name())) * Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.cpuSpeed.name()))) / cpuOvercommitRatio) * clusterCpuOvercommitRatio; } else { reservedCpu += ((Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.cpuNumber.name())) * so.getSpeed()) / cpuOvercommitRatio) * clusterCpuOvercommitRatio; } reservedCpuCore += Integer.parseInt(vmDetails.get(UsageEventVO.DynamicParameters.cpuNumber.name())); } else { reservedMemory += ((so.getRamSize() * 1024L * 1024L) / ramOvercommitRatio) * clusterRamOvercommitRatio; reservedCpu += (so.getCpu() * so.getSpeed() / cpuOvercommitRatio) * clusterCpuOvercommitRatio; reservedCpuCore += so.getCpu(); } } else { // signal if not done already, that the VM has been stopped for skip.counting.hours, // hence capacity will not be reserved anymore. 
UserVmDetailVO messageSentFlag = _userVmDetailsDao.findDetail(vm.getId(), VmDetailConstants.MESSAGE_RESERVED_CAPACITY_FREED_FLAG); if (messageSentFlag == null || !Boolean.valueOf(messageSentFlag.getValue())) { _messageBus.publish(_name, "VM_ReservedCapacity_Free", PublishScope.LOCAL, vm); if (vm.getType() == VirtualMachine.Type.User) { UserVmVO userVM = _userVMDao.findById(vm.getId()); _userVMDao.loadDetails(userVM); userVM.setDetail(VmDetailConstants.MESSAGE_RESERVED_CAPACITY_FREED_FLAG, "true"); _userVMDao.saveDetails(userVM); } } } } CapacityVO cpuCap = _capacityDao.findByHostIdType(host.getId(), Capacity.CAPACITY_TYPE_CPU); CapacityVO memCap = _capacityDao.findByHostIdType(host.getId(), Capacity.CAPACITY_TYPE_MEMORY); CapacityVO cpuCoreCap = _capacityDao.findByHostIdType(host.getId(), CapacityVO.CAPACITY_TYPE_CPU_CORE); if (cpuCoreCap != null) { long hostTotalCpuCore = host.getCpus().longValue(); if (cpuCoreCap.getTotalCapacity() != hostTotalCpuCore) { s_logger.debug("Calibrate total cpu for host: " + host.getId() + " old total CPU:" + cpuCoreCap.getTotalCapacity() + " new total CPU:" + hostTotalCpuCore); cpuCoreCap.setTotalCapacity(hostTotalCpuCore); } if (cpuCoreCap.getUsedCapacity() == usedCpuCore && cpuCoreCap.getReservedCapacity() == reservedCpuCore) { s_logger.debug("No need to calibrate cpu capacity, host:" + host.getId() + " usedCpuCore: " + cpuCoreCap.getUsedCapacity() + " reservedCpuCore: " + cpuCoreCap.getReservedCapacity()); } else { if (cpuCoreCap.getReservedCapacity() != reservedCpuCore) { s_logger.debug("Calibrate reserved cpu core for host: " + host.getId() + " old reservedCpuCore:" + cpuCoreCap.getReservedCapacity() + " new reservedCpuCore:" + reservedCpuCore); cpuCoreCap.setReservedCapacity(reservedCpuCore); } if (cpuCoreCap.getUsedCapacity() != usedCpuCore) { s_logger.debug("Calibrate used cpu core for host: " + host.getId() + " old usedCpuCore:" + cpuCoreCap.getUsedCapacity() + " new usedCpuCore:" + usedCpuCore); 
cpuCoreCap.setUsedCapacity(usedCpuCore); } } try { _capacityDao.update(cpuCoreCap.getId(), cpuCoreCap); } catch (Exception e) { s_logger.error("Caught exception while updating cpucore capacity for the host " +host.getId(), e); } } else { final long usedCpuCoreFinal = usedCpuCore; final long reservedCpuCoreFinal = reservedCpuCore; Transaction.execute(new TransactionCallbackNoReturn() { @Override public void doInTransactionWithoutResult(TransactionStatus status) { CapacityVO capacity = new CapacityVO(host.getId(), host.getDataCenterId(), host.getPodId(), host.getClusterId(), usedCpuCoreFinal, host.getCpus().longValue(), CapacityVO.CAPACITY_TYPE_CPU_CORE); capacity.setReservedCapacity(reservedCpuCoreFinal); capacity.setCapacityState(capacityState); _capacityDao.persist(capacity); } }); } if (cpuCap != null && memCap != null) { if (host.getTotalMemory() != null) { memCap.setTotalCapacity(host.getTotalMemory()); } long hostTotalCpu = host.getCpus().longValue() * host.getSpeed().longValue(); if (cpuCap.getTotalCapacity() != hostTotalCpu) { s_logger.debug("Calibrate total cpu for host: " + host.getId() + " old total CPU:" + cpuCap.getTotalCapacity() + " new total CPU:" + hostTotalCpu); cpuCap.setTotalCapacity(hostTotalCpu); } // Set the capacity state as per the host allocation state. 
if(capacityState != cpuCap.getCapacityState()){ s_logger.debug("Calibrate cpu capacity state for host: " + host.getId() + " old capacity state:" + cpuCap.getTotalCapacity() + " new capacity state:" + hostTotalCpu); cpuCap.setCapacityState(capacityState); } memCap.setCapacityState(capacityState); if (cpuCap.getUsedCapacity() == usedCpu && cpuCap.getReservedCapacity() == reservedCpu) { s_logger.debug("No need to calibrate cpu capacity, host:" + host.getId() + " usedCpu: " + cpuCap.getUsedCapacity() + " reservedCpu: " + cpuCap.getReservedCapacity()); } else { if (cpuCap.getReservedCapacity() != reservedCpu) { s_logger.debug("Calibrate reserved cpu for host: " + host.getId() + " old reservedCpu:" + cpuCap.getReservedCapacity() + " new reservedCpu:" + reservedCpu); cpuCap.setReservedCapacity(reservedCpu); } if (cpuCap.getUsedCapacity() != usedCpu) { s_logger.debug("Calibrate used cpu for host: " + host.getId() + " old usedCpu:" + cpuCap.getUsedCapacity() + " new usedCpu:" + usedCpu); cpuCap.setUsedCapacity(usedCpu); } } if (memCap.getTotalCapacity() != host.getTotalMemory()) { s_logger.debug("Calibrate total memory for host: " + host.getId() + " old total memory:" + memCap.getTotalCapacity() + " new total memory:" + host.getTotalMemory()); memCap.setTotalCapacity(host.getTotalMemory()); } // Set the capacity state as per the host allocation state. 
if(capacityState != memCap.getCapacityState()){ s_logger.debug("Calibrate memory capacity state for host: " + host.getId() + " old capacity state:" + memCap.getTotalCapacity() + " new capacity state:" + hostTotalCpu); memCap.setCapacityState(capacityState); } if (memCap.getUsedCapacity() == usedMemory && memCap.getReservedCapacity() == reservedMemory) { s_logger.debug("No need to calibrate memory capacity, host:" + host.getId() + " usedMem: " + memCap.getUsedCapacity() + " reservedMem: " + memCap.getReservedCapacity()); } else { if (memCap.getReservedCapacity() != reservedMemory) { s_logger.debug("Calibrate reserved memory for host: " + host.getId() + " old reservedMem:" + memCap.getReservedCapacity() + " new reservedMem:" + reservedMemory); memCap.setReservedCapacity(reservedMemory); } if (memCap.getUsedCapacity() != usedMemory) { /* * Didn't calibrate for used memory, because VMs can be in * state(starting/migrating) that I don't know on which host * they are allocated */ s_logger.debug("Calibrate used memory for host: " + host.getId() + " old usedMem: " + memCap.getUsedCapacity() + " new usedMem: " + usedMemory); memCap.setUsedCapacity(usedMemory); } } try { _capacityDao.update(cpuCap.getId(), cpuCap); _capacityDao.update(memCap.getId(), memCap); } catch (Exception e) { s_logger.error("Caught exception while updating cpu/memory capacity for the host " + host.getId(), e); } } else { final long usedMemoryFinal = usedMemory; final long reservedMemoryFinal = reservedMemory; final long usedCpuFinal = usedCpu; final long reservedCpuFinal = reservedCpu; Transaction.execute(new TransactionCallbackNoReturn() { @Override public void doInTransactionWithoutResult(TransactionStatus status) { CapacityVO capacity = new CapacityVO(host.getId(), host.getDataCenterId(), host.getPodId(), host.getClusterId(), usedMemoryFinal, host.getTotalMemory(), Capacity.CAPACITY_TYPE_MEMORY); capacity.setReservedCapacity(reservedMemoryFinal); capacity.setCapacityState(capacityState); 
_capacityDao.persist(capacity); capacity = new CapacityVO(host.getId(), host.getDataCenterId(), host.getPodId(), host.getClusterId(), usedCpuFinal, host.getCpus().longValue() * host.getSpeed().longValue(), Capacity.CAPACITY_TYPE_CPU); capacity.setReservedCapacity(reservedCpuFinal); capacity.setCapacityState(capacityState); _capacityDao.persist(capacity); } }); } } @Override public boolean preStateTransitionEvent(State oldState, Event event, State newState, VirtualMachine vm, boolean transitionStatus, Object opaque) { return true; } @Override public boolean postStateTransitionEvent(StateMachine2.Transition<State, Event> transition, VirtualMachine vm, boolean status, Object opaque) { if (!status) { return false; } @SuppressWarnings("unchecked") Pair<Long, Long> hosts = (Pair<Long, Long>)opaque; Long oldHostId = hosts.first(); State oldState = transition.getCurrentState(); State newState = transition.getToState(); Event event = transition.getEvent(); s_logger.debug("VM state transitted from :" + oldState + " to " + newState + " with event: " + event + "vm's original host id: " + vm.getLastHostId() + " new host id: " + vm.getHostId() + " host id before state transition: " + oldHostId); if (oldState == State.Starting) { if (newState != State.Running) { releaseVmCapacity(vm, false, false, oldHostId); } } else if (oldState == State.Running) { if (event == Event.AgentReportStopped) { releaseVmCapacity(vm, false, true, oldHostId); } else if (event == Event.AgentReportMigrated) { releaseVmCapacity(vm, false, false, oldHostId); } } else if (oldState == State.Migrating) { if (event == Event.AgentReportStopped) { /* Release capacity from original host */ releaseVmCapacity(vm, false, false, vm.getLastHostId()); releaseVmCapacity(vm, false, false, oldHostId); } else if (event == Event.OperationFailed) { /* Release from dest host */ releaseVmCapacity(vm, false, false, oldHostId); } else if (event == Event.OperationSucceeded) { releaseVmCapacity(vm, false, false, 
vm.getLastHostId()); } } else if (oldState == State.Stopping) { if (event == Event.OperationSucceeded) { releaseVmCapacity(vm, false, true, oldHostId); } else if (event == Event.AgentReportStopped) { releaseVmCapacity(vm, false, false, oldHostId); } else if (event == Event.AgentReportMigrated) { releaseVmCapacity(vm, false, false, oldHostId); } } else if (oldState == State.Stopped) { if (event == Event.DestroyRequested || event == Event.ExpungeOperation) { releaseVmCapacity(vm, true, false, vm.getLastHostId()); } else if (event == Event.AgentReportMigrated) { releaseVmCapacity(vm, false, false, oldHostId); } } if ((newState == State.Starting || newState == State.Migrating || event == Event.AgentReportMigrated) && vm.getHostId() != null) { boolean fromLastHost = false; if (vm.getHostId().equals(vm.getLastHostId())) { s_logger.debug("VM starting again on the last host it was stopped on"); fromLastHost = true; } allocateVmCapacity(vm, fromLastHost); } if (newState == State.Stopped) { if (vm.getType() == VirtualMachine.Type.User) { UserVmVO userVM = _userVMDao.findById(vm.getId()); _userVMDao.loadDetails(userVM); // free the message sent flag if it exists userVM.setDetail(VmDetailConstants.MESSAGE_RESERVED_CAPACITY_FREED_FLAG, "false"); _userVMDao.saveDetails(userVM); } } return true; } // TODO: Get rid of this case once we've determined that the capacity listeners above have all the changes // create capacity entries if none exist for this server private void createCapacityEntry(StartupCommand startup, HostVO server) { SearchCriteria<CapacityVO> capacitySC = _capacityDao.createSearchCriteria(); capacitySC.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, server.getId()); capacitySC.addAnd("dataCenterId", SearchCriteria.Op.EQ, server.getDataCenterId()); capacitySC.addAnd("podId", SearchCriteria.Op.EQ, server.getPodId()); if (startup instanceof StartupRoutingCommand) { SearchCriteria<CapacityVO> capacityCPU = _capacityDao.createSearchCriteria(); 
capacityCPU.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, server.getId()); capacityCPU.addAnd("dataCenterId", SearchCriteria.Op.EQ, server.getDataCenterId()); capacityCPU.addAnd("podId", SearchCriteria.Op.EQ, server.getPodId()); capacityCPU.addAnd("capacityType", SearchCriteria.Op.EQ, Capacity.CAPACITY_TYPE_CPU); List<CapacityVO> capacityVOCpus = _capacityDao.search(capacitySC, null); Float cpuovercommitratio = Float.parseFloat(_clusterDetailsDao.findDetail(server.getClusterId(), "cpuOvercommitRatio").getValue()); Float memoryOvercommitRatio = Float.parseFloat(_clusterDetailsDao.findDetail(server.getClusterId(), "memoryOvercommitRatio").getValue()); if (capacityVOCpus != null && !capacityVOCpus.isEmpty()) { CapacityVO CapacityVOCpu = capacityVOCpus.get(0); long newTotalCpu = (long)(server.getCpus().longValue() * server.getSpeed().longValue() * cpuovercommitratio); if ((CapacityVOCpu.getTotalCapacity() <= newTotalCpu) || ((CapacityVOCpu.getUsedCapacity() + CapacityVOCpu.getReservedCapacity()) <= newTotalCpu)) { CapacityVOCpu.setTotalCapacity(newTotalCpu); } else if ((CapacityVOCpu.getUsedCapacity() + CapacityVOCpu.getReservedCapacity() > newTotalCpu) && (CapacityVOCpu.getUsedCapacity() < newTotalCpu)) { CapacityVOCpu.setReservedCapacity(0); CapacityVOCpu.setTotalCapacity(newTotalCpu); } else { s_logger.debug("What? 
new cpu is :" + newTotalCpu + ", old one is " + CapacityVOCpu.getUsedCapacity() + "," + CapacityVOCpu.getReservedCapacity() + "," + CapacityVOCpu.getTotalCapacity()); } _capacityDao.update(CapacityVOCpu.getId(), CapacityVOCpu); } else { CapacityVO capacity = new CapacityVO(server.getId(), server.getDataCenterId(), server.getPodId(), server.getClusterId(), 0L, server.getCpus().longValue() * server.getSpeed().longValue(), Capacity.CAPACITY_TYPE_CPU); _capacityDao.persist(capacity); } SearchCriteria<CapacityVO> capacityMem = _capacityDao.createSearchCriteria(); capacityMem.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, server.getId()); capacityMem.addAnd("dataCenterId", SearchCriteria.Op.EQ, server.getDataCenterId()); capacityMem.addAnd("podId", SearchCriteria.Op.EQ, server.getPodId()); capacityMem.addAnd("capacityType", SearchCriteria.Op.EQ, Capacity.CAPACITY_TYPE_MEMORY); List<CapacityVO> capacityVOMems = _capacityDao.search(capacityMem, null); if (capacityVOMems != null && !capacityVOMems.isEmpty()) { CapacityVO CapacityVOMem = capacityVOMems.get(0); long newTotalMem = (long)((server.getTotalMemory()) * memoryOvercommitRatio); if (CapacityVOMem.getTotalCapacity() <= newTotalMem || (CapacityVOMem.getUsedCapacity() + CapacityVOMem.getReservedCapacity() <= newTotalMem)) { CapacityVOMem.setTotalCapacity(newTotalMem); } else if (CapacityVOMem.getUsedCapacity() + CapacityVOMem.getReservedCapacity() > newTotalMem && CapacityVOMem.getUsedCapacity() < newTotalMem) { CapacityVOMem.setReservedCapacity(0); CapacityVOMem.setTotalCapacity(newTotalMem); } else { s_logger.debug("What? 
new cpu is :" + newTotalMem + ", old one is " + CapacityVOMem.getUsedCapacity() + "," + CapacityVOMem.getReservedCapacity() + "," + CapacityVOMem.getTotalCapacity()); } _capacityDao.update(CapacityVOMem.getId(), CapacityVOMem); } else { CapacityVO capacity = new CapacityVO(server.getId(), server.getDataCenterId(), server.getPodId(), server.getClusterId(), 0L, server.getTotalMemory(), Capacity.CAPACITY_TYPE_MEMORY); _capacityDao.persist(capacity); } } } @Override public float getClusterOverProvisioningFactor(Long clusterId, short capacityType) { String capacityOverProvisioningName = ""; if (capacityType == Capacity.CAPACITY_TYPE_CPU) { capacityOverProvisioningName = "cpuOvercommitRatio"; } else if (capacityType == Capacity.CAPACITY_TYPE_MEMORY) { capacityOverProvisioningName = "memoryOvercommitRatio"; } else { throw new CloudRuntimeException("Invalid capacityType - " + capacityType); } ClusterDetailsVO clusterDetailCpu = _clusterDetailsDao.findDetail(clusterId, capacityOverProvisioningName); Float clusterOverProvisioningRatio = Float.parseFloat(clusterDetailCpu.getValue()); return clusterOverProvisioningRatio; } @Override public boolean checkIfClusterCrossesThreshold(Long clusterId, Integer cpuRequested, long ramRequested) { Float clusterCpuOverProvisioning = getClusterOverProvisioningFactor(clusterId, Capacity.CAPACITY_TYPE_CPU); Float clusterMemoryOverProvisioning = getClusterOverProvisioningFactor(clusterId, Capacity.CAPACITY_TYPE_MEMORY); Float clusterCpuCapacityDisableThreshold = DeploymentClusterPlanner.ClusterCPUCapacityDisableThreshold.valueIn(clusterId); Float clusterMemoryCapacityDisableThreshold = DeploymentClusterPlanner.ClusterMemoryCapacityDisableThreshold.valueIn(clusterId); float cpuConsumption = _capacityDao.findClusterConsumption(clusterId, Capacity.CAPACITY_TYPE_CPU, cpuRequested); if (cpuConsumption / clusterCpuOverProvisioning > clusterCpuCapacityDisableThreshold) { s_logger.debug("Cluster: " + clusterId + " cpu consumption " + cpuConsumption / 
clusterCpuOverProvisioning + " crosses disable threshold " + clusterCpuCapacityDisableThreshold); return true; } float memoryConsumption = _capacityDao.findClusterConsumption(clusterId, Capacity.CAPACITY_TYPE_MEMORY, ramRequested); if (memoryConsumption / clusterMemoryOverProvisioning > clusterMemoryCapacityDisableThreshold) { s_logger.debug("Cluster: " + clusterId + " memory consumption " + memoryConsumption / clusterMemoryOverProvisioning + " crosses disable threshold " + clusterMemoryCapacityDisableThreshold); return true; } return false; } @Override public boolean processAnswers(long agentId, long seq, Answer[] answers) { // TODO Auto-generated method stub return false; } @Override public boolean processCommands(long agentId, long seq, Command[] commands) { // TODO Auto-generated method stub return false; } @Override public AgentControlAnswer processControlCommand(long agentId, AgentControlCommand cmd) { // TODO Auto-generated method stub return null; } @Override public void processHostAdded(long hostId) { } @Override public void processConnect(Host host, StartupCommand cmd, boolean forRebalance) throws ConnectionException { // TODO Auto-generated method stub } @Override public boolean processDisconnect(long agentId, Status state) { // TODO Auto-generated method stub return false; } @Override public void processHostAboutToBeRemoved(long hostId) { } @Override public void processHostRemoved(long hostId, long clusterId) { } @Override public boolean isRecurring() { // TODO Auto-generated method stub return false; } @Override public int getTimeout() { // TODO Auto-generated method stub return 0; } @Override public boolean processTimeout(long agentId, long seq) { // TODO Auto-generated method stub return false; } @Override public void processCancelMaintenaceEventAfter(Long hostId) { updateCapacityForHost(_hostDao.findById(hostId)); } @Override public void processCancelMaintenaceEventBefore(Long hostId) { // TODO Auto-generated method stub } @Override public void 
processDeletHostEventAfter(Host host) {
    // TODO Auto-generated method stub
    // NOTE(review): the "Delet" typo comes from the listener interface's method
    // name; it cannot be corrected here without changing that interface.
}

@Override
public void processDeleteHostEventBefore(Host host) {
    // TODO Auto-generated method stub
}

@Override
public void processDiscoverEventAfter(Map<? extends ServerResource, Map<String, String>> resources) {
    // TODO Auto-generated method stub
}

@Override
public void processDiscoverEventBefore(Long dcid, Long podId, Long clusterId, URI uri, String username, String password, List<String> hostTags) {
    // TODO Auto-generated method stub
}

@Override
public void processPrepareMaintenaceEventAfter(Long hostId) {
    // Host is entering maintenance: remove its memory and CPU capacity rows so
    // the allocator stops counting this host's resources.
    _capacityDao.removeBy(Capacity.CAPACITY_TYPE_MEMORY, null, null, null, hostId);
    _capacityDao.removeBy(Capacity.CAPACITY_TYPE_CPU, null, null, null, hostId);
}

@Override
public void processPrepareMaintenaceEventBefore(Long hostId) {
    // TODO Auto-generated method stub
}

/**
 * Returns true when the number of active VMs on the host has reached the
 * hypervisor's configured maximum guest limit.
 */
@Override
public boolean checkIfHostReachMaxGuestLimit(Host host) {
    // Active VM count on this host (the log message below indicates this
    // includes system VMs).
    Long vmCount = _vmDao.countActiveByHostId(host.getId());
    HypervisorType hypervisorType = host.getHypervisorType();
    String hypervisorVersion = host.getHypervisorVersion();
    // Per hypervisor/version guest limit looked up from the capabilities table.
    // NOTE(review): if getMaxGuestsLimit can return null for an unknown
    // hypervisor/version combination, longValue() below would NPE — confirm.
    Long maxGuestLimit = _hypervisorCapabilitiesDao.getMaxGuestsLimit(hypervisorType, hypervisorVersion);
    if (vmCount.longValue() >= maxGuestLimit.longValue()) {
        s_logger.info("Host name: " + host.getName() + ", hostId: " + host.getId() + " already reached max Running VMs(count includes system VMs), limit: " + maxGuestLimit + ", Running VM count: " + vmCount.longValue());
        return true;
    }
    return false;
}

@Override
public String getConfigComponentName() {
    // Name under which this component's configuration is registered.
    return CapacityManager.class.getSimpleName();
}

@Override
public ConfigKey<?>[] getConfigKeys() {
    // Configuration keys owned by the capacity manager.
    return new ConfigKey<?>[] {CpuOverprovisioningFactor, MemOverprovisioningFactor, StorageCapacityDisableThreshold, StorageOverprovisioningFactor, StorageAllocatedCapacityDisableThreshold, StorageOperationsExcludeCluster, VmwareCreateCloneFull, ImageStoreNFSVersion};
}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.server; import com.facebook.presto.GroupByHashPageIndexerFactory; import com.facebook.presto.PagesIndexPageSorter; import com.facebook.presto.SystemSessionProperties; import com.facebook.presto.block.BlockEncodingManager; import com.facebook.presto.block.BlockJsonSerde; import com.facebook.presto.client.NodeVersion; import com.facebook.presto.connector.ConnectorManager; import com.facebook.presto.connector.system.SystemConnectorModule; import com.facebook.presto.cost.CoefficientBasedCostCalculator; import com.facebook.presto.cost.CostCalculator; import com.facebook.presto.event.query.QueryMonitor; import com.facebook.presto.event.query.QueryMonitorConfig; import com.facebook.presto.execution.LocationFactory; import com.facebook.presto.execution.NodeTaskMap; import com.facebook.presto.execution.QueryManager; import com.facebook.presto.execution.QueryManagerConfig; import com.facebook.presto.execution.QueryPerformanceFetcher; import com.facebook.presto.execution.QueryPerformanceFetcherProvider; import com.facebook.presto.execution.SqlTaskManager; import com.facebook.presto.execution.StageInfo; import com.facebook.presto.execution.TaskInfo; import com.facebook.presto.execution.TaskManager; import com.facebook.presto.execution.TaskManagerConfig; import com.facebook.presto.execution.TaskStatus; import com.facebook.presto.execution.executor.TaskExecutor; import 
com.facebook.presto.execution.resourceGroups.NoOpResourceGroupManager; import com.facebook.presto.execution.resourceGroups.ResourceGroupManager; import com.facebook.presto.execution.scheduler.FlatNetworkTopology; import com.facebook.presto.execution.scheduler.LegacyNetworkTopology; import com.facebook.presto.execution.scheduler.NetworkTopology; import com.facebook.presto.execution.scheduler.NodeScheduler; import com.facebook.presto.execution.scheduler.NodeSchedulerConfig; import com.facebook.presto.execution.scheduler.NodeSchedulerExporter; import com.facebook.presto.failureDetector.FailureDetector; import com.facebook.presto.failureDetector.FailureDetectorModule; import com.facebook.presto.index.IndexManager; import com.facebook.presto.memory.LocalMemoryManager; import com.facebook.presto.memory.LocalMemoryManagerExporter; import com.facebook.presto.memory.MemoryInfo; import com.facebook.presto.memory.MemoryManagerConfig; import com.facebook.presto.memory.MemoryPoolAssignmentsRequest; import com.facebook.presto.memory.MemoryResource; import com.facebook.presto.memory.NodeMemoryConfig; import com.facebook.presto.memory.ReservedSystemMemoryConfig; import com.facebook.presto.metadata.CatalogManager; import com.facebook.presto.metadata.DiscoveryNodeManager; import com.facebook.presto.metadata.ForNodeManager; import com.facebook.presto.metadata.HandleJsonModule; import com.facebook.presto.metadata.InternalNodeManager; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.MetadataManager; import com.facebook.presto.metadata.SchemaPropertyManager; import com.facebook.presto.metadata.SessionPropertyManager; import com.facebook.presto.metadata.StaticCatalogStore; import com.facebook.presto.metadata.StaticCatalogStoreConfig; import com.facebook.presto.metadata.TablePropertyManager; import com.facebook.presto.metadata.ViewDefinition; import com.facebook.presto.operator.ExchangeClientConfig; import 
com.facebook.presto.operator.ExchangeClientFactory; import com.facebook.presto.operator.ExchangeClientSupplier; import com.facebook.presto.operator.ForExchange; import com.facebook.presto.operator.LookupJoinOperators; import com.facebook.presto.operator.PagesIndex; import com.facebook.presto.operator.index.IndexJoinLookupStats; import com.facebook.presto.server.remotetask.HttpLocationFactory; import com.facebook.presto.spi.ConnectorSplit; import com.facebook.presto.spi.HostAddress; import com.facebook.presto.spi.PageIndexerFactory; import com.facebook.presto.spi.PageSorter; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockEncodingFactory; import com.facebook.presto.spi.block.BlockEncodingSerde; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeManager; import com.facebook.presto.spiller.FileSingleStreamSpillerFactory; import com.facebook.presto.spiller.GenericSpillerFactory; import com.facebook.presto.spiller.LocalSpillManager; import com.facebook.presto.spiller.NodeSpillConfig; import com.facebook.presto.spiller.SingleStreamSpillerFactory; import com.facebook.presto.spiller.SpillerFactory; import com.facebook.presto.spiller.SpillerStats; import com.facebook.presto.split.PageSinkManager; import com.facebook.presto.split.PageSinkProvider; import com.facebook.presto.split.PageSourceManager; import com.facebook.presto.split.PageSourceProvider; import com.facebook.presto.split.SplitManager; import com.facebook.presto.sql.Serialization.ExpressionDeserializer; import com.facebook.presto.sql.Serialization.ExpressionSerializer; import com.facebook.presto.sql.Serialization.FunctionCallDeserializer; import com.facebook.presto.sql.analyzer.FeaturesConfig; import com.facebook.presto.sql.gen.ExpressionCompiler; import com.facebook.presto.sql.gen.JoinCompiler; import com.facebook.presto.sql.gen.JoinFilterFunctionCompiler; import com.facebook.presto.sql.gen.JoinProbeCompiler; import 
com.facebook.presto.sql.gen.OrderingCompiler; import com.facebook.presto.sql.gen.PageFunctionCompiler; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.parser.SqlParserOptions; import com.facebook.presto.sql.planner.CompilerConfig; import com.facebook.presto.sql.planner.LocalExecutionPlanner; import com.facebook.presto.sql.planner.NodePartitioningManager; import com.facebook.presto.sql.planner.PlanOptimizers; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.transaction.ForTransactionManager; import com.facebook.presto.transaction.TransactionManager; import com.facebook.presto.transaction.TransactionManagerConfig; import com.facebook.presto.type.TypeDeserializer; import com.facebook.presto.type.TypeRegistry; import com.facebook.presto.util.FinalizerService; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.inject.Binder; import com.google.inject.Provides; import com.google.inject.Scopes; import com.google.inject.TypeLiteral; import io.airlift.concurrent.BoundedExecutor; import io.airlift.configuration.AbstractConfigurationAwareModule; import io.airlift.discovery.client.ServiceDescriptor; import io.airlift.http.client.HttpClientConfig; import io.airlift.slice.Slice; import io.airlift.stats.PauseMeter; import io.airlift.units.DataSize; import io.airlift.units.Duration; import javax.annotation.PreDestroy; import javax.inject.Inject; import javax.inject.Singleton; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import static com.facebook.presto.execution.scheduler.NodeSchedulerConfig.NetworkTopologyType.FLAT; import static com.facebook.presto.execution.scheduler.NodeSchedulerConfig.NetworkTopologyType.LEGACY; import static com.google.common.base.Preconditions.checkState; import 
static com.google.common.base.Strings.nullToEmpty; import static com.google.common.reflect.Reflection.newProxy; import static com.google.inject.multibindings.Multibinder.newSetBinder; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.airlift.configuration.ConditionalModule.installModuleIf; import static io.airlift.configuration.ConfigBinder.configBinder; import static io.airlift.discovery.client.DiscoveryBinder.discoveryBinder; import static io.airlift.http.client.HttpClientBinder.httpClientBinder; import static io.airlift.jaxrs.JaxrsBinder.jaxrsBinder; import static io.airlift.json.JsonBinder.jsonBinder; import static io.airlift.json.JsonCodecBinder.jsonCodecBinder; import static io.airlift.units.DataSize.Unit.MEGABYTE; import static java.util.Objects.requireNonNull; import static java.util.concurrent.Executors.newCachedThreadPool; import static java.util.concurrent.Executors.newScheduledThreadPool; import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor; import static java.util.concurrent.TimeUnit.SECONDS; import static org.weakref.jmx.guice.ExportBinder.newExporter; public class ServerMainModule extends AbstractConfigurationAwareModule { private final SqlParserOptions sqlParserOptions; public ServerMainModule(SqlParserOptions sqlParserOptions) { this.sqlParserOptions = requireNonNull(sqlParserOptions, "sqlParserOptions is null"); } @Override protected void setup(Binder binder) { ServerConfig serverConfig = buildConfigObject(ServerConfig.class); if (serverConfig.isCoordinator()) { install(new CoordinatorModule()); binder.bind(new TypeLiteral<Optional<QueryPerformanceFetcher>>() {}).toProvider(QueryPerformanceFetcherProvider.class).in(Scopes.SINGLETON); } else { binder.bind(new TypeLiteral<Optional<QueryPerformanceFetcher>>() {}).toInstance(Optional.empty()); // Install no-op resource group manager on workers, since only coordinators manage resource groups. 
binder.bind(ResourceGroupManager.class).to(NoOpResourceGroupManager.class).in(Scopes.SINGLETON); // HACK: this binding is needed by SystemConnectorModule, but will only be used on the coordinator binder.bind(QueryManager.class).toInstance(newProxy(QueryManager.class, (proxy, method, args) -> { throw new UnsupportedOperationException(); })); } InternalCommunicationConfig internalCommunicationConfig = buildConfigObject(InternalCommunicationConfig.class); configBinder(binder).bindConfigGlobalDefaults(HttpClientConfig.class, config -> { config.setKeyStorePath(internalCommunicationConfig.getKeyStorePath()); config.setKeyStorePassword(internalCommunicationConfig.getKeyStorePassword()); }); configBinder(binder).bindConfig(FeaturesConfig.class); binder.bind(SqlParser.class).in(Scopes.SINGLETON); binder.bind(SqlParserOptions.class).toInstance(sqlParserOptions); bindFailureDetector(binder, serverConfig.isCoordinator()); jaxrsBinder(binder).bind(ThrowableMapper.class); configBinder(binder).bindConfig(QueryManagerConfig.class); jsonCodecBinder(binder).bindJsonCodec(ViewDefinition.class); // session properties binder.bind(SessionPropertyManager.class).in(Scopes.SINGLETON); binder.bind(SystemSessionProperties.class).in(Scopes.SINGLETON); // schema properties binder.bind(SchemaPropertyManager.class).in(Scopes.SINGLETON); // table properties binder.bind(TablePropertyManager.class).in(Scopes.SINGLETON); // node manager discoveryBinder(binder).bindSelector("presto"); binder.bind(DiscoveryNodeManager.class).in(Scopes.SINGLETON); binder.bind(InternalNodeManager.class).to(DiscoveryNodeManager.class).in(Scopes.SINGLETON); newExporter(binder).export(DiscoveryNodeManager.class).withGeneratedName(); httpClientBinder(binder).bindHttpClient("node-manager", ForNodeManager.class) .withTracing() .withConfigDefaults(config -> { config.setIdleTimeout(new Duration(30, SECONDS)); config.setRequestTimeout(new Duration(10, SECONDS)); }); // node scheduler // TODO: remove from NodePartitioningManager 
and move to CoordinatorModule configBinder(binder).bindConfig(NodeSchedulerConfig.class); binder.bind(NodeScheduler.class).in(Scopes.SINGLETON); binder.bind(NodeSchedulerExporter.class).in(Scopes.SINGLETON); binder.bind(NodeTaskMap.class).in(Scopes.SINGLETON); newExporter(binder).export(NodeScheduler.class).withGeneratedName(); // network topology // TODO: move to CoordinatorModule when NodeScheduler is moved install(installModuleIf( NodeSchedulerConfig.class, config -> LEGACY.equalsIgnoreCase(config.getNetworkTopology()), moduleBinder -> moduleBinder.bind(NetworkTopology.class).to(LegacyNetworkTopology.class).in(Scopes.SINGLETON))); install(installModuleIf( NodeSchedulerConfig.class, config -> FLAT.equalsIgnoreCase(config.getNetworkTopology()), moduleBinder -> moduleBinder.bind(NetworkTopology.class).to(FlatNetworkTopology.class).in(Scopes.SINGLETON))); // task execution jaxrsBinder(binder).bind(TaskResource.class); newExporter(binder).export(TaskResource.class).withGeneratedName(); binder.bind(TaskManager.class).to(SqlTaskManager.class).in(Scopes.SINGLETON); // workaround for CodeCache GC issue if (JavaVersion.current().getMajor() == 8) { configBinder(binder).bindConfig(CodeCacheGcConfig.class); binder.bind(CodeCacheGcTrigger.class).in(Scopes.SINGLETON); } // Add monitoring for JVM pauses binder.bind(PauseMeter.class).in(Scopes.SINGLETON); newExporter(binder).export(PauseMeter.class).withGeneratedName(); configBinder(binder).bindConfig(MemoryManagerConfig.class); configBinder(binder).bindConfig(NodeMemoryConfig.class); configBinder(binder).bindConfig(ReservedSystemMemoryConfig.class); binder.bind(LocalMemoryManager.class).in(Scopes.SINGLETON); binder.bind(LocalMemoryManagerExporter.class).in(Scopes.SINGLETON); newExporter(binder).export(TaskManager.class).withGeneratedName(); binder.bind(TaskExecutor.class).in(Scopes.SINGLETON); newExporter(binder).export(TaskExecutor.class).withGeneratedName(); binder.bind(LocalExecutionPlanner.class).in(Scopes.SINGLETON); 
configBinder(binder).bindConfig(CompilerConfig.class); binder.bind(ExpressionCompiler.class).in(Scopes.SINGLETON); newExporter(binder).export(ExpressionCompiler.class).withGeneratedName(); binder.bind(PageFunctionCompiler.class).in(Scopes.SINGLETON); newExporter(binder).export(PageFunctionCompiler.class).withGeneratedName(); configBinder(binder).bindConfig(TaskManagerConfig.class); binder.bind(IndexJoinLookupStats.class).in(Scopes.SINGLETON); newExporter(binder).export(IndexJoinLookupStats.class).withGeneratedName(); binder.bind(AsyncHttpExecutionMBean.class).in(Scopes.SINGLETON); newExporter(binder).export(AsyncHttpExecutionMBean.class).withGeneratedName(); binder.bind(JoinFilterFunctionCompiler.class).in(Scopes.SINGLETON); newExporter(binder).export(JoinFilterFunctionCompiler.class).withGeneratedName(); binder.bind(JoinCompiler.class).in(Scopes.SINGLETON); newExporter(binder).export(JoinCompiler.class).withGeneratedName(); binder.bind(OrderingCompiler.class).in(Scopes.SINGLETON); newExporter(binder).export(OrderingCompiler.class).withGeneratedName(); binder.bind(PagesIndex.Factory.class).to(PagesIndex.DefaultFactory.class); binder.bind(JoinProbeCompiler.class).in(Scopes.SINGLETON); newExporter(binder).export(JoinProbeCompiler.class).withGeneratedName(); binder.bind(LookupJoinOperators.class).in(Scopes.SINGLETON); jsonCodecBinder(binder).bindJsonCodec(TaskStatus.class); jsonCodecBinder(binder).bindJsonCodec(StageInfo.class); jsonCodecBinder(binder).bindJsonCodec(TaskInfo.class); jaxrsBinder(binder).bind(PagesResponseWriter.class); // exchange client binder.bind(new TypeLiteral<ExchangeClientSupplier>() {}).to(ExchangeClientFactory.class).in(Scopes.SINGLETON); httpClientBinder(binder).bindHttpClient("exchange", ForExchange.class) .withTracing() .withConfigDefaults(config -> { config.setIdleTimeout(new Duration(30, SECONDS)); config.setRequestTimeout(new Duration(10, SECONDS)); config.setMaxConnectionsPerServer(250); config.setMaxContentLength(new DataSize(32, 
MEGABYTE)); }); configBinder(binder).bindConfig(ExchangeClientConfig.class); binder.bind(ExchangeExecutionMBean.class).in(Scopes.SINGLETON); newExporter(binder).export(ExchangeExecutionMBean.class).withGeneratedName(); // execution binder.bind(LocationFactory.class).to(HttpLocationFactory.class).in(Scopes.SINGLETON); // memory manager jaxrsBinder(binder).bind(MemoryResource.class); jsonCodecBinder(binder).bindJsonCodec(MemoryInfo.class); jsonCodecBinder(binder).bindJsonCodec(MemoryPoolAssignmentsRequest.class); // transaction manager configBinder(binder).bindConfig(TransactionManagerConfig.class); // data stream provider binder.bind(PageSourceManager.class).in(Scopes.SINGLETON); binder.bind(PageSourceProvider.class).to(PageSourceManager.class).in(Scopes.SINGLETON); // page sink provider binder.bind(PageSinkManager.class).in(Scopes.SINGLETON); binder.bind(PageSinkProvider.class).to(PageSinkManager.class).in(Scopes.SINGLETON); // metadata binder.bind(StaticCatalogStore.class).in(Scopes.SINGLETON); configBinder(binder).bindConfig(StaticCatalogStoreConfig.class); binder.bind(MetadataManager.class).in(Scopes.SINGLETON); binder.bind(Metadata.class).to(MetadataManager.class).in(Scopes.SINGLETON); // statistics calculator binder.bind(CostCalculator.class).to(CoefficientBasedCostCalculator.class).in(Scopes.SINGLETON); // type binder.bind(TypeRegistry.class).in(Scopes.SINGLETON); binder.bind(TypeManager.class).to(TypeRegistry.class).in(Scopes.SINGLETON); jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class); newSetBinder(binder, Type.class); // split manager binder.bind(SplitManager.class).in(Scopes.SINGLETON); // node partitioning manager binder.bind(NodePartitioningManager.class).in(Scopes.SINGLETON); // index manager binder.bind(IndexManager.class).in(Scopes.SINGLETON); // handle resolver binder.install(new HandleJsonModule()); // connector binder.bind(ConnectorManager.class).in(Scopes.SINGLETON); // system connector binder.install(new 
SystemConnectorModule()); // splits jsonCodecBinder(binder).bindJsonCodec(TaskUpdateRequest.class); jsonCodecBinder(binder).bindJsonCodec(ConnectorSplit.class); jsonBinder(binder).addSerializerBinding(Slice.class).to(SliceSerializer.class); jsonBinder(binder).addDeserializerBinding(Slice.class).to(SliceDeserializer.class); jsonBinder(binder).addSerializerBinding(Expression.class).to(ExpressionSerializer.class); jsonBinder(binder).addDeserializerBinding(Expression.class).to(ExpressionDeserializer.class); jsonBinder(binder).addDeserializerBinding(FunctionCall.class).to(FunctionCallDeserializer.class); // query monitor configBinder(binder).bindConfig(QueryMonitorConfig.class); binder.bind(QueryMonitor.class).in(Scopes.SINGLETON); // Determine the NodeVersion String prestoVersion = serverConfig.getPrestoVersion(); if (prestoVersion == null) { prestoVersion = getClass().getPackage().getImplementationVersion(); } checkState(prestoVersion != null, "presto.version must be provided when it cannot be automatically determined"); NodeVersion nodeVersion = new NodeVersion(prestoVersion); binder.bind(NodeVersion.class).toInstance(nodeVersion); // presto announcement discoveryBinder(binder).bindHttpAnnouncement("presto") .addProperty("node_version", nodeVersion.toString()) .addProperty("coordinator", String.valueOf(serverConfig.isCoordinator())) .addProperty("connectorIds", nullToEmpty(serverConfig.getDataSources())); // server info resource jaxrsBinder(binder).bind(ServerInfoResource.class); // plugin manager binder.bind(PluginManager.class).in(Scopes.SINGLETON); configBinder(binder).bindConfig(PluginManagerConfig.class); binder.bind(CatalogManager.class).in(Scopes.SINGLETON); // optimizers binder.bind(PlanOptimizers.class).in(Scopes.SINGLETON); // block encodings binder.bind(BlockEncodingManager.class).in(Scopes.SINGLETON); binder.bind(BlockEncodingSerde.class).to(BlockEncodingManager.class).in(Scopes.SINGLETON); newSetBinder(binder, new TypeLiteral<BlockEncodingFactory<?>>() 
{}); jsonBinder(binder).addSerializerBinding(Block.class).to(BlockJsonSerde.Serializer.class); jsonBinder(binder).addDeserializerBinding(Block.class).to(BlockJsonSerde.Deserializer.class); // thread visualizer jaxrsBinder(binder).bind(ThreadResource.class); // PageSorter binder.bind(PageSorter.class).to(PagesIndexPageSorter.class).in(Scopes.SINGLETON); // PageIndexer binder.bind(PageIndexerFactory.class).to(GroupByHashPageIndexerFactory.class).in(Scopes.SINGLETON); // Finalizer binder.bind(FinalizerService.class).in(Scopes.SINGLETON); // Spiller binder.bind(SpillerFactory.class).to(GenericSpillerFactory.class).in(Scopes.SINGLETON); binder.bind(SingleStreamSpillerFactory.class).to(FileSingleStreamSpillerFactory.class).in(Scopes.SINGLETON); binder.bind(SpillerStats.class).in(Scopes.SINGLETON); newExporter(binder).export(SpillerFactory.class).withGeneratedName(); binder.bind(LocalSpillManager.class).in(Scopes.SINGLETON); configBinder(binder).bindConfig(NodeSpillConfig.class); // cleanup binder.bind(ExecutorCleanup.class).in(Scopes.SINGLETON); } @Provides @Singleton @ForExchange public static ScheduledExecutorService createExchangeExecutor(ExchangeClientConfig config) { return newScheduledThreadPool(config.getClientThreads(), daemonThreadsNamed("exchange-client-%s")); } @Provides @Singleton @ForAsyncHttp public static ExecutorService createAsyncHttpResponseCoreExecutor() { return newCachedThreadPool(daemonThreadsNamed("async-http-response-%s")); } @Provides @Singleton @ForAsyncHttp public static BoundedExecutor createAsyncHttpResponseExecutor(@ForAsyncHttp ExecutorService coreExecutor, TaskManagerConfig config) { return new BoundedExecutor(coreExecutor, config.getHttpResponseThreads()); } @Provides @Singleton @ForAsyncHttp public static ScheduledExecutorService createAsyncHttpTimeoutExecutor(TaskManagerConfig config) { return newScheduledThreadPool(config.getHttpTimeoutThreads(), daemonThreadsNamed("async-http-timeout-%s")); } @Provides @Singleton 
@ForTransactionManager
public static ScheduledExecutorService createTransactionIdleCheckExecutor()
{
    // Single daemon thread used by the transaction manager to scan for
    // idle/expired transactions.
    return newSingleThreadScheduledExecutor(daemonThreadsNamed("transaction-idle-check"));
}

@Provides
@Singleton
@ForTransactionManager
public static ExecutorService createTransactionFinishingExecutor()
{
    // Cached daemon-thread pool for transaction finishing work.
    return newCachedThreadPool(daemonThreadsNamed("transaction-finishing-%s"));
}

// Assembles the TransactionManager from its config and the two dedicated
// executors bound above.
@Provides
@Singleton
public static TransactionManager createTransactionManager(
        TransactionManagerConfig config,
        CatalogManager catalogManager,
        @ForTransactionManager ScheduledExecutorService idleCheckExecutor,
        @ForTransactionManager ExecutorService finishingExecutor)
{
    return TransactionManager.create(config, idleCheckExecutor, catalogManager, finishingExecutor);
}

// Coordinators get the real failure-detector module plus its REST resource;
// workers get a no-op stand-in.
private static void bindFailureDetector(Binder binder, boolean coordinator)
{
    // TODO: this is a hack until the coordinator module works correctly
    if (coordinator) {
        binder.install(new FailureDetectorModule());
        jaxrsBinder(binder).bind(NodeResource.class);
    }
    else {
        // No-op detector: nothing is ever reported failed and every host's
        // state is UNKNOWN.
        binder.bind(FailureDetector.class).toInstance(new FailureDetector()
        {
            @Override
            public Set<ServiceDescriptor> getFailed()
            {
                return ImmutableSet.of();
            }

            @Override
            public State getState(HostAddress hostAddress)
            {
                // failure detector is not available on workers
                return State.UNKNOWN;
            }
        });
    }
}

// Collects the module-owned executors so they are all shut down when the
// injector is destroyed.
public static class ExecutorCleanup
{
    private final List<ExecutorService> executors;

    @Inject
    public ExecutorCleanup(
            @ForExchange ScheduledExecutorService exchangeExecutor,
            @ForAsyncHttp ExecutorService httpResponseExecutor,
            @ForAsyncHttp ScheduledExecutorService httpTimeoutExecutor,
            @ForTransactionManager ExecutorService transactionFinishingExecutor,
            @ForTransactionManager ScheduledExecutorService transactionIdleExecutor)
    {
        executors = ImmutableList.of(
                exchangeExecutor,
                httpResponseExecutor,
                httpTimeoutExecutor,
                transactionFinishingExecutor,
                transactionIdleExecutor);
    }

    @PreDestroy
    public void shutdown()
    {
        // shutdownNow: abandon queued work rather than delaying shutdown.
        executors.forEach(ExecutorService::shutdownNow);
    }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.alibaba.weex.uitest.TC_Downgrade; import android.app.Activity; import android.app.Application; import android.app.Instrumentation; import android.content.Intent; import android.test.ActivityInstrumentationTestCase2; import android.test.TouchUtils; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.view.ViewGroup; import com.alibaba.weex.R; import com.alibaba.weex.util.ScreenShot; import com.alibaba.weex.WXPageActivity; import com.alibaba.weex.WeappJsBaseTestCase; import com.alibaba.weex.constants.Constants; import com.alibaba.weex.util.ViewUtil; import com.taobao.weex.ui.component.WXText; import com.taobao.weex.ui.view.WXTextView; import java.io.IOException; import java.util.ArrayList; /** * Created by admin on 16/3/23. 
*/ public class WeexUiTestCaseTcDowngradeAppVFalse extends ActivityInstrumentationTestCase2<WXPageActivity>{ public final String TAG = "TestScript_Guide=="; public WeappJsBaseTestCase weappApplication; public WXPageActivity waTestPageActivity; public ViewGroup mViewGroup; public Application mApplication; public Instrumentation mInstrumentation; public ArrayList<View> mCaseListIndexView = new ArrayList<View>(); public WeexUiTestCaseTcDowngradeAppVFalse() { super(WXPageActivity.class); } public void setUp() throws Exception{ Log.e("TestScript_Guide", "setUp into!!"); setActivityInitialTouchMode(false); mInstrumentation = getInstrumentation(); Intent intent = new Intent(); intent.putExtra("bundleUrl", Constants.BUNDLE_URL); launchActivityWithIntent("com.alibaba.weex", WXPageActivity.class, intent); setActivity(WXPageActivity.wxPageActivityInstance); waTestPageActivity = getActivity(); // waTestPageActivity.getIntent().getData().toString(); Log.e(TAG,"activity1=" + waTestPageActivity.toString() ); sleep(3000); mViewGroup = (ViewGroup) waTestPageActivity.findViewById(R.id.container); mCaseListIndexView = ViewUtil.findViewWithText(mViewGroup, "TC_"); addAllTargetView("TC_"); Thread.sleep(3000); } public void testDowngrade(){ for(final View caseView : mCaseListIndexView){ if (((WXTextView)caseView).getText().toString().equals("TC_Downgrade")){ Log.e(TAG, "TC_Downgrade find"); final WXTextView inputView = (WXTextView)caseView; mInstrumentation.runOnMainSync(new Runnable() { @Override public void run() { inputView.requestFocus(); inputView.performClick(); } }); sleep(2000); setActivity(WXPageActivity.wxPageActivityInstance); Activity activity2 = getActivity(); Log.e(TAG, "activity2 = " + activity2.toString()); ViewGroup myGroup = (ViewGroup)(activity2.findViewById(R.id.container)); Log.e(TAG, myGroup.toString()); ArrayList<View> inputListView = new ArrayList<View>(); inputListView = ViewUtil.findViewWithText(myGroup, "TC_Downgrade_appV_False"); Log.e(TAG, 
"TC_Downgrade_appV_False== " + inputListView.size()); sleep(2000); if(inputListView.size()!=0){ final WXTextView inputTypeView = (WXTextView)inputListView.get(0); mInstrumentation.runOnMainSync(new Runnable() { @Override public void run() { inputTypeView.requestFocus(); inputTypeView.performClick(); Log.e(TAG, "TC_Downgrade_appV_False clcik!"); } }); sleep(3000); screenShot("TC_Downgrade_appV_False"); // ScreenShot.takeScreenShotIncludeDialog(getActivity(), "TC_Downgrade_appV_False"); Log.e(TAG, "TC_Downgrade_appV_False snap!"); } } } } /** * get tc list by text * @param byText * @return * @throws InterruptedException */ public ArrayList<View> getTestCaseListViewByText(String byText) throws InterruptedException { Log.e("TestScript_Guide", "byText ==" + byText); if(TextUtils.isEmpty(byText)){ return null; } ArrayList<View> outViews = new ArrayList<View>(); mViewGroup.findViewsWithText(outViews, byText, View.FIND_VIEWS_WITH_TEXT); for (View view : outViews){ String viewText = ((WXTextView)view).getText().toString(); Log.e(TAG, "viewText ==" + viewText); } return outViews; } /** * findMyCaseByText */ public View findMyCaseByText(String caseText){ if (mCaseListIndexView.size() == 0) return null; WXTextView view = null; for(int i=0; i<mCaseListIndexView.size();i++){ view = (WXTextView)mCaseListIndexView.get(i); if (view.getText().toString().toLowerCase().contains(caseText.toLowerCase())){ return view; } } return view; } /** * sleep */ public void sleep(long time){ try { Thread.sleep(time); } catch (InterruptedException e) { e.printStackTrace(); } } /** * snapshot */ public void screenShot(String shotName) { try { ScreenShot.shoot(WXPageActivity.wxPageActivityInstance, shotName); } catch (IOException e) { e.printStackTrace(); } } public void setViewGroup(ViewGroup viewGroup){ mViewGroup = viewGroup; } public void addAllTargetView(String target){ int max = 6; int count =0 ; while (count < max){ TouchUtils.dragQuarterScreenUp(this, this.getActivity()); mViewGroup = 
(ViewGroup) waTestPageActivity.findViewById(R.id.container); mCaseListIndexView = ViewUtil.findViewWithText(mViewGroup, target); mCaseListIndexView.addAll(mCaseListIndexView); count ++; } } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.network.jms; import javax.jms.Connection; import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Session; import javax.jms.TextMessage; import java.util.ArrayList; import java.util.Iterator; import java.util.concurrent.TimeUnit; import org.apache.activemq.ActiveMQConnectionFactory; import org.apache.activemq.broker.BrokerService; import org.apache.activemq.command.ActiveMQTopic; import org.apache.activemq.util.Wait; import org.junit.After; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; /** * These test cases are used to verify that queue outbound bridge connections get * re-established in all broker restart scenarios. This is possible when the * outbound bridge is configured using the failover URI with a timeout. 
*/ public class TopicOutboundBridgeReconnectTest { private BrokerService producerBroker; private BrokerService consumerBroker; private ActiveMQConnectionFactory producerConnectionFactory; private ActiveMQConnectionFactory consumerConnectionFactory; private Destination destination; private final ArrayList<Connection> connections = new ArrayList<>(); @Test public void testMultipleProducerBrokerRestarts() throws Exception { for (int i = 0; i < 10; i++) { testWithProducerBrokerRestart(); disposeConsumerConnections(); } } @Test public void testWithoutRestartsConsumerFirst() throws Exception { startConsumerBroker(); startProducerBroker(); MessageConsumer consumer = createConsumer(); sendMessage("test123"); sendMessage("test456"); Message message = consumer.receive(2000); assertNotNull(message); assertEquals("test123", ((TextMessage) message).getText()); message = consumer.receive(5000); assertNotNull(message); assertEquals("test456", ((TextMessage) message).getText()); assertNull(consumer.receiveNoWait()); } @Test public void testWithoutRestartsProducerFirst() throws Exception { startProducerBroker(); sendMessage("test123"); startConsumerBroker(); // unless using a failover URI, the first attempt of this send will likely fail, so increase the timeout below // to give the bridge time to recover sendMessage("test456"); MessageConsumer consumer = createConsumer(); Message message = consumer.receive(5000); assertNotNull(message); assertEquals("test123", ((TextMessage) message).getText()); message = consumer.receive(5000); assertNotNull(message); assertEquals("test456", ((TextMessage) message).getText()); assertNull(consumer.receiveNoWait()); } @Test public void testWithProducerBrokerRestart() throws Exception { startProducerBroker(); startConsumerBroker(); MessageConsumer consumer = createConsumer(); sendMessage("test123"); Message message = consumer.receive(5000); assertNotNull(message); assertEquals("test123", ((TextMessage) message).getText()); 
assertNull(consumer.receiveNoWait()); // Restart the first broker... stopProducerBroker(); startProducerBroker(); sendMessage("test123"); message = consumer.receive(5000); assertNotNull(message); assertEquals("test123", ((TextMessage) message).getText()); assertNull(consumer.receiveNoWait()); } @Test public void testWithConsumerBrokerRestart() throws Exception { startProducerBroker(); startConsumerBroker(); final MessageConsumer consumer1 = createConsumer(); sendMessage("test123"); Message message = consumer1.receive(5000); assertNotNull(message); assertEquals("test123", ((TextMessage) message).getText()); assertNull(consumer1.receiveNoWait()); consumer1.close(); // Restart the first broker... stopConsumerBroker(); startConsumerBroker(); // unless using a failover URI, the first attempt of this send will likely fail, so increase the timeout below // to give the bridge time to recover sendMessage("test123"); final MessageConsumer consumer2 = createConsumer(); assertTrue("Expected recover and delivery failed", Wait.waitFor(new Wait.Condition() { @Override public boolean isSatisified() throws Exception { Message message = consumer2.receiveNoWait(); if (message == null || !((TextMessage) message).getText().equals("test123")) { return false; } return true; } })); assertNull(consumer2.receiveNoWait()); } @Test public void testWithConsumerBrokerStartDelay() throws Exception { startConsumerBroker(); final MessageConsumer consumer = createConsumer(); TimeUnit.SECONDS.sleep(5); startProducerBroker(); sendMessage("test123"); assertTrue("Expected recover and delivery failed", Wait.waitFor(new Wait.Condition() { @Override public boolean isSatisified() throws Exception { Message message = consumer.receiveNoWait(); if (message == null || !((TextMessage) message).getText().equals("test123")) { return false; } return true; } })); assertNull(consumer.receiveNoWait()); } @Test public void testWithProducerBrokerStartDelay() throws Exception { startProducerBroker(); 
TimeUnit.SECONDS.sleep(5); startConsumerBroker(); MessageConsumer consumer = createConsumer(); sendMessage("test123"); Message message = consumer.receive(2000); assertNotNull(message); assertEquals("test123", ((TextMessage) message).getText()); assertNull(consumer.receiveNoWait()); } @Before public void setUp() throws Exception { producerConnectionFactory = createProducerConnectionFactory(); consumerConnectionFactory = createConsumerConnectionFactory(); destination = new ActiveMQTopic("RECONNECT.TEST.TOPIC"); } @After public void tearDown() throws Exception { disposeConsumerConnections(); try { stopProducerBroker(); } catch (Throwable e) { } try { stopConsumerBroker(); } catch (Throwable e) { } } protected void disposeConsumerConnections() { for (Iterator<Connection> iter = connections.iterator(); iter.hasNext(); ) { Connection connection = iter.next(); try { connection.close(); } catch (Throwable ignore) { } } } protected void startProducerBroker() throws Exception { if (producerBroker == null) { producerBroker = createFirstBroker(); producerBroker.start(); } } protected void stopProducerBroker() throws Exception { if (producerBroker != null) { producerBroker.stop(); producerBroker = null; } } protected void startConsumerBroker() throws Exception { if (consumerBroker == null) { consumerBroker = createSecondBroker(); consumerBroker.start(); } } protected void stopConsumerBroker() throws Exception { if (consumerBroker != null) { consumerBroker.stop(); consumerBroker = null; } } protected BrokerService createFirstBroker() throws Exception { BrokerService broker = new BrokerService(); broker.setBrokerName("broker1"); broker.setPersistent(false); broker.setUseJmx(false); broker.addConnector("tcp://localhost:61616"); broker.addConnector("vm://broker1"); SimpleJmsTopicConnector jmsTopicConnector = new SimpleJmsTopicConnector(); jmsTopicConnector.setOutboundTopicBridges(new OutboundTopicBridge[]{new OutboundTopicBridge("RECONNECT.TEST.TOPIC")}); 
jmsTopicConnector.setOutboundTopicConnectionFactory(new ActiveMQConnectionFactory("tcp://localhost:61617")); broker.setJmsBridgeConnectors(new JmsConnector[]{jmsTopicConnector}); return broker; } protected BrokerService createSecondBroker() throws Exception { BrokerService broker = new BrokerService(); broker.setBrokerName("broker2"); broker.setPersistent(false); broker.setUseJmx(false); broker.addConnector("tcp://localhost:61617"); broker.addConnector("vm://broker2"); return broker; } protected ActiveMQConnectionFactory createProducerConnectionFactory() { return new ActiveMQConnectionFactory("vm://broker1"); } protected ActiveMQConnectionFactory createConsumerConnectionFactory() { return new ActiveMQConnectionFactory("vm://broker2"); } protected void sendMessage(String text) throws JMSException { Connection connection = null; try { connection = producerConnectionFactory.createConnection(); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); MessageProducer producer = session.createProducer(destination); TextMessage message = session.createTextMessage(); message.setText(text); producer.send(message); } finally { try { connection.close(); } catch (Throwable ignore) { } } } protected MessageConsumer createConsumer() throws JMSException { Connection connection = consumerConnectionFactory.createConnection(); connections.add(connection); connection.start(); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); return session.createConsumer(destination); } }
/** * Copyright (c) 2011 Yahoo! Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. See accompanying LICENSE file. */ package com.yahoo.omid.tso; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import java.io.IOException; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.ExceptionEvent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.yahoo.omid.client.SyncAbortCompleteCallback; import com.yahoo.omid.client.SyncCommitCallback; import com.yahoo.omid.client.SyncCommitQueryCallback; import com.yahoo.omid.client.SyncCreateCallback; import com.yahoo.omid.client.TSOClient; import com.yahoo.omid.replication.ZipperState; import com.yahoo.omid.tso.messages.AbortedTransactionReport; import com.yahoo.omid.tso.messages.CommitQueryRequest; import com.yahoo.omid.tso.messages.CommitRequest; import com.yahoo.omid.tso.messages.CommitResponse; import com.yahoo.omid.tso.messages.FullAbortRequest; import com.yahoo.omid.tso.messages.TimestampRequest; /** * Example of ChannelHandler for the Transaction Client * * @author maysam * */ public 
class TestClientHandler extends TSOClient { private static final Logger LOG = LoggerFactory.getLogger(TestClientHandler.class); /** * Return value for the caller */ final BlockingQueue<Boolean> answer = new LinkedBlockingQueue<Boolean>(); final BlockingQueue<TSOMessage> messageQueue = new LinkedBlockingQueue<TSOMessage>(); private Channel channel; // Sends FullAbortReport upon receiving a CommitResponse with committed = // false private boolean autoFullAbort = true; public TestClientHandler(Configuration conf) throws IOException { super(conf); } /** * Method to wait for the final response * * @return success or not */ public boolean waitForAll() { for (;;) { try { return answer.take(); } catch (InterruptedException e) { // Ignore. } } } public void sendMessage(Object msg) throws IOException { LOG.trace("Sending " + msg); if (msg instanceof CommitRequest) { CommitRequest cr = (CommitRequest) msg; commit(cr.startTimestamp, cr.rows, new SyncCommitCallback()); } else if (msg instanceof TimestampRequest) { getNewTimestamp(new SyncCreateCallback()); } else if (msg instanceof CommitQueryRequest) { CommitQueryRequest cqr = (CommitQueryRequest) msg; isCommitted(cqr.startTimestamp, cqr.queryTimestamp, new SyncCommitQueryCallback()); } else if (msg instanceof FullAbortRequest) { FullAbortRequest atr = (FullAbortRequest) msg; completeAbort(atr.startTimestamp, new SyncAbortCompleteCallback()); } } public void clearMessages() { messageQueue.clear(); } public void await() { synchronized(this) { while (channel == null) { try { wait(); } catch (InterruptedException e) { return; } } } } @Override protected void processMessage(TSOMessage msg) { if (msg instanceof CommitResponse) { CommitResponse cr = (CommitResponse) msg; if (!cr.committed && autoFullAbort) { try { completeAbort(cr.startTimestamp, new SyncAbortCompleteCallback()); } catch (IOException e) { LOG.error("Could not send Abort Complete mesagge.", e.getCause()); } } } messageQueue.add(msg); } public void receiveBootstrap() 
{ Object msg = null; receiveMessage(ZipperState.class); // Receive all AbortedTransactionReports while ( (msg = receiveMessage()) instanceof AbortedTransactionReport); messageQueue.add((TSOMessage) msg); } public Object receiveMessage() { try { Object msg = messageQueue.poll(5, TimeUnit.SECONDS); assertNotNull("Reception of message timed out", msg); return msg; } catch (InterruptedException e) { return null; } } @SuppressWarnings("unchecked") public <T extends TSOMessage> T receiveMessage(Class<T> type) { try { TSOMessage msg = messageQueue.poll(5000, TimeUnit.SECONDS); assertNotNull("Reception of message timed out", msg); assertThat(msg, is(type)); return (T) msg; } catch (InterruptedException e) { return null; } } /** * Starts the traffic */ @Override public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) { super.channelConnected(ctx, e); LOG.info("Start sending traffic"); synchronized (this) { this.channel = ctx.getChannel(); notify(); } } /** * When the channel is closed, print result */ @Override public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception { } @Override public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) { if (e.getCause() instanceof IOException) { LOG.warn("IOException from downstream.", e.getCause()); } else { LOG.warn("Unexpected exception from downstream.", e.getCause()); } // Offer default object answer.offer(false); Channels.close(e.getChannel()); } public boolean isAutoFullAbort() { return autoFullAbort; } public void setAutoFullAbort(boolean autoFullAbort) { this.autoFullAbort = autoFullAbort; } }
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.contextualsearch; import android.text.TextUtils; import org.chromium.base.CommandLine; import org.chromium.base.SysUtils; import org.chromium.chrome.browser.ChromeSwitches; import org.chromium.chrome.browser.ChromeVersionInfo; import org.chromium.components.variations.VariationsAssociatedData; import java.util.Arrays; import java.util.Locale; /** * Provides Field Trial support for the Contextual Search application within Chrome for Android. */ public class ContextualSearchFieldTrial { private static final String FIELD_TRIAL_NAME = "ContextualSearch"; private static final String ENABLED_PARAM = "enabled"; private static final String ENABLED_VALUE = "true"; private static final String DISABLE_FOR_CJK = "disable_for_cjk"; private static final String DISABLE_FOR_CHINESE = "disable_for_chinese"; private static final String DISABLE_FOR_JAPANESE = "disable_for_japanese"; private static final String DISABLE_FOR_KOREAN = "disable_for_korean"; // TODO(pedrosimonetti): Confirm if we can delete promo_on_longpress_only now. 
private static final String PROMO_ON_LONGPRESS_ONLY = "promo_on_longpress_only";
    static final String PROMO_ON_LIMITED_TAPS = "promo_on_limited_taps";
    static final String TAP_TRIGGERED_PROMO_LIMIT = "tap_triggered_promo_limit";
    static final String TAP_RESOLVE_LIMIT_FOR_DECIDED = "tap_resolve_limit_for_decided";
    static final String TAP_PREFETCH_LIMIT_FOR_DECIDED = "tap_prefetch_limit_for_decided";
    static final String TAP_RESOLVE_LIMIT_FOR_UNDECIDED = "tap_resolve_limit_for_undecided";
    static final String TAP_PREFETCH_LIMIT_FOR_UNDECIDED = "tap_prefetch_limit_for_undecided";
    static final String SELECTION_EXPANSION_DISABLED =
            "contextual_search_selection_expansion_disabled";
    static final String NARROW_PANEL_SUPPORTED = "contextual_search_narrow_panel_supported";

    private static final String CHINESE_LANGUAGE_CODE = "zh";
    private static final String JAPANESE_LANGUAGE_CODE = "ja";
    private static final String KOREAN_LANGUAGE_CODE = "ko";
    private static final String[] CJK_LANGUAGE_CODES = {
            CHINESE_LANGUAGE_CODE, JAPANESE_LANGUAGE_CODE, KOREAN_LANGUAGE_CODE};

    // The default navigation-detection-delay in milliseconds.
    private static final int DEFAULT_TAP_NAVIGATION_DETECTION_DELAY = 16;
    private static final String NAVIGATION_DETECTION_DELAY = "tap_navigation_detection_delay";

    private static final int UNLIMITED_TAPS = -1;
    private static final int DEFAULT_TAP_RESOLVE_LIMIT_FOR_DECIDED = UNLIMITED_TAPS;
    private static final int DEFAULT_TAP_PREFETCH_LIMIT_FOR_DECIDED = UNLIMITED_TAPS;
    private static final int DEFAULT_TAP_RESOLVE_LIMIT_FOR_UNDECIDED = 100;
    private static final int DEFAULT_TAP_PREFETCH_LIMIT_FOR_UNDECIDED = 10;

    // Cached value to avoid repeated and redundant JNI operations.
    private static Boolean sEnabled;
    private static Boolean sSelectionExpansionDisabled;
    private static Boolean sNarrowPanelSupported;

    /**
     * Don't instantiate.
     */
    private ContextualSearchFieldTrial() {}

    /**
     * Checks the current Variations parameters associated with the active group as well as the
     * Chrome preference to determine if the service is enabled.
     * @return Whether Contextual Search is enabled or not.
     */
    public static boolean isEnabled() {
        Boolean cached = sEnabled;
        if (cached == null) {
            cached = Boolean.valueOf(detectEnabled());
            sEnabled = cached;
        }
        return cached.booleanValue();
    }

    /**
     * Computes whether the feature should be on, consulting (in priority order) the device
     * class, test/user command-line switches, the system language, the channel, and Finch.
     */
    private static boolean detectEnabled() {
        if (SysUtils.isLowEndDevice()) {
            return false;
        }

        // This is used for instrumentation tests (i.e. it is not a user-flippable flag). We cannot
        // use Variations params because in the test harness, the initialization comes before any
        // native methods are available. And the ContextualSearchManager is initialized very early
        // in the Chrome initialization.
        CommandLine commandLine = CommandLine.getInstance();
        if (commandLine.hasSwitch(ChromeSwitches.ENABLE_CONTEXTUAL_SEARCH_FOR_TESTING)) {
            return true;
        }

        // Allow this user-flippable flag to disable the feature.
        if (commandLine.hasSwitch(ChromeSwitches.DISABLE_CONTEXTUAL_SEARCH)) {
            return false;
        }

        // Allow this user-flippable flag to override disabling due to language.
        if (commandLine.hasSwitch(ChromeSwitches.ENABLE_CONTEXTUAL_SEARCH)) {
            return true;
        }

        String systemLanguage = Locale.getDefault().getLanguage();
        if (!isLanguageSupported(systemLanguage)) return false;

        if (ChromeVersionInfo.isLocalBuild()) return true;

        return getBooleanParam(ENABLED_PARAM);
    }

    /**
     * @param languageCode The language code of the system.
     * @return Whether the language is supported, given the language code.
     */
    static boolean isLanguageSupported(String languageCode) {
        // The blanket CJK kill-switch takes precedence over the per-language ones.
        boolean isCjkLanguage = Arrays.asList(CJK_LANGUAGE_CODES).contains(languageCode);
        if (isCjkLanguage && getBooleanParam(DISABLE_FOR_CJK)) {
            return false;
        }

        // Each CJK language can also be disabled individually.
        if (languageCode.equals(CHINESE_LANGUAGE_CODE)) {
            return !getBooleanParam(DISABLE_FOR_CHINESE);
        }
        if (languageCode.equals(JAPANESE_LANGUAGE_CODE)) {
            return !getBooleanParam(DISABLE_FOR_JAPANESE);
        }
        if (languageCode.equals(KOREAN_LANGUAGE_CODE)) {
            return !getBooleanParam(DISABLE_FOR_KOREAN);
        }
        return true;
    }

    /**
     * Gets whether the promo should be triggered on longpress only.
     * @return {@code true} iff Finch says we should trigger the promo only on touch-and-hold.
     */
    static boolean isPromoLongpressTriggeredOnly() {
        return getBooleanParam(PROMO_ON_LONGPRESS_ONLY);
    }

    /**
     * @return Whether the promo should be triggered by a limited number of taps.
     */
    public static boolean isPromoLimitedByTapCounts() {
        return getBooleanParam(PROMO_ON_LIMITED_TAPS);
    }

    /**
     * @return The maximum number of times the promo can be triggered by a tap, or the
     * unlimited-taps sentinel if no value is present in the Finch configuration.
     */
    static int getPromoTapTriggeredLimit() {
        return getIntParamValueOrDefault(TAP_TRIGGERED_PROMO_LIMIT, UNLIMITED_TAPS);
    }

    /**
     * @return The delay to use for navigation-detection when triggering on a Tap.
     */
    static int getNavigationDetectionDelay() {
        return getIntParamValueOrDefault(
                NAVIGATION_DETECTION_DELAY, DEFAULT_TAP_NAVIGATION_DETECTION_DELAY);
    }

    /**
     * @return Whether Search Term Resolution in response to a Tap gesture is limited for decided
     *         users.
     */
    static boolean isTapResolveLimitedForDecided() {
        return getTapResolveLimitForDecided() != UNLIMITED_TAPS;
    }

    /**
     * @return Whether prefetch in response to a Tap gesture is limited for decided users.
     */
    static boolean isTapPrefetchLimitedForDecided() {
        return getTapPrefetchLimitForDecided() != UNLIMITED_TAPS;
    }

    /**
     * @return Whether Search Term Resolution in response to a Tap gesture is limited for undecided
     *         users.
     */
    static boolean isTapResolveLimitedForUndecided() {
        return getTapResolveLimitForUndecided() != UNLIMITED_TAPS;
    }

    /**
     * @return Whether prefetch in response to a Tap gesture is limited for undecided users.
     */
    static boolean isTapPrefetchLimitedForUndecided() {
        return getTapPrefetchLimitForUndecided() != UNLIMITED_TAPS;
    }

    /**
     * @return The limit on the number of taps to resolve for decided users, or the default if no
     *         value is present in the Finch configuration.
     */
    static int getTapResolveLimitForDecided() {
        return getIntParamValueOrDefault(
                TAP_RESOLVE_LIMIT_FOR_DECIDED, DEFAULT_TAP_RESOLVE_LIMIT_FOR_DECIDED);
    }

    /**
     * @return The limit on the number of prefetches to issue for decided users, or the default
     *         if no value is present.
     */
    static int getTapPrefetchLimitForDecided() {
        return getIntParamValueOrDefault(
                TAP_PREFETCH_LIMIT_FOR_DECIDED, DEFAULT_TAP_PREFETCH_LIMIT_FOR_DECIDED);
    }

    /**
     * @return The limit on the number of taps to resolve for undecided users, or the default if no
     *         value is present in the Finch configuration.
     */
    static int getTapResolveLimitForUndecided() {
        return getIntParamValueOrDefault(
                TAP_RESOLVE_LIMIT_FOR_UNDECIDED, DEFAULT_TAP_RESOLVE_LIMIT_FOR_UNDECIDED);
    }

    /**
     * @return The limit on the number of prefetches to issue for undecided users, or the default
     *         if no value is present.
     */
    static int getTapPrefetchLimitForUndecided() {
        return getIntParamValueOrDefault(
                TAP_PREFETCH_LIMIT_FOR_UNDECIDED, DEFAULT_TAP_PREFETCH_LIMIT_FOR_UNDECIDED);
    }

    // --------------------------------------------------------------------------------------------
    // Experimental UI Features.
    // --------------------------------------------------------------------------------------------

    /**
     * @return Whether the base page selection expansion after server response is disabled.
     */
    public static boolean isSelectionExpansionDisabled() {
        Boolean cached = sSelectionExpansionDisabled;
        if (cached == null) {
            cached = Boolean.valueOf(getBooleanParam(SELECTION_EXPANSION_DISABLED));
            sSelectionExpansionDisabled = cached;
        }
        return cached.booleanValue();
    }

    /**
     * @return Whether the narrow version of the Search Panel is supported.
     */
    public static boolean isNarrowPanelSupported() {
        Boolean cached = sNarrowPanelSupported;
        if (cached == null) {
            cached = Boolean.valueOf(getBooleanParam(NARROW_PANEL_SUPPORTED));
            sNarrowPanelSupported = cached;
        }
        return cached.booleanValue();
    }

    // --------------------------------------------------------------------------------------------
    // Helpers.
    // --------------------------------------------------------------------------------------------

    /**
     * Gets a boolean Finch parameter, assuming the <paramName>="true" format. Also checks for a
     * command-line switch with the same name, for easy local testing.
     * @param paramName The name of the Finch parameter (or command-line switch) to get a value for.
     * @return Whether the Finch param is defined with a value "true", if there's a command-line
     *         flag present with any value.
     */
    private static boolean getBooleanParam(String paramName) {
        if (CommandLine.getInstance().hasSwitch(paramName)) {
            return true;
        }
        String variationValue =
                VariationsAssociatedData.getVariationParamValue(FIELD_TRIAL_NAME, paramName);
        return TextUtils.equals(ENABLED_VALUE, variationValue);
    }

    /**
     * Returns an integer value for a Finch parameter, or the default value if no parameter exists
     * in the current configuration. Also checks for a command-line switch with the same name.
     * @param paramName The name of the Finch parameter (or command-line switch) to get a value for.
     * @param defaultValue The default value to return when there's no param or switch.
     * @return An integer value -- either the param or the default.
     */
    private static int getIntParamValueOrDefault(String paramName, int defaultValue) {
        String value = CommandLine.getInstance().getSwitchValue(paramName);
        if (TextUtils.isEmpty(value)) {
            value = VariationsAssociatedData.getVariationParamValue(FIELD_TRIAL_NAME, paramName);
        }
        if (TextUtils.isEmpty(value)) {
            return defaultValue;
        }
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }
}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.testsuite.model; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.keycloak.common.util.Time; import org.keycloak.models.AuthenticatedClientSessionModel; import org.keycloak.models.ClientModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.RealmModel; import org.keycloak.models.UserManager; import org.keycloak.models.UserModel; import org.keycloak.models.UserSessionModel; import org.keycloak.models.session.UserSessionPersisterProvider; import org.keycloak.models.sessions.infinispan.changes.sessions.PersisterLastSessionRefreshStoreFactory; import org.keycloak.models.utils.KeycloakModelUtils; import org.keycloak.models.utils.ResetTimeOffsetEvent; import org.keycloak.protocol.oidc.OIDCLoginProtocol; import org.keycloak.representations.idm.RealmRepresentation; import org.keycloak.services.managers.ClientManager; import org.keycloak.services.managers.RealmManager; import org.keycloak.services.managers.UserSessionManager; import org.keycloak.testsuite.AbstractTestRealmKeycloakTest; import org.keycloak.testsuite.arquillian.annotation.AuthServerContainerExclude; import org.keycloak.testsuite.arquillian.annotation.ModelTest; import org.keycloak.timer.TimerProvider; import java.util.HashMap; import java.util.HashSet; import 
java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.keycloak.testsuite.arquillian.annotation.AuthServerContainerExclude.AuthServer; /** * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a> */ @AuthServerContainerExclude(AuthServer.REMOTE) public class UserSessionProviderOfflineTest extends AbstractTestRealmKeycloakTest { private static KeycloakSession currentSession; private static RealmModel realm; private static UserSessionManager sessionManager; private static UserSessionPersisterProvider persister; @Before public void before() { testingClient.server().run(session -> { KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionBefore) -> { reloadState(sessionBefore, true); persister = sessionBefore.getProvider(UserSessionPersisterProvider.class); }); }); } @After public void after() { testingClient.server().run(session -> { RealmModel realm = session.realms().getRealmByName("test"); session.sessions().removeUserSessions(realm); UserModel user1 = session.users().getUserByUsername("user1", realm); UserModel user2 = session.users().getUserByUsername("user2", realm); UserManager um = new UserManager(session); if (user1 != null) { um.removeUser(realm, user1); } if (user2 != null) { um.removeUser(realm, user2); } }); } @Test @ModelTest public void testOfflineSessionsCrud(KeycloakSession session) { Map<String, Set<String>> offlineSessions = new HashMap<>(); KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCrud) -> { // Create some online sessions in infinispan reloadState(sessionCrud); createSessions(sessionCrud); }); KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCrud2) -> { currentSession = sessionCrud2; realm = currentSession.realms().getRealm("test"); 
sessionManager = new UserSessionManager(currentSession); persister = currentSession.getProvider(UserSessionPersisterProvider.class); // Key is userSession ID, values are client UUIDS // Persist 3 created userSessions and clientSessions as offline ClientModel testApp = realm.getClientByClientId("test-app"); List<UserSessionModel> userSessions = currentSession.sessions().getUserSessions(realm, testApp); for (UserSessionModel userSession : userSessions) { offlineSessions.put(userSession.getId(), createOfflineSessionIncludeClientSessions(currentSession, userSession)); } }); KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCrud3) -> { currentSession = sessionCrud3; realm = currentSession.realms().getRealm("test"); sessionManager = new UserSessionManager(currentSession); persister = currentSession.getProvider(UserSessionPersisterProvider.class); // Assert all previously saved offline sessions found for (Map.Entry<String, Set<String>> entry : offlineSessions.entrySet()) { UserSessionModel offlineSession = sessionManager.findOfflineUserSession(realm, entry.getKey()); Assert.assertNotNull(offlineSession); Assert.assertEquals(offlineSession.getAuthenticatedClientSessions().keySet(), entry.getValue()); } // Find clients with offline token UserModel user1 = currentSession.users().getUserByUsername("user1", realm); Set<ClientModel> clients = sessionManager.findClientsWithOfflineToken(realm, user1); Assert.assertEquals(clients.size(), 2); for (ClientModel client : clients) { Assert.assertTrue(client.getClientId().equals("test-app") || client.getClientId().equals("third-party")); } UserModel user2 = currentSession.users().getUserByUsername("user2", realm); clients = sessionManager.findClientsWithOfflineToken(realm, user2); Assert.assertEquals(clients.size(), 1); Assert.assertEquals("test-app", clients.iterator().next().getClientId()); // Test count ClientModel testApp = realm.getClientByClientId("test-app"); ClientModel 
thirdparty = realm.getClientByClientId("third-party"); Assert.assertEquals(3, currentSession.sessions().getOfflineSessionsCount(realm, testApp)); Assert.assertEquals(1, currentSession.sessions().getOfflineSessionsCount(realm, thirdparty)); // Revoke "test-app" for user1 sessionManager.revokeOfflineToken(user1, testApp); }); KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCrud4) -> { currentSession = sessionCrud4; realm = currentSession.realms().getRealm("test"); sessionManager = new UserSessionManager(currentSession); persister = currentSession.getProvider(UserSessionPersisterProvider.class); // Assert userSession revoked ClientModel testApp = realm.getClientByClientId("test-app"); ClientModel thirdparty = realm.getClientByClientId("third-party"); // Still 2 sessions. The count of sessions by client may not be accurate after revoke due the // performance optimizations (the "127.0.0.1" currentSession still has another client "thirdparty" in it) Assert.assertEquals(2, currentSession.sessions().getOfflineSessionsCount(realm, testApp)); Assert.assertEquals(1, currentSession.sessions().getOfflineSessionsCount(realm, thirdparty)); List<UserSessionModel> thirdpartySessions = currentSession.sessions().getOfflineUserSessions(realm, thirdparty, 0, 10); Assert.assertEquals(1, thirdpartySessions.size()); Assert.assertEquals("127.0.0.1", thirdpartySessions.get(0).getIpAddress()); Assert.assertEquals("user1", thirdpartySessions.get(0).getUser().getUsername()); UserModel user1 = currentSession.users().getUserByUsername("user1", realm); UserModel user2 = currentSession.users().getUserByUsername("user2", realm); Set<ClientModel> clients = sessionManager.findClientsWithOfflineToken(realm, user1); Assert.assertEquals(1, clients.size()); Assert.assertEquals("third-party", clients.iterator().next().getClientId()); clients = sessionManager.findClientsWithOfflineToken(realm, user2); Assert.assertEquals(1, clients.size()); 
            // (tail of the offline-token CRUD test, whose start is earlier in the file)
            // user2 still holds the offline token for "test-app".
            Assert.assertEquals("test-app", clients.iterator().next().getClientId());

            // Revoke the second currentSession for user1 too.
            sessionManager.revokeOfflineToken(user1, thirdparty);
        });

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCrud5) -> {
            currentSession = sessionCrud5;
            realm = currentSession.realms().getRealm("test");
            sessionManager = new UserSessionManager(currentSession);
            persister = currentSession.getProvider(UserSessionPersisterProvider.class);

            ClientModel testApp = realm.getClientByClientId("test-app");
            ClientModel thirdparty = realm.getClientByClientId("third-party");

            // Accurate count now. All sessions of user1 cleared
            Assert.assertEquals(1, currentSession.sessions().getOfflineSessionsCount(realm, testApp));
            Assert.assertEquals(0, currentSession.sessions().getOfflineSessionsCount(realm, thirdparty));

            // Only user2's session (created from 127.0.0.3) should remain for "test-app".
            List<UserSessionModel> testAppSessions = currentSession.sessions().getOfflineUserSessions(realm, testApp, 0, 10);
            Assert.assertEquals(1, testAppSessions.size());
            Assert.assertEquals("127.0.0.3", testAppSessions.get(0).getIpAddress());
            Assert.assertEquals("user2", testAppSessions.get(0).getUser().getUsername());

            // user1 has no clients with an offline token left.
            UserModel user1 = currentSession.users().getUserByUsername("user1", realm);
            Set<ClientModel> clients = sessionManager.findClientsWithOfflineToken(realm, user1);
            Assert.assertEquals(0, clients.size());
        });
    }

    /**
     * Verifies that removing a realm removes its persisted offline sessions: creates realm "foo"
     * with one client and one offline session, removes the realm, recreates it, and asserts the
     * recreated realm has no offline sessions. Each step runs in its own transaction so the
     * persister state is committed between steps.
     */
    @Test
    @ModelTest
    public void testOnRealmRemoved(KeycloakSession session) {
        AtomicReference<String> userSessionID = new AtomicReference<>();

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionRR1) -> {
            currentSession = sessionRR1;
            persister = currentSession.getProvider(UserSessionPersisterProvider.class);

            // Fresh realm with a single client and user, plus one online user session.
            RealmModel fooRealm = currentSession.realms().createRealm("foo", "foo");
            fooRealm.addClient("foo-app");
            currentSession.users().addUser(fooRealm, "user3");

            UserSessionModel userSession = currentSession.sessions().createUserSession(fooRealm, currentSession.users().getUserByUsername("user3", fooRealm), "user3", "127.0.0.1", "form", true, null, null);
            userSessionID.set(userSession.getId());

            createClientSession(currentSession, fooRealm.getClientByClientId("foo-app"), userSession, "http://redirect", "state");
        });

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionRR2) -> {
            currentSession = sessionRR2;
            sessionManager = new UserSessionManager(currentSession);

            // Persist offline session
            RealmModel fooRealm = currentSession.realms().getRealm("foo");
            UserSessionModel userSession = currentSession.sessions().getUserSession(fooRealm, userSessionID.get());

            createOfflineSessionIncludeClientSessions(currentSession, userSession);

            // Sanity-check the offline session before removing the realm.
            UserSessionModel offlineUserSession = sessionManager.findOfflineUserSession(fooRealm, userSession.getId());
            Assert.assertEquals(offlineUserSession.getAuthenticatedClientSessions().size(), 1);
            AuthenticatedClientSessionModel offlineClientSession = offlineUserSession.getAuthenticatedClientSessions().values().iterator().next();
            Assert.assertEquals("foo-app", offlineClientSession.getClient().getClientId());
            Assert.assertEquals("user3", offlineClientSession.getUserSession().getUser().getUsername());

            // Remove realm
            RealmManager realmMgr = new RealmManager(currentSession);
            realmMgr.removeRealm(realmMgr.getRealm("foo"));
        });

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionRR3) -> {
            currentSession = sessionRR3;

            // Recreate the realm/client/user with the same names.
            RealmModel fooRealm = currentSession.realms().createRealm("foo", "foo");
            fooRealm.addClient("foo-app");
            currentSession.users().addUser(fooRealm, "user3");
        });

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionRR4) -> {
            currentSession = sessionRR4;

            // The recreated realm must not see the old offline sessions.
            RealmModel fooRealm = currentSession.realms().getRealm("foo");
            Assert.assertEquals(0, currentSession.sessions().getOfflineSessionsCount(fooRealm, fooRealm.getClientByClientId("foo-app")));

            // Cleanup
            RealmManager realmMgr = new RealmManager(currentSession);
            realmMgr.removeRealm(realmMgr.getRealm("foo"));
        });
    }

    /**
     * Verifies that removing a client removes its persisted offline client sessions, and that
     * removing the last client session leaves the user session with no client sessions. Runs in an
     * outer transaction with nested per-step transactions; teardown happens in the finally block.
     */
    @Test
    @ModelTest
    public void testOnClientRemoved(KeycloakSession session) {
        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCR) -> {
            try {
                int started = Time.currentTime();
                AtomicReference<String> userSessionID = new AtomicReference<>();

                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCR1) -> {
                    currentSession = sessionCR1;
                    sessionManager = new UserSessionManager(currentSession);
                    persister = currentSession.getProvider(UserSessionPersisterProvider.class);

                    // Realm with two clients; one user session holding a client session for each.
                    RealmModel fooRealm = currentSession.realms().createRealm("foo", "foo");
                    fooRealm.addClient("foo-app");
                    fooRealm.addClient("bar-app");
                    currentSession.users().addUser(fooRealm, "user3");

                    UserSessionModel userSession = currentSession.sessions().createUserSession(fooRealm, currentSession.users().getUserByUsername("user3", fooRealm), "user3", "127.0.0.1", "form", true, null, null);
                    userSessionID.set(userSession.getId());

                    createClientSession(currentSession, fooRealm.getClientByClientId("foo-app"), userSession, "http://redirect", "state");
                    createClientSession(currentSession, fooRealm.getClientByClientId("bar-app"), userSession, "http://redirect", "state");
                });

                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCR2) -> {
                    currentSession = sessionCR2;

                    // Create offline currentSession
                    RealmModel fooRealm = currentSession.realms().getRealm("foo");
                    UserSessionModel userSession = currentSession.sessions().getUserSession(fooRealm, userSessionID.get());
                    createOfflineSessionIncludeClientSessions(currentSession, userSession);
                });

                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCR3) -> {
                    currentSession = sessionCR3;
                    RealmManager realmMgr = new RealmManager(currentSession);
                    ClientManager clientMgr = new
                    ClientManager(realmMgr);
                    RealmModel fooRealm = realmMgr.getRealm("foo");

                    // Assert currentSession was persisted with both clientSessions
                    UserSessionModel offlineSession = currentSession.sessions().getOfflineUserSession(fooRealm, userSessionID.get());
                    assertSession(offlineSession, currentSession.users().getUserByUsername("user3", fooRealm), "127.0.0.1", started, started, "foo-app", "bar-app");

                    // Remove foo-app client
                    ClientModel client = fooRealm.getClientByClientId("foo-app");
                    clientMgr.removeClient(fooRealm, client);
                });

                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCR4) -> {
                    currentSession = sessionCR4;
                    RealmManager realmMgr = new RealmManager(currentSession);
                    ClientManager clientMgr = new ClientManager(realmMgr);
                    RealmModel fooRealm = realmMgr.getRealm("foo");

                    // Assert just one bar-app clientSession persisted now
                    UserSessionModel offlineSession = currentSession.sessions().getOfflineUserSession(fooRealm, userSessionID.get());
                    Assert.assertEquals(1, offlineSession.getAuthenticatedClientSessions().size());
                    Assert.assertEquals("bar-app", offlineSession.getAuthenticatedClientSessions().values().iterator().next().getClient().getClientId());

                    // Remove bar-app client
                    ClientModel client = fooRealm.getClientByClientId("bar-app");
                    clientMgr.removeClient(fooRealm, client);
                });

                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionCR5) -> {
                    currentSession = sessionCR5;

                    // Assert nothing loaded - userSession was removed as well because it was last userSession
                    RealmManager realmMgr = new RealmManager(currentSession);
                    RealmModel fooRealm = realmMgr.getRealm("foo");
                    UserSessionModel offlineSession = currentSession.sessions().getOfflineUserSession(fooRealm, userSessionID.get());
                    Assert.assertEquals(0, offlineSession.getAuthenticatedClientSessions().size());
                });
            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                // Teardown: remove user3 and the "foo" realm regardless of test outcome.
                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionTearDown) -> {
                    currentSession = sessionTearDown;
                    RealmManager realmMgr = new RealmManager(currentSession);
                    RealmModel fooRealm = realmMgr.getRealm("foo");
                    UserModel user3 = currentSession.users().getUserByUsername("user3", fooRealm);

                    // Remove user3
                    new UserManager(currentSession).removeUser(fooRealm, user3);

                    // Cleanup
                    realmMgr = new RealmManager(currentSession);
                    realmMgr.removeRealm(realmMgr.getRealm("foo"));
                });
            }
        });
    }

    /**
     * Verifies that an offline session can be persisted and loaded for a user; teardown removes the
     * user and realm. (Removal of the user itself is exercised only via the teardown path here.)
     */
    @Test
    @ModelTest
    public void testOnUserRemoved(KeycloakSession session) {
        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionUR) -> {
            try {
                int started = Time.currentTime();
                AtomicReference<String> userSessionID = new AtomicReference<>();

                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionUR1) -> {
                    currentSession = sessionUR1;

                    // Realm "foo" with one client, one user, one online session + client session.
                    RealmModel fooRealm = currentSession.realms().createRealm("foo", "foo");
                    fooRealm.addClient("foo-app");
                    currentSession.users().addUser(fooRealm, "user3");

                    UserSessionModel userSession = currentSession.sessions().createUserSession(fooRealm, currentSession.users().getUserByUsername("user3", fooRealm), "user3", "127.0.0.1", "form", true, null, null);
                    userSessionID.set(userSession.getId());

                    createClientSession(currentSession, fooRealm.getClientByClientId("foo-app"), userSession, "http://redirect", "state");
                });

                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionUR2) -> {
                    currentSession = sessionUR2;

                    // Create offline session
                    RealmModel fooRealm = currentSession.realms().getRealm("foo");
                    UserSessionModel userSession = currentSession.sessions().getUserSession(fooRealm, userSessionID.get());
                    createOfflineSessionIncludeClientSessions(currentSession, userSession);
                });

                KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionUR3) -> {
                    currentSession
            reloadState(currentSession);
            UserSessionModel[] origSessions = createSessions(currentSession);
            origSessionsAt.set(origSessions);
        });

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionExpired2) -> {
            currentSession = sessionExpired2;
            realm = currentSession.realms().getRealm("test");
            sessionManager = new UserSessionManager(currentSession);
            persister = currentSession.getProvider(UserSessionPersisterProvider.class);

            // Persist 3 created userSessions and clientSessions as offline
            testApp[0] = realm.getClientByClientId("test-app");
            List<UserSessionModel> userSessions = currentSession.sessions().getUserSessions(realm, testApp[0]);
            for (UserSessionModel userSession : userSessions) {
                offlineSessions.put(userSession.getId(), createOfflineSessionIncludeClientSessions(currentSession, userSession));
            }

            // Assert all previously saved offline sessions found
            for (Map.Entry<String, Set<String>> entry : offlineSessions.entrySet()) {
                UserSessionModel foundSession = sessionManager.findOfflineUserSession(realm, entry.getKey());
                Assert.assertEquals(foundSession.getAuthenticatedClientSessions().keySet(), entry.getValue());
            }
        });

        log.info("Persisted 3 sessions to UserSessionPersisterProvider");

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionExpired3) -> {
            currentSession = sessionExpired3;
            realm = currentSession.realms().getRealm("test");
            persister = currentSession.getProvider(UserSessionPersisterProvider.class);
            UserSessionModel[] origSessions = origSessionsAt.get();

            UserSessionModel session0 = currentSession.sessions().getOfflineUserSession(realm, origSessions[0].getId());
            Assert.assertNotNull(session0);

            // sessions are in persister too
            Assert.assertEquals(3, persister.getUserSessionsCount(true));

            Time.setOffset(300);
            log.infof("Set time offset to 300. Time is: %d", Time.currentTime());

            // Set lastSessionRefresh to currentSession[0] to 0
            // NOTE(review): the comment above looks stale — the code sets lastSessionRefresh to the
            // current (offset-adjusted) time, not to 0. Left as-is; confirm intent against history.
            session0.setLastSessionRefresh(Time.currentTime());
        });

        // Increase timeOffset and update LSR of the session two times - first to 20 days and then to 21 days. At least one of updates
        // will propagate to PersisterLastSessionRefreshStore and update DB (Single update is not 100% sure as there is still a
        // chance of delayed periodic task to be run in the meantime and causing race-condition, which would mean LSR not updated in the DB)
        for (int i=0 ; i<2 ; i++) {
            int timeOffset = 1728000 + (i * 86400);
            KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionExpired4) -> {
                currentSession = sessionExpired4;
                realm = currentSession.realms().getRealm("test");
                UserSessionModel[] origSessions = origSessionsAt.get();

                Time.setOffset(timeOffset);
                log.infof("Set time offset to %d. Time is: %d", timeOffset, Time.currentTime());

                // Keep session0 fresh so it survives the expiration pass below.
                UserSessionModel session0 = currentSession.sessions().getOfflineUserSession(realm, origSessions[0].getId());
                session0.setLastSessionRefresh(Time.currentTime());
            });
        }

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionExpired5) -> {
            currentSession = sessionExpired5;
            realm = currentSession.realms().getRealm("test");
            persister = currentSession.getProvider(UserSessionPersisterProvider.class);

            // Increase timeOffset - 40 days
            Time.setOffset(3456000);
            log.infof("Set time offset to 3456000. Time is: %d", Time.currentTime());

            // Expire and ensure that all sessions despite session0 were removed
            currentSession.sessions().removeExpired(realm);
            persister.removeExpired(realm);
        });

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionExpired6) -> {
            currentSession = sessionExpired6;
            realm = currentSession.realms().getRealm("test");
            persister = currentSession.getProvider(UserSessionPersisterProvider.class);
            UserSessionModel[] origSessions = origSessionsAt.get();

            // assert session0 is the only session found
            Assert.assertNotNull(currentSession.sessions().getOfflineUserSession(realm, origSessions[0].getId()));
            Assert.assertNull(currentSession.sessions().getOfflineUserSession(realm, origSessions[1].getId()));
            Assert.assertNull(currentSession.sessions().getOfflineUserSession(realm, origSessions[2].getId()));
            Assert.assertEquals(1, persister.getUserSessionsCount(true));

            // Expire everything and assert nothing found
            Time.setOffset(6000000);
            currentSession.sessions().removeExpired(realm);
            persister.removeExpired(realm);
        });

        KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession sessionExpired7) -> {
            currentSession = sessionExpired7;
            realm = currentSession.realms().getRealm("test");
            sessionManager = new UserSessionManager(currentSession);
            persister = currentSession.getProvider(UserSessionPersisterProvider.class);

            for (String userSessionId : offlineSessions.keySet()) {
                Assert.assertNull(sessionManager.findOfflineUserSession(realm, userSessionId));
            }
            Assert.assertEquals(0, persister.getUserSessionsCount(true));
        });
        } finally {
            // Restore real time and re-register the periodic LSR task cancelled at the top.
            Time.setOffset(0);
            session.getKeycloakSessionFactory().publish(new ResetTimeOffsetEvent());
            timer.schedule(timerTaskCtx.getRunnable(), timerTaskCtx.getIntervalMillis(), PersisterLastSessionRefreshStoreFactory.DB_LSR_PERIODIC_TASK_NAME);
        }
    }

    /**
     * Persists each authenticated client session of the given user session as an offline session.
     * Returns the set of client UUIDs (ClientModel#getId) that were persisted.
     */
    private static Set<String> createOfflineSessionIncludeClientSessions(KeycloakSession session, UserSessionModel userSession) {
Set<String> offlineSessions = new HashSet<>(); UserSessionManager localManager = new UserSessionManager(session); for (AuthenticatedClientSessionModel clientSession : userSession.getAuthenticatedClientSessions().values()) { localManager.createOrUpdateOfflineSession(clientSession, userSession); offlineSessions.add(clientSession.getClient().getId()); } return offlineSessions; } public static void assertSession(UserSessionModel session, UserModel user, String ipAddress, int started, int lastRefresh, String... clients) { assertEquals(user.getId(), session.getUser().getId()); assertEquals(ipAddress, session.getIpAddress()); assertEquals(user.getUsername(), session.getLoginUsername()); assertEquals("form", session.getAuthMethod()); assertTrue(session.isRememberMe()); assertTrue((session.getStarted() >= started - 1) && (session.getStarted() <= started + 1)); assertTrue((session.getLastSessionRefresh() >= lastRefresh - 1) && (session.getLastSessionRefresh() <= lastRefresh + 1)); String[] actualClients = new String[session.getAuthenticatedClientSessions().size()]; int i = 0; for (Map.Entry<String, AuthenticatedClientSessionModel> entry : session.getAuthenticatedClientSessions().entrySet()) { String clientUUID = entry.getKey(); AuthenticatedClientSessionModel clientSession = entry.getValue(); Assert.assertEquals(clientUUID, clientSession.getClient().getId()); actualClients[i] = clientSession.getClient().getClientId(); i++; } } private static AuthenticatedClientSessionModel createClientSession(KeycloakSession sessionParam, ClientModel client, UserSessionModel userSession, String redirect, String state) { AuthenticatedClientSessionModel clientSession = sessionParam.sessions().createClientSession(client.getRealm(), client, userSession); clientSession.setRedirectUri(redirect); if (state != null) clientSession.setNote(OIDCLoginProtocol.STATE_PARAM, state); return clientSession; } private static UserSessionModel[] createSessions(KeycloakSession session) { UserSessionModel[] 
sessions = new UserSessionModel[3]; sessions[0] = session.sessions().createUserSession(realm, currentSession.users().getUserByUsername("user1", realm), "user1", "127.0.0.1", "form", true, null, null); Set<String> roles = new HashSet<String>(); roles.add("one"); roles.add("two"); Set<String> protocolMappers = new HashSet<String>(); protocolMappers.add("mapper-one"); protocolMappers.add("mapper-two"); createClientSession(session, realm.getClientByClientId("test-app"), sessions[0], "http://redirect", "state"); createClientSession(session, realm.getClientByClientId("third-party"), sessions[0], "http://redirect", "state"); sessions[1] = session.sessions().createUserSession(realm, session.users().getUserByUsername("user1", realm), "user1", "127.0.0.2", "form", true, null, null); createClientSession(session, realm.getClientByClientId("test-app"), sessions[1], "http://redirect", "state"); sessions[2] = session.sessions().createUserSession(realm, session.users().getUserByUsername("user2", realm), "user2", "127.0.0.3", "form", true, null, null); createClientSession(session, realm.getClientByClientId("test-app"), sessions[2], "http://redirect", "state"); return sessions; } public static void reloadState(KeycloakSession session) { reloadState(session, false); } public static void reloadState(KeycloakSession session, Boolean initialConfig) { currentSession = session; realm = currentSession.realms().getRealm("test"); if (initialConfig) { currentSession.users().addUser(realm, "user1").setEmail("user1@localhost"); currentSession.users().addUser(realm, "user2").setEmail("user2@localhost"); } sessionManager = new UserSessionManager(currentSession); persister = currentSession.getProvider(UserSessionPersisterProvider.class); } @Override public void configureTestRealm(RealmRepresentation testRealm) { } }
/* The contents of this file are subject to the license and copyright terms
 * detailed in the license directory at the root of the source tree (also
 * available online at http://fedora-commons.org/license/).
 */
package org.fcrepo.server.journal.readerwriter.multicast;

import static org.junit.Assert.fail;

import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventFactory;
import javax.xml.stream.XMLStreamException;

import org.fcrepo.server.journal.AbstractJournalTester;
import org.fcrepo.server.journal.JournalException;
import org.fcrepo.server.journal.MockServerForJournalTesting;
import org.fcrepo.server.management.MockManagementDelegate;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/** Tests for {@link LocalDirectoryTransport}: parameter validation, file lifecycle, and state errors. */
public class TestLocalDirectoryTransport
        extends AbstractJournalTester {

    // Supports legacy test runners
    public static junit.framework.Test suite() {
        // FIX: the adapter previously wrapped TestMulticastJournalWriterInitializations.class
        // (copy-paste from a sibling test), so legacy JUnit 3 runners executed the wrong tests.
        return new junit.framework.JUnit4TestAdapter(TestLocalDirectoryTransport.class);
    }

    private static final String EXPECTED_JOURNAL_1_CONTENTS = "<?xml "
            + "version=\"1.0\" encoding=\"UTF-8\"?>\n"
            + "<FedoraJournal repositoryHash=\"firstSillyHash\" "
            + "timestamp=\"2007-03-05T16:49:21.392-0500\">\n"
            + " <junkElement1a></junkElement1a>\n"
            + " <junkElement1b></junkElement1b></FedoraJournal>\n";

    private static final String EXPECTED_JOURNAL_2_CONTENTS = "<?xml "
            + "version=\"1.0\" encoding=\"UTF-8\"?>\n"
            + "<FedoraJournal repositoryHash=\"secondSillyHash\" "
            + "timestamp=\"2007-03-05T16:49:21.392-0500\">\n"
            + " <junkElement2></junkElement2></FedoraJournal>\n";

    @Rule
    public TemporaryFolder folder = new TemporaryFolder();

    private File journalDirectory;

    // immaterial to the test - required by the constructor.
private static final boolean CRUCIAL = true; private Map<String, String> parameters; private MockMulticastJournalWriter parent; @Before public void createJournalDirectory() throws IOException { journalDirectory = folder.newFolder("TestLocalDirectoryTransport"); } @Before public void initalizeBasicParameters() { parameters = new HashMap<String, String>(); parameters.put(LocalDirectoryTransport.PARAMETER_DIRECTORY_PATH, journalDirectory.getAbsolutePath()); } @Before public void initializeTransportParent() throws JournalException { MockServerForJournalTesting server = new MockServerForJournalTesting(new MockManagementDelegate(), "myHashValue"); parent = new MockMulticastJournalWriter(new HashMap<String, String>(), null, server); } @Test public void testParameterNoDirectoryPath() { parameters.remove(LocalDirectoryTransport.PARAMETER_DIRECTORY_PATH); try { new LocalDirectoryTransport(parameters, CRUCIAL, null); fail("expected a JournalException"); } catch (JournalException e) { // expected the exception } } @Test public void testParametersInvalidDirectory() { parameters.put(LocalDirectoryTransport.PARAMETER_DIRECTORY_PATH, "BogusDirectoryName"); try { new LocalDirectoryTransport(parameters, CRUCIAL, null); fail("expected a JournalException"); } catch (JournalException e) { // expected the exception } } @Test public void testParametersSuccess() throws JournalException { new LocalDirectoryTransport(parameters, CRUCIAL, null); } @Test public void testOperations() throws JournalException, IOException, XMLStreamException, ParseException { XMLEventFactory factory = XMLEventFactory.newInstance(); QName name1a = new QName("junkElement1a"); QName name1b = new QName("junkElement1b"); QName name2 = new QName("junkElement2"); SimpleDateFormat parser = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent); // open creates the temp file parent.setCurrentDate(parser.parse("2007-03-05T16:49:21.392-0500")); 
        transport.openFile("firstSillyHash", "fileOne", parent.getCurrentDate());
        // While open, the journal lives under a "_"-prefixed temp name.
        File tempfile1 = new File(journalDirectory, "_fileOne");
        assertFileExists(tempfile1);

        // write to the file
        transport.getWriter().add(factory.createStartElement(name1a, null, null));
        transport.getWriter().add(factory.createEndElement(name1a, null));
        transport.getWriter().add(factory.createStartElement(name1b, null, null));
        transport.getWriter().add(factory.createEndElement(name1b, null));

        // closing renames the file
        transport.closeFile();
        File file1 = new File(journalDirectory, "fileOne");
        assertFileExists(file1);
        assertFileDoesNotExist(tempfile1);

        // open creates another temp file
        transport.openFile("secondSillyHash", "fileTwo", parent.getCurrentDate());
        File tempfile2 = new File(journalDirectory, "_fileTwo");
        assertFileExists(tempfile2);

        // write to the file
        transport.getWriter().add(factory.createStartElement(name2, null, null));
        transport.getWriter().add(factory.createEndElement(name2, null));

        // closing renames the file
        transport.closeFile();
        File file2 = new File(journalDirectory, "fileTwo");
        assertFileExists(file2);
        assertFileDoesNotExist(tempfile2);

        // shut it down
        transport.shutdown();

        // did we write what was expected?
        assertFileContents(EXPECTED_JOURNAL_1_CONTENTS, file1);
        assertFileContents(EXPECTED_JOURNAL_2_CONTENTS, file2);
    }

    /** An invalid filename (":") must produce a JournalException on open. */
    @Test
    public void testUnableToCreateFile() throws JournalException {
        LocalDirectoryTransport transport = new LocalDirectoryTransport(parameters, CRUCIAL, null);
        try {
            transport.openFile("firstSillyHash", ":", new Date());
            fail("expecting JournalException");
        } catch (JournalException e) {
            // expected the exception
        }
    }

    /** Opening a filename that already exists (from a prior open/close) must fail. */
    @Test
    public void testFileAlreadyExists() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.openFile("firstSillyHash", "fileOne", new Date());
        transport.closeFile();
        try {
            transport.openFile("secondSillyHash", "fileOne", new Date());
            fail("Expected a JournalException");
        } catch (JournalException e) {
            // expected the exception
        }
    }

    /** openFile after shutdown is an error. */
    @Test
    public void testOpenAfterShutdown() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.shutdown();
        try {
            transport.openFile("firstSillyHash", "fileOne", new Date());
            fail("Expected a JournalException");
        } catch (JournalException e) {
            // expected the exception
        }
    }

    /** getWriter after shutdown is an error. */
    @Test
    public void testGetWriterAfterShutdown() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.openFile("firstSillyHash", "fileOne", new Date());
        transport.closeFile();
        transport.shutdown();
        try {
            transport.getWriter();
            fail("Expected a JournalException");
        } catch (JournalException e) {
            // expected the exception
        }
    }

    /** closeFile after shutdown is an error. */
    @Test
    public void testCloseAfterShutdown() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.shutdown();
        try {
            transport.closeFile();
            fail("Expected a JournalException");
        } catch (JournalException e) {
            // expected the exception
        }
    }

    /** A second shutdown is harmless. */
    @Test
    public void testShutdownAfterShutdown() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.shutdown();
        // repeated shutdowns are no problem.
        transport.shutdown();
    }

    /** openFile while a file is already open is an error. */
    @Test
    public void testOpenAfterOpen() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.openFile("firstSillyHash", "Open", new Date());
        try {
            transport.openFile("firstSillyHash", "OpenOpen", new Date());
            fail("Expected a JournalException");
        } catch (JournalException e) {
            // expected the exception
        }
    }

    /** shutdown while a file is open is an error; the file is closed afterwards for cleanup. */
    @Test
    public void testShutdownAfterOpen() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.openFile("firstSillyHash", "OpenBeforeShutdown", new Date());
        try {
            transport.shutdown();
            fail("Expected a JournalException");
        } catch (JournalException e) {
            // expected the exception - now close the file so we can clean up.
            transport.closeFile();
        }
    }

    /** closeFile on an already-closed file is an error. */
    @Test
    public void testCloseAfterClose() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.openFile("firstSillyHash", "CloseClose", new Date());
        transport.closeFile();
        try {
            transport.closeFile();
            fail("Expected a JournalException");
        } catch (JournalException e) {
            // expected the exception
        }
    }

    /** getWriter after the file has been closed is an error. */
    @Test
    public void testGetWriterAfterClose() throws JournalException {
        Transport transport = new LocalDirectoryTransport(parameters, CRUCIAL, parent);
        transport.openFile("whoCaresHash", "CloseGetWriter", new Date());
        transport.closeFile();
        try {
            transport.getWriter();
            fail("Expected a JournalException");
        } catch (JournalException e) {
            // expected the exception
        }
    }
}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.selenium.remote;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import org.openqa.selenium.Capabilities;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.Platform;
import org.openqa.selenium.logging.LogType;
import org.openqa.selenium.logging.LoggingPreferences;

import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import com.google.gson.JsonArray;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;

/**
 * Unit tests for {@code JsonToBeanConverter}: deserializing JSON text into
 * strings, maps, beans, Capabilities, Response and Command objects.
 */
@RunWith(JUnit4.class)
public class JsonToBeanConverterTest {

  // A bare JSON string converts straight to a Java String.
  @Test
  public void testCanConstructASimpleString() throws Exception {
    String text = new JsonToBeanConverter().convert(String.class, "cheese");
    assertThat(text, is("cheese"));
  }

  // A JSON object converts to a Map with all its properties.
  @SuppressWarnings("unchecked")
  @Test
  public void testCanPopulateAMap() throws Exception {
    JsonObject toConvert = new JsonObject();
    toConvert.addProperty("cheese", "brie");
    toConvert.addProperty("foodstuff", "cheese");
    Map<String, String> map = new JsonToBeanConverter().convert(Map.class, toConvert.toString());
    assertThat(map.size(), is(2));
    assertThat(map, hasEntry("cheese", "brie"));
    assertThat(map, hasEntry("foodstuff", "cheese"));
  }

  // JSON null becomes a present map entry with a null value.
  @Test
  public void testCanPopulateAMapThatContainsNull() throws Exception {
    JsonObject toConvert = new JsonObject();
    toConvert.add("foo", JsonNull.INSTANCE);
    Map<?,?> converted = new JsonToBeanConverter().convert(Map.class, toConvert.toString());
    assertEquals(1, converted.size());
    assertTrue(converted.containsKey("foo"));
    assertNull(converted.get("foo"));
  }

  // Properties matching setters are written onto a simple bean.
  @Test
  public void testCanPopulateASimpleBean() throws Exception {
    JsonObject toConvert = new JsonObject();
    toConvert.addProperty("value", "time");
    SimpleBean bean = new JsonToBeanConverter().convert(SimpleBean.class, toConvert.toString());
    assertThat(bean.getValue(), is("time"));
  }

  // Unknown JSON properties are ignored rather than raising an error.
  @Test
  public void testWillSilentlyDiscardUnusedFieldsWhenPopulatingABean() throws Exception {
    JsonObject toConvert = new JsonObject();
    toConvert.addProperty("value", "time");
    toConvert.addProperty("frob", "telephone");
    SimpleBean bean = new JsonToBeanConverter().convert(SimpleBean.class, toConvert.toString());
    assertThat(bean.getValue(), is("time"));
  }

  // Numeric JSON values arrive as longs when read into a Map.
  @Test
  public void testShouldSetPrimitiveValuesToo() throws Exception {
    JsonObject toConvert = new JsonObject();
    toConvert.addProperty("magicNumber", 3);
    Map<?,?> map = new JsonToBeanConverter().convert(Map.class, toConvert.toString());
    assertEquals(3L, map.get("magicNumber"));
  }

  // Nested JSON objects populate nested beans.
  @Test
  public void testShouldPopulateFieldsOnNestedBeans() throws Exception {
    JsonObject toConvert = new JsonObject();
    toConvert.addProperty("name", "frank");
    JsonObject child = new JsonObject();
    child.addProperty("value", "lots");
    toConvert.add("bean", child);
    ContainingBean bean = new JsonToBeanConverter().convert(ContainingBean.class, toConvert.toString());
    assertThat(bean.getName(), is("frank"));
    assertThat(bean.getBean().getValue(), is("lots"));
  }

  // DesiredCapabilities survives a full serialize/deserialize round trip.
  @Test
  public void testShouldProperlyFillInACapabilitiesObject() throws Exception {
    DesiredCapabilities capabilities =
        new DesiredCapabilities("browser", CapabilityType.VERSION, Platform.ANY);
    capabilities.setJavascriptEnabled(true);
    String text = new BeanToJsonConverter().convert(capabilities);
    DesiredCapabilities readCapabilities =
        new JsonToBeanConverter().convert(DesiredCapabilities.class, text);
    assertEquals(capabilities, readCapabilities);
  }

  // JSON booleans convert to Java Booleans.
  @Test
  public void testShouldBeAbleToInstantiateBooleans() throws Exception {
    JsonArray array = new JsonArray();
    array.add(new JsonPrimitive(true));
    array.add(new JsonPrimitive(false));
    boolean first = new JsonToBeanConverter().convert(Boolean.class, array.get(0));
    boolean second = new JsonToBeanConverter().convert(Boolean.class, array.get(1));
    assertTrue(first);
    assertFalse(second);
  }

  // Asking for Object yields a Map representation of a JSON object.
  @Test
  public void testShouldUseAMapToRepresentComplexObjects() throws Exception {
    JsonObject toModel = new JsonObject();
    toModel.addProperty("thing", "hairy");
    toModel.addProperty("hairy", "true");
    Map<?,?> modelled = (Map<?,?>) new JsonToBeanConverter().convert(Object.class, toModel.toString());
    assertEquals(2, modelled.size());
  }

  // A wire-protocol Response carrying an element payload converts into a Map value.
  @Test
  public void testShouldConvertAResponseWithAnElementInIt() throws Exception {
    String json =
        "{\"value\":{\"value\":\"\",\"text\":\"\",\"selected\":false,\"enabled\":true,\"id\":\"three\"},\"context\":\"con\",\"sessionId\":\"sess\",\"error\":false}";
    Response converted = new JsonToBeanConverter().convert(Response.class, json);
    Map<?,?> value = (Map<?,?>) converted.getValue();
    assertEquals("three", value.get("id"));
  }

  @Test
  public void
testConvertABlankStringAsAStringEvenWhenAskedToReturnAnObject() throws Exception { Object o = new JsonToBeanConverter().convert(Object.class, ""); assertTrue(o instanceof String); } @Test public void testShouldBeAbleToCopeWithStringsThatLookLikeBooleans() throws Exception { String json = "{\"value\":\"false\",\"context\":\"foo\",\"sessionId\":\"1210083863107\",\"error\":false}"; try { new JsonToBeanConverter().convert(Response.class, json); } catch (Exception e) { e.printStackTrace(); fail("This should have worked"); } } @Test public void testShouldBeAbleToSetAnObjectToABoolean() throws Exception { String json = "{\"value\":true,\"context\":\"foo\",\"sessionId\":\"1210084658750\",\"error\":false}"; Response response = new JsonToBeanConverter().convert(Response.class, json); assertThat((Boolean) response.getValue(), is(true)); } @Test public void testCanHandleValueBeingAnArray() throws Exception { String[] value = {"Cheese", "Peas"}; Response response = new Response(); response.setSessionId("bar"); response.setValue(value); response.setStatus(1512); String json = new BeanToJsonConverter().convert(response); Response converted = new JsonToBeanConverter().convert(Response.class, json); assertEquals("bar", response.getSessionId()); assertEquals(2, ((List<?>) converted.getValue()).size()); assertEquals(1512, response.getStatus()); } @Test public void testShouldConvertObjectsInArraysToMaps() throws Exception { Date date = new Date(); Cookie cookie = new Cookie("foo", "bar", "localhost", "/rooted", date, true, true); String rawJson = new BeanToJsonConverter().convert(Collections.singletonList(cookie)); List<?> list = new JsonToBeanConverter().convert(List.class, rawJson); Object first = list.get(0); assertTrue(first instanceof Map); Map<?,?> map = (Map<?,?>) first; assertMapEntry(map, "name", "foo"); assertMapEntry(map, "value", "bar"); assertMapEntry(map, "domain", "localhost"); assertMapEntry(map, "path", "/rooted"); assertMapEntry(map, "secure", true); 
assertMapEntry(map, "httpOnly", true);
    // expiry is serialized in seconds, not milliseconds.
    assertMapEntry(map, "expiry", TimeUnit.MILLISECONDS.toSeconds(date.getTime()));
  }

  // Helper: assert the map contains `key` mapped to `expected`, with a diagnostic message.
  private void assertMapEntry(Map<?,?> map, String key, Object expected) {
    assertTrue("Missing key: " + key, map.containsKey(key));
    assertEquals("Wrong value for key: " + key + ": " + map.get(key).getClass().getName(),
        expected, map.get(key));
  }

  // An exception's stack trace serializes to an array of maps and back to a List of Maps.
  @Test
  public void testShouldConvertAnArrayBackIntoAnArray() throws Exception {
    Exception e = new Exception();
    String converted = new BeanToJsonConverter().convert(e);
    Map<?,?> reconstructed = new JsonToBeanConverter().convert(Map.class, converted);
    List<?> trace = (List<?>) reconstructed.get("stackTrace");
    assertTrue(trace.get(0) instanceof Map);
  }

  // SessionId survives a JSON round trip.
  @Test
  public void testShouldBeAbleToReconsituteASessionId() throws Exception {
    String json = new BeanToJsonConverter().convert(new SessionId("id"));
    SessionId sessionId = new JsonToBeanConverter().convert(SessionId.class, json);
    assertEquals("id", sessionId.toString());
  }

  // A Command (session id, name, parameters) survives a JSON round trip.
  @Test
  public void testShouldBeAbleToConvertACommand() throws Exception {
    SessionId sessionId = new SessionId("session id");
    Command original = new Command(sessionId, DriverCommand.NEW_SESSION,
        new HashMap<String, String>() {
          {
            put("food", "cheese");
          }
        });
    String raw = new BeanToJsonConverter().convert(original);
    Command converted = new JsonToBeanConverter().convert(Command.class, raw);
    assertEquals(sessionId.toString(), converted.getSessionId().toString());
    assertEquals(original.getName(), converted.getName());
    assertEquals(1, converted.getParameters().keySet().size());
    assertEquals("cheese", converted.getParameters().get("food"));
  }

  // Custom (non-standard) capability keys survive serialization.
  @Test
  public void testShouldConvertCapabilitiesToAMapAndIncludeCustomValues() throws Exception {
    DesiredCapabilities caps = new DesiredCapabilities();
    caps.setCapability("furrfu", "fishy");
    String raw = new BeanToJsonConverter().convert(caps);
    Capabilities converted = new JsonToBeanConverter().convert(Capabilities.class, raw);
    assertEquals("fishy",
converted.getCapability("furrfu"));
  }

  // Logging preferences embedded in capabilities parse into a LoggingPreferences
  // object; note "DEBUG" maps to java.util.logging Level.FINE.
  @Test
  public void testShouldParseCapabilitiesWithLoggingPreferences() throws Exception {
    JsonObject prefs = new JsonObject();
    prefs.addProperty("browser", "WARNING");
    prefs.addProperty("client", "DEBUG");
    prefs.addProperty("driver", "ALL");
    prefs.addProperty("server", "OFF");
    JsonObject caps = new JsonObject();
    caps.add(CapabilityType.LOGGING_PREFS, prefs);
    Capabilities converted = new JsonToBeanConverter()
        .convert(Capabilities.class, caps.toString());
    LoggingPreferences lp =
        (LoggingPreferences) converted.getCapability(CapabilityType.LOGGING_PREFS);
    assertNotNull(lp);
    assertEquals(Level.WARNING, lp.getLevel(LogType.BROWSER));
    assertEquals(Level.FINE, lp.getLevel(LogType.CLIENT));
    assertEquals(Level.ALL, lp.getLevel(LogType.DRIVER));
    assertEquals(Level.OFF, lp.getLevel(LogType.SERVER));
  }

  // A string that merely contains JSON text must stay a string, not be re-parsed.
  @Test
  public void testShouldNotParseQuotedJsonObjectsAsActualJsonObjects() {
    JsonObject inner = new JsonObject();
    inner.addProperty("color", "green");
    inner.addProperty("number", 123);
    JsonObject outer = new JsonObject();
    outer.addProperty("inner", inner.toString());
    String jsonStr = outer.toString();
    Object convertedOuter = new JsonToBeanConverter().convert(Map.class, jsonStr);
    assertThat(convertedOuter, instanceOf(Map.class));
    Object convertedInner = ((Map<?,?>) convertedOuter).get("inner");
    assertNotNull(convertedInner);
    assertThat(convertedInner, instanceOf(String.class));
    assertThat(convertedInner.toString(), equalTo(inner.toString()));
  }

  // Selenium 3 sends sessionId as a plain string; it must still decode to a SessionId.
  @Test
  public void shouldBeAbleToConvertASelenium3CommandToASelenium2Command() {
    SessionId expectedId = new SessionId("thisisakey");
    JsonObject rawJson = new JsonObject();
    // In selenium 2, the sessionId is an object. In selenium 3, it's a straight string.
    rawJson.addProperty("sessionId", expectedId.toString());
    rawJson.addProperty("name", "some command");
    rawJson.add("parameters", new JsonObject());
    String stringified = rawJson.toString();
    Command converted = new JsonToBeanConverter().convert(Command.class, stringified);
    assertEquals(expectedId, converted.getSessionId());
  }

  // A static fromJson(String) factory on the target type takes precedence.
  @Test
  public void testShouldCallFromJsonMethodIfPresent() {
    JsonAware res = new JsonToBeanConverter().convert(JsonAware.class, "converted");
    assertEquals("converted", res.convertedValue);
  }

  // Test for issue 8187
  @Test
  public void testDecodingResponseWithNumbersInValueObject() {
    Response response = new JsonToBeanConverter()
        .convert(Response.class, "{\"status\":0,\"value\":{\"width\":96,\"height\":46.19140625}}");
    @SuppressWarnings("unchecked")
    Map<String, Number> value = (Map<String, Number>) response.getValue();
    assertEquals(96, value.get("width").intValue());
    assertEquals(46, value.get("height").intValue());
    assertEquals(46.19140625, value.get("height").doubleValue(), 0.00001);
  }

  // A numeric "status" field populates both status and the derived state string.
  @Test
  public void testShouldRecognizeNumericStatus() {
    Response response = new JsonToBeanConverter()
        .convert(Response.class, "{\"status\":0,\"value\":\"cheese\"}");
    assertEquals(0, response.getStatus());
    assertEquals(ErrorCodes.toState(0), response.getState());
    @SuppressWarnings("unchecked")
    String value = (String) response.getValue();
    assertEquals("cheese", value);
  }

  // A string "status" field ("success") also yields status 0 and the matching state.
  @Test
  public void testShouldRecognizeStringStatus() {
    Response response = new JsonToBeanConverter()
        .convert(Response.class, "{\"status\":\"success\",\"value\":\"cheese\"}");
    assertEquals(0, response.getStatus());
    assertEquals(ErrorCodes.toState(0), response.getState());
    @SuppressWarnings("unchecked")
    String value = (String) response.getValue();
    assertEquals("cheese", value);
  }

  // A "state" field populates the state string and implies status 0.
  @Test
  public void testShouldRecognizeStringState() {
    Response response = new JsonToBeanConverter()
        .convert(Response.class, "{\"state\":\"success\",\"value\":\"cheese\"}");
    assertEquals("success", response.getState());
    assertEquals(0, response.getStatus());
    @SuppressWarnings("unchecked")
    String value = (String) response.getValue();
    assertEquals("cheese", value);
  }

  // Fixture bean with a single string property.
  public static class SimpleBean {

    private String value;

    public String getValue() {
      return value;
    }

    public void setValue(String value) {
      this.value = value;
    }
  }

  // Fixture bean containing a nested SimpleBean.
  public static class ContainingBean {

    private String name;
    private SimpleBean bean;

    public String getName() {
      return name;
    }

    public void setName(String name) {
      this.name = name;
    }

    public SimpleBean getBean() {
      return bean;
    }

    public void setBean(SimpleBean bean) {
      this.bean = bean;
    }
  }

  // Fixture type exposing a static fromJson factory the converter should invoke.
  public static class JsonAware {
    private String convertedValue;

    public JsonAware(String convertedValue) {
      this.convertedValue = convertedValue;
    }

    public static JsonAware fromJson(String json) {
      return new JsonAware(json);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.service;

import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
import org.apache.hadoop.yarn.api.records.*;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.service.api.records.Component;
import org.apache.hadoop.yarn.service.api.records.ComponentState;
import org.apache.hadoop.yarn.service.api.records.Configuration;
import org.apache.hadoop.yarn.service.api.records.Container;
import org.apache.hadoop.yarn.service.api.records.PlacementConstraint;
import org.apache.hadoop.yarn.service.api.records.PlacementPolicy;
import org.apache.hadoop.yarn.service.api.records.PlacementScope;
import org.apache.hadoop.yarn.service.api.records.PlacementType;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.api.records.ServiceState;
import org.apache.hadoop.yarn.service.client.ServiceClient;
import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
import org.hamcrest.CoreMatchers;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeoutException;

import static org.assertj.core.api.Assertions.assertThat;
import static org.apache.hadoop.yarn.api.records.YarnApplicationState.FINISHED;
import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.*;
import static org.apache.hadoop.yarn.service.exceptions.LauncherExitCodes.EXIT_COMMAND_ARGUMENT_ERROR;
import static org.apache.hadoop.yarn.service.exceptions.LauncherExitCodes.EXIT_NOT_FOUND;

/**
 * End to end tests to test deploying services with MiniYarnCluster and a in-JVM
 * ZK testing cluster.
 */
public class TestYarnNativeServices extends ServiceTestUtils {

  private static final Logger LOG =
      LoggerFactory.getLogger(TestYarnNativeServices.class);

  @Rule
  public TemporaryFolder tmpFolder = new TemporaryFolder();

  // Remove any leftover scratch dir before each test so runs are isolated.
  @Before
  public void setup() throws Exception {
    File tmpYarnDir = new File("target", "tmp");
    FileUtils.deleteQuietly(tmpYarnDir);
  }

  // Tear down the mini cluster after each test.
  @After
  public void tearDown() throws IOException {
    shutdown();
  }

  // End-to-end test to use ServiceClient to deploy a service.
  // 1. Create a service with 2 components, each of which has 2 containers
  // 2.
// Flex up each component to 3 containers and check the component instance names
  // 3. Flex down each component to 1 container and check the component instance names
  // 4. Flex up each component to 2 containers and check the component instance names
  // 5. Stop the service
  // 6. Destroy the service
  @Test (timeout = 200000)
  public void testCreateFlexStopDestroyService() throws Exception {
    setupInternal(NUM_NMS);
    ServiceClient client = createClient(getConf());
    Service exampleApp = createExampleApplication();
    client.actionCreate(exampleApp);
    SliderFileSystem fileSystem = new SliderFileSystem(getConf());
    Path appDir = fileSystem.buildClusterDirPath(exampleApp.getName());
    // check app.json is persisted.
    Assert.assertTrue(
        getFS().exists(new Path(appDir, exampleApp.getName() + ".json")));
    waitForServiceToBeStable(client, exampleApp);

    // Flex two components, each from 2 container to 3 containers.
    flexComponents(client, exampleApp, 3L);
    // wait for flex to be completed, increase from 2 to 3 containers.
    waitForServiceToBeStable(client, exampleApp);
    // check all instances name for each component are in sequential order.
    checkCompInstancesInOrder(client, exampleApp);

    // flex down to 1
    flexComponents(client, exampleApp, 1L);
    waitForServiceToBeStable(client, exampleApp);
    checkCompInstancesInOrder(client, exampleApp);

    // check component dir and registry are cleaned up.

    // flex up again to 2
    flexComponents(client, exampleApp, 2L);
    waitForServiceToBeStable(client, exampleApp);
    checkCompInstancesInOrder(client, exampleApp);

    // stop the service
    LOG.info("Stop the service");
    client.actionStop(exampleApp.getName(), true);
    ApplicationReport report = client.getYarnClient()
        .getApplicationReport(ApplicationId.fromString(exampleApp.getId()));
    // AM unregisters with RM successfully
    Assert.assertEquals(FINISHED, report.getYarnApplicationState());
    Assert.assertEquals(FinalApplicationStatus.ENDED,
        report.getFinalApplicationStatus());
    String serviceZKPath = RegistryUtils.servicePath(RegistryUtils
        .currentUser(), YarnServiceConstants.APP_TYPE, exampleApp.getName());
    Assert.assertFalse("Registry ZK service path still exists after stop",
        getCuratorService().zkPathExists(serviceZKPath));

    LOG.info("Destroy the service");
    // destroy the service and check the app dir is deleted from fs.
    Assert.assertEquals(0, client.actionDestroy(exampleApp.getName()));
    // check the service dir on hdfs (in this case, local fs) are deleted.
    Assert.assertFalse(getFS().exists(appDir));

    // check that destroying again does not succeed
    Assert.assertEquals(EXIT_NOT_FOUND, client.actionDestroy(exampleApp.getName()));
  }

  // Save a service without starting it and ensure that stop does not NPE and
  // that service can be successfully destroyed
  @Test (timeout = 200000)
  public void testStopDestroySavedService() throws Exception {
    setupInternal(NUM_NMS);
    ServiceClient client = createClient(getConf());
    Service exampleApp = createExampleApplication();
    client.actionBuild(exampleApp);
    Assert.assertEquals(EXIT_COMMAND_ARGUMENT_ERROR, client.actionStop(
        exampleApp.getName()));
    Assert.assertEquals(0, client.actionDestroy(exampleApp.getName()));
  }

  // Create compa with 2 containers
  // Create compb with 2 containers which depends on compa
  // Create compc with 2 containers which depends on compb
  // Check containers for compa started before containers for compb before
  // containers for compc
  @Test (timeout = 200000)
  public void testComponentStartOrder() throws Exception {
    setupInternal(NUM_NMS);
    ServiceClient client = createClient(getConf());
    Service exampleApp = new Service();
    exampleApp.setName("teststartorder");
    exampleApp.setVersion("v1");
    exampleApp.addComponent(createComponent("compa", 2, "sleep 1000"));

    // Let compb depend on compa
    Component compb = createComponent("compb", 2, "sleep 1000");
    compb.setDependencies(Collections.singletonList("compa"));
    exampleApp.addComponent(compb);

    // Let compc depend on compb
    Component compc = createComponent("compc", 2, "sleep 1000");
    compc.setDependencies(Collections.singletonList("compb"));
    exampleApp.addComponent(compc);

    client.actionCreate(exampleApp);
    waitForServiceToBeStable(client, exampleApp);

    // check that containers for compa are launched before containers for compb
    checkContainerLaunchDependencies(client, exampleApp, "compa", "compb", "compc");

    client.actionStop(exampleApp.getName(), true);
    client.actionDestroy(exampleApp.getName());
  }

  @Test(timeout = 200000)
  public void
testCreateServiceSameNameDifferentUser() throws Exception {
    // Two different users should each be able to use the same service name,
    // because their service base paths differ.
    String sameAppName = "same-name";
    String userA = "usera";
    String userB = "userb";

    setupInternal(NUM_NMS);
    ServiceClient client = createClient(getConf());
    String origBasePath = getConf().get(YARN_SERVICE_BASE_PATH);

    Service userAApp = new Service();
    userAApp.setName(sameAppName);
    userAApp.setVersion("v1");
    userAApp.addComponent(createComponent("comp", 1, "sleep 1000"));

    Service userBApp = new Service();
    userBApp.setName(sameAppName);
    userBApp.setVersion("v1");
    userBApp.addComponent(createComponent("comp", 1, "sleep 1000"));

    File userABasePath = null, userBBasePath = null;
    try {
      userABasePath = new File(origBasePath, userA);
      userABasePath.mkdirs();
      getConf().set(YARN_SERVICE_BASE_PATH, userABasePath.getAbsolutePath());
      client.actionCreate(userAApp);
      waitForServiceToBeStarted(client, userAApp);

      userBBasePath = new File(origBasePath, userB);
      userBBasePath.mkdirs();
      getConf().set(YARN_SERVICE_BASE_PATH, userBBasePath.getAbsolutePath());
      client.actionBuild(userBApp);
    } catch (Exception e) {
      Assert
          .fail("Exception should not be thrown - " + e.getLocalizedMessage());
    } finally {
      // Clean up each user's service under its own base path.
      if (userABasePath != null) {
        getConf().set(YARN_SERVICE_BASE_PATH, userABasePath.getAbsolutePath());
        client.actionStop(sameAppName, true);
        client.actionDestroy(sameAppName);
      }
      if (userBBasePath != null) {
        getConf().set(YARN_SERVICE_BASE_PATH, userBBasePath.getAbsolutePath());
        client.actionDestroy(sameAppName);
      }
    }

    // Need to extend this test to validate that different users can create
    // apps of exact same name. So far only create followed by build is tested.
    // Need to test create followed by create.
  }

  @Test(timeout = 200000)
  public void testCreateServiceSameNameSameUser() throws Exception {
    // The same user must NOT be able to build or create two services with the
    // same name.
    String sameAppName = "same-name";
    String user = UserGroupInformation.getCurrentUser().getUserName();
    System.setProperty("user.name", user);

    setupInternal(NUM_NMS);
    ServiceClient client = createClient(getConf());

    Service appA = new Service();
    appA.setName(sameAppName);
    appA.setVersion("v1");
    appA.addComponent(createComponent("comp", 1, "sleep 1000"));

    Service appB = new Service();
    appB.setName(sameAppName);
    appB.setVersion("v1");
    appB.addComponent(createComponent("comp", 1, "sleep 1000"));

    try {
      client.actionBuild(appA);
      client.actionBuild(appB);
      // NOTE(review): if actionBuild(appB) does NOT throw, this test passes
      // silently — consider adding Assert.fail() here to enforce the failure.
    } catch (Exception e) {
      String expectedMsg = "Service Instance dir already exists:";
      if (e.getLocalizedMessage() != null) {
        Assert.assertThat(e.getLocalizedMessage(),
            CoreMatchers.containsString(expectedMsg));
      } else {
        Assert.fail("Message cannot be null. It has to say - " + expectedMsg);
      }
    } finally {
      // cleanup
      client.actionDestroy(sameAppName);
    }

    try {
      client.actionCreate(appA);
      waitForServiceToBeStarted(client, appA);

      client.actionCreate(appB);
      waitForServiceToBeStarted(client, appB);
    } catch (Exception e) {
      String expectedMsg = "Failed to create service " + sameAppName
          + ", because it already exists.";
      if (e.getLocalizedMessage() != null) {
        Assert.assertThat(e.getLocalizedMessage(),
            CoreMatchers.containsString(expectedMsg));
      } else {
        Assert.fail("Message cannot be null. It has to say - " + expectedMsg);
      }
    } finally {
      // cleanup
      client.actionStop(sameAppName, true);
      client.actionDestroy(sameAppName);
    }
  }

  // Test to verify recovery of ServiceMaster after RM is restarted.
  // 1. Create an example service.
  // 2. Restart RM.
  // 3. Fail the application attempt.
  // 4. Verify ServiceMaster recovers.
  /**
   * Restarts the RM with work-preserving recovery enabled, then fails the AM
   * attempt, and asserts that attempt 2 comes up RUNNING with the same number
   * of containers per component as before the failure.
   */
  @Test(timeout = 200000)
  public void testRecoverComponentsAfterRMRestart() throws Exception {
    YarnConfiguration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
    conf.setBoolean(
        YarnConfiguration.RM_WORK_PRESERVING_RECOVERY_ENABLED, true);
    conf.setLong(YarnConfiguration.NM_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS,
        500L);
    conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS, true);
    conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_USE_RPC, true);
    conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS,
        YarnConfiguration.DEFAULT_RM_MAX_COMPLETED_APPLICATIONS);
    setConf(conf);
    setupInternal(NUM_NMS);
    ServiceClient client = createClient(getConf());
    Service exampleApp = createExampleApplication();
    client.actionCreate(exampleApp);
    // Snapshot container ids per component before the induced failure.
    Multimap<String, String> containersBeforeFailure =
        waitForAllCompToBeReady(client, exampleApp);
    LOG.info("Restart the resource manager");
    getYarnCluster().restartResourceManager(
        getYarnCluster().getActiveRMIndex());
    GenericTestUtils.waitFor(() ->
        getYarnCluster().getResourceManager().getServiceState() ==
            org.apache.hadoop.service.Service.STATE.STARTED, 2000, 200000);
    Assert.assertTrue("node managers connected",
        getYarnCluster().waitForNodeManagersToConnect(5000));
    ApplicationId exampleAppId = ApplicationId.fromString(exampleApp.getId());
    ApplicationAttemptId applicationAttemptId = client.getYarnClient()
        .getApplicationReport(exampleAppId).getCurrentApplicationAttemptId();
    LOG.info("Fail the application attempt {}", applicationAttemptId);
    client.getYarnClient().failApplicationAttempt(applicationAttemptId);
    //wait until attempt 2 is running
    GenericTestUtils.waitFor(() -> {
      try {
        ApplicationReport ar = client.getYarnClient()
            .getApplicationReport(exampleAppId);
        return ar.getCurrentApplicationAttemptId().getAttemptId() == 2 &&
            ar.getYarnApplicationState() == YarnApplicationState.RUNNING;
      } catch (YarnException | IOException e) {
        throw new RuntimeException("while waiting", e);
      }
    }, 2000, 200000);
    // After recovery, each component must report the same container count.
    Multimap<String, String> containersAfterFailure = waitForAllCompToBeReady(
        client, exampleApp);
    containersBeforeFailure.keys().forEach(compName -> {
      Assert.assertEquals("num containers after by restart for " + compName,
          containersBeforeFailure.get(compName).size(),
          containersAfterFailure.get(compName) == null ? 0 :
              containersAfterFailure.get(compName).size());
    });
    LOG.info("Stop/destroy service {}", exampleApp);
    client.actionStop(exampleApp.getName(), true);
    client.actionDestroy(exampleApp.getName());
  }

  /**
   * Exercises the two-phase (initiate + per-container) upgrade path: verifies
   * the upgrade spec is persisted, upgrades all containers of one component,
   * finalizes via actionStart, and checks the new env var took effect.
   */
  @Test(timeout = 200000)
  public void testUpgrade() throws Exception {
    setupInternal(NUM_NMS);
    getConf().setBoolean(YARN_SERVICE_UPGRADE_ENABLED, true);
    ServiceClient client = createClient(getConf());
    Service service = createExampleApplication();
    client.actionCreate(service);
    waitForServiceToBeStable(client, service);
    // upgrade the service
    Component component = service.getComponents().iterator().next();
    service.setState(ServiceState.UPGRADING);
    service.setVersion("v2");
    component.getConfiguration().getEnv().put("key1", "val1");
    client.initiateUpgrade(service);
    // wait for service to be in upgrade state
    waitForServiceToBeInState(client, service, ServiceState.UPGRADING);
    // The upgraded spec must be persisted on the FS under the new version.
    SliderFileSystem fs = new SliderFileSystem(getConf());
    Service fromFs = ServiceApiUtil.loadServiceUpgrade(fs,
        service.getName(), service.getVersion());
    Assert.assertEquals(service.getName(), fromFs.getName());
    Assert.assertEquals(service.getVersion(), fromFs.getVersion());
    // upgrade containers
    Service liveService = client.getStatus(service.getName());
    client.actionUpgrade(service,
        liveService.getComponent(component.getName()).getContainers());
    waitForAllCompToBeReady(client, service);
    // finalize the upgrade
    client.actionStart(service.getName());
    waitForServiceToBeStable(client, service);
    Service active = client.getStatus(service.getName());
    Assert.assertEquals("component not stable", ComponentState.STABLE,
        active.getComponent(component.getName()).getState());
    Assert.assertEquals("comp does not have new env", "val1",
        active.getComponent(component.getName()).getConfiguration()
            .getEnv("key1"));
    LOG.info("Stop/destroy service {}", service);
    client.actionStop(service.getName(), true);
    client.actionDestroy(service.getName());
  }

  /**
   * Exercises the one-shot express upgrade path across two components and
   * verifies both components' new env vars are live once the service is
   * stable on the new version.
   */
  @Test(timeout = 200000)
  public void testExpressUpgrade() throws Exception {
    setupInternal(NUM_NMS);
    getConf().setBoolean(YARN_SERVICE_UPGRADE_ENABLED, true);
    ServiceClient client = createClient(getConf());
    Service service = createExampleApplication();
    client.actionCreate(service);
    waitForServiceToBeStable(client, service);
    // upgrade the service
    Component component = service.getComponents().iterator().next();
    service.setState(ServiceState.EXPRESS_UPGRADING);
    service.setVersion("v2");
    component.getConfiguration().getEnv().put("key1", "val1");
    Component component2 = service.getComponent("compb");
    component2.getConfiguration().getEnv().put("key2", "val2");
    client.actionUpgradeExpress(service);
    waitForServiceToBeExpressUpgrading(client, service);
    // wait for upgrade to complete
    waitForServiceToBeStable(client, service);
    Service active = client.getStatus(service.getName());
    Assert.assertEquals("version mismatch", service.getVersion(),
        active.getVersion());
    Assert.assertEquals("component not stable", ComponentState.STABLE,
        active.getComponent(component.getName()).getState());
    Assert.assertEquals("compa does not have new env", "val1",
        active.getComponent(component.getName()).getConfiguration()
            .getEnv("key1"));
    Assert.assertEquals("compb does not have new env", "val2",
        active.getComponent(component2.getName()).getConfiguration()
            .getEnv("key2"));
    LOG.info("Stop/destroy service {}", service);
    client.actionStop(service.getName(), true);
    client.actionDestroy(service.getName());
  }

  /**
   * Starts an upgrade, upgrades a single container, then cancels; verifies
   * the service rolls back to the original env value ("val0").
   */
  @Test(timeout = 200000)
  public void testCancelUpgrade() throws Exception {
    setupInternal(NUM_NMS);
    getConf().setBoolean(YARN_SERVICE_UPGRADE_ENABLED, true);
    ServiceClient client = createClient(getConf());
    Service service = createExampleApplication();
    Component component = service.getComponents().iterator().next();
    component.getConfiguration().getEnv().put("key1", "val0");
    client.actionCreate(service);
    waitForServiceToBeStable(client, service);
    // upgrade the service
    service.setState(ServiceState.UPGRADING);
    service.setVersion("v2");
    component.getConfiguration().getEnv().put("key1", "val1");
    client.initiateUpgrade(service);
    // wait for service to be in upgrade state
    waitForServiceToBeInState(client, service, ServiceState.UPGRADING);
    // upgrade 1 container
    Service liveService = client.getStatus(service.getName());
    Container container = liveService.getComponent(component.getName())
        .getContainers().iterator().next();
    client.actionUpgrade(service, Lists.newArrayList(container));
    // Give the single-container upgrade a moment to start before cancelling.
    Thread.sleep(500);
    // cancel the upgrade
    client.actionCancelUpgrade(service.getName());
    waitForServiceToBeStable(client, service);
    Service active = client.getStatus(service.getName());
    Assert.assertEquals("component not stable", ComponentState.STABLE,
        active.getComponent(component.getName()).getState());
    Assert.assertEquals("comp does not have new env", "val0",
        active.getComponent(component.getName()).getConfiguration()
            .getEnv("key1"));
    LOG.info("Stop/destroy service {}", service);
    client.actionStop(service.getName(), true);
    client.actionDestroy(service.getName());
  }

  // Test to verify ANTI_AFFINITY placement policy
  // 1. Start mini cluster
  // with 3 NMs and scheduler placement-constraint handler
  // 2. Create an example service with 3 containers
  // 3. Verify no more than 1 container comes up in each of the 3 NMs
  // 4. Flex the component up (to 5, then 4 - both more than the no of NMs)
  // 5. Verify that the extra containers do not even get allocated since there
  // are only 3 NMs
  @Test (timeout = 200000)
  public void testCreateServiceWithPlacementPolicy() throws Exception {
    // We need to enable scheduler placement-constraint at the cluster level to
    // let apps use placement policies.
    YarnConfiguration conf = new YarnConfiguration();
    conf.set(YarnConfiguration.RM_PLACEMENT_CONSTRAINTS_HANDLER,
        YarnConfiguration.SCHEDULER_RM_PLACEMENT_CONSTRAINTS_HANDLER);
    conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS,
        YarnConfiguration.DEFAULT_RM_MAX_COMPLETED_APPLICATIONS);
    setConf(conf);
    setupInternal(3);
    ServiceClient client = createClient(getConf());
    Service exampleApp = new Service();
    exampleApp.setName("example-app");
    exampleApp.setVersion("v1");
    Component comp = createComponent("compa", 3L, "sleep 1000");
    // NODE-scoped anti-affinity on the component's own tag: at most one
    // "compa" container per node.
    PlacementPolicy pp = new PlacementPolicy();
    PlacementConstraint pc = new PlacementConstraint();
    pc.setName("CA1");
    pc.setTargetTags(Collections.singletonList("compa"));
    pc.setScope(PlacementScope.NODE);
    pc.setType(PlacementType.ANTI_AFFINITY);
    pp.setConstraints(Collections.singletonList(pc));
    comp.setPlacementPolicy(pp);
    exampleApp.addComponent(comp);
    client.actionCreate(exampleApp);
    waitForServiceToBeStable(client, exampleApp);
    // Check service is stable and all 3 containers are running
    Service service = client.getStatus(exampleApp.getName());
    Component component = service.getComponent("compa");
    Assert.assertEquals("Service state should be STABLE", ServiceState.STABLE,
        service.getState());
    Assert.assertEquals("3 containers are expected to be running", 3,
        component.getContainers().size());
    // Prepare a map of non-AM containers for later lookup
    Set<String> nonAMContainerIdSet = new HashSet<>();
    for (Container cont : component.getContainers()) {
      nonAMContainerIdSet.add(cont.getId());
    }
    // Verify that no more than 1 non-AM container came up on each of the 3 NMs
    Set<String> hosts = new HashSet<>();
    ApplicationReport report = client.getYarnClient()
        .getApplicationReport(ApplicationId.fromString(exampleApp.getId()));
    GetContainersRequest req = GetContainersRequest
        .newInstance(report.getCurrentApplicationAttemptId());
    ResourceManager rm = getYarnCluster().getResourceManager();
    for (ContainerReport contReport : rm.getClientRMService().getContainers(req)
        .getContainerList()) {
      if (!nonAMContainerIdSet
          .contains(contReport.getContainerId().toString())) {
        continue;
      }
      if (hosts.contains(contReport.getNodeHttpAddress())) {
        Assert.fail("Container " + contReport.getContainerId()
            + " came up in the same host as another container.");
      } else {
        hosts.add(contReport.getNodeHttpAddress());
      }
    }
    // Flex compa up to 5, which is more containers than the no of NMs
    Map<String, Long> compCounts = new HashMap<>();
    compCounts.put("compa", 5L);
    exampleApp.getComponent("compa").setNumberOfContainers(5L);
    client.flexByRestService(exampleApp.getName(), compCounts);
    try {
      // 10 secs is enough for the container to be started. The down side of
      // this test is that it has to wait that long. Setting a higher wait time
      // will add to the total time taken by tests to run.
      waitForServiceToBeStable(client, exampleApp, 10000);
      Assert.fail("Service should not be in a stable state. It should throw "
          + "a timeout exception.");
    } catch (Exception e) {
      // Check that service state is not STABLE and only 3 containers are
      // running and the fourth one should not get allocated.
      service = client.getStatus(exampleApp.getName());
      component = service.getComponent("compa");
      Assert.assertNotEquals("Service state should not be STABLE",
          ServiceState.STABLE, service.getState());
      Assert.assertEquals("Component state should be FLEXING",
          ComponentState.FLEXING, component.getState());
      Assert.assertEquals("3 containers are expected to be running", 3,
          component.getContainers().size());
    }
    // Flex compa down to 4 now, which is still more containers than the no of
    // NMs. This tests the usecase that flex down does not kill any of the
    // currently running containers since the required number of containers are
    // still higher than the currently running number of containers. However,
    // component state will still be FLEXING and service state not STABLE.
    compCounts = new HashMap<>();
    compCounts.put("compa", 4L);
    exampleApp.getComponent("compa").setNumberOfContainers(4L);
    client.flexByRestService(exampleApp.getName(), compCounts);
    try {
      // 10 secs is enough for the container to be started. The down side of
      // this test is that it has to wait that long. Setting a higher wait time
      // will add to the total time taken by tests to run.
      waitForServiceToBeStable(client, exampleApp, 10000);
      Assert.fail("Service should not be in a stable state. It should throw "
          + "a timeout exception.");
    } catch (Exception e) {
      // Check that service state is not STABLE and only 3 containers are
      // running and the fourth one should not get allocated.
      service = client.getStatus(exampleApp.getName());
      component = service.getComponent("compa");
      Assert.assertNotEquals("Service state should not be STABLE",
          ServiceState.STABLE, service.getState());
      Assert.assertEquals("Component state should be FLEXING",
          ComponentState.FLEXING, component.getState());
      Assert.assertEquals("3 containers are expected to be running", 3,
          component.getContainers().size());
    }
    // Finally flex compa down to 3, which is exactly the number of containers
    // currently running. This will bring the component and service states to
    // STABLE.
    compCounts = new HashMap<>();
    compCounts.put("compa", 3L);
    exampleApp.getComponent("compa").setNumberOfContainers(3L);
    client.flexByRestService(exampleApp.getName(), compCounts);
    waitForServiceToBeStable(client, exampleApp);
    LOG.info("Stop/destroy service {}", exampleApp);
    client.actionStop(exampleApp.getName(), true);
    client.actionDestroy(exampleApp.getName());
  }

  @Test(timeout = 200000)
  public void testAMSigtermDoesNotKillApplication() throws Exception {
    runAMSignalTest(SignalContainerCommand.GRACEFUL_SHUTDOWN);
  }

  @Test(timeout = 200000)
  public void testAMSigkillDoesNotKillApplication() throws Exception {
    runAMSignalTest(SignalContainerCommand.FORCEFUL_SHUTDOWN);
  }

  /**
   * Sends the given signal directly to the AM container (bypassing the
   * service client) and verifies a new attempt takes over, the service
   * returns to STABLE, and the set of container ids is unchanged.
   */
  public void runAMSignalTest(SignalContainerCommand signal) throws Exception {
    setupInternal(NUM_NMS);
    ServiceClient client = createClient(getConf());
    Service exampleApp = createExampleApplication();
    client.actionCreate(exampleApp);
    waitForServiceToBeStable(client, exampleApp);
    Service appStatus1 = client.getStatus(exampleApp.getName());
    ApplicationId exampleAppId = ApplicationId.fromString(appStatus1.getId());
    YarnClient yarnClient = createYarnClient(getConf());
    ApplicationReport applicationReport = yarnClient.getApplicationReport(
        exampleAppId);
    ApplicationAttemptId firstAttemptId = applicationReport
        .getCurrentApplicationAttemptId();
    ApplicationAttemptReport attemptReport = yarnClient
        .getApplicationAttemptReport(firstAttemptId);
    // the AM should not perform a graceful shutdown since the operation was not
    // initiated through the service client
    yarnClient.signalToContainer(attemptReport.getAMContainerId(), signal);
    GenericTestUtils.waitFor(() -> {
      try {
        ApplicationReport ar = client.getYarnClient()
            .getApplicationReport(exampleAppId);
        YarnApplicationState state = ar.getYarnApplicationState();
        Assert.assertTrue(state == YarnApplicationState.RUNNING ||
            state == YarnApplicationState.ACCEPTED);
        if (state != YarnApplicationState.RUNNING) {
          return false;
        }
        if (ar.getCurrentApplicationAttemptId() == null ||
            ar.getCurrentApplicationAttemptId().equals(firstAttemptId)) {
          return false;
        }
        Service appStatus2 = client.getStatus(exampleApp.getName());
        if (appStatus2.getState() != ServiceState.STABLE) {
          return false;
        }
        // Same containers must survive the AM restart.
        Assert.assertEquals(getSortedContainerIds(appStatus1).toString(),
            getSortedContainerIds(appStatus2).toString());
        return true;
      } catch (YarnException | IOException e) {
        throw new RuntimeException("while waiting", e);
      }
    }, 2000, 200000);
  }

  // Flattens all container ids of all components into one sorted list, used
  // to compare container sets before/after an AM restart.
  private static List<String> getSortedContainerIds(Service s) {
    List<String> containerIds = new ArrayList<>();
    for (Component component : s.getComponents()) {
      for (Container container : component.getContainers()) {
        containerIds.add(container.getId());
      }
    }
    Collections.sort(containerIds);
    return containerIds;
  }

  // Test to verify component health threshold monitor. It uses anti-affinity
  // placement policy to make it easier to simulate container failure by
  // allocating more containers than the no of NMs.
  // 1. Start mini cluster with 3 NMs and scheduler placement-constraint handler
  // 2. Create an example service of 3 containers with anti-affinity placement
  // policy and health threshold = 65%, window = 3 secs, init-delay = 0 secs,
  // poll-frequency = 1 secs
  // 3. Flex the component to 4 containers. This makes health = 75%, so based on
  // threshold the service will continue to run beyond the window of 3 secs.
  // 4. Flex the component to 5 containers. This makes health = 60%, so based on
  // threshold the service will be stopped after the window of 3 secs.
  @Test (timeout = 200000)
  public void testComponentHealthThresholdMonitor() throws Exception {
    // We need to enable scheduler placement-constraint at the cluster level to
    // let apps use placement policies.
    YarnConfiguration conf = new YarnConfiguration();
    conf.set(YarnConfiguration.RM_PLACEMENT_CONSTRAINTS_HANDLER,
        YarnConfiguration.SCHEDULER_RM_PLACEMENT_CONSTRAINTS_HANDLER);
    conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS,
        YarnConfiguration.DEFAULT_RM_MAX_COMPLETED_APPLICATIONS);
    setConf(conf);
    setupInternal(3);
    ServiceClient client = createClient(getConf());
    Service exampleApp = new Service();
    exampleApp.setName("example-app");
    exampleApp.setVersion("v1");
    Component comp = createComponent("compa", 3L, "sleep 1000");
    PlacementPolicy pp = new PlacementPolicy();
    PlacementConstraint pc = new PlacementConstraint();
    pc.setName("CA1");
    pc.setTargetTags(Collections.singletonList("compa"));
    pc.setScope(PlacementScope.NODE);
    pc.setType(PlacementType.ANTI_AFFINITY);
    pp.setConstraints(Collections.singletonList(pc));
    comp.setPlacementPolicy(pp);
    // Health monitor tuned for a fast test: 65% threshold, 3s window,
    // immediate start, 1s polls; readiness check disabled.
    Configuration config = new Configuration();
    config.setProperty(CONTAINER_HEALTH_THRESHOLD_PERCENT, "65");
    config.setProperty(CONTAINER_HEALTH_THRESHOLD_WINDOW_SEC, "3");
    config.setProperty(CONTAINER_HEALTH_THRESHOLD_INIT_DELAY_SEC, "0");
    config.setProperty(CONTAINER_HEALTH_THRESHOLD_POLL_FREQUENCY_SEC, "1");
    config.setProperty(DEFAULT_READINESS_CHECK_ENABLED, "false");
    comp.setConfiguration(config);
    exampleApp.addComponent(comp);
    // Make sure AM does not come up after service is killed for this test
    Configuration serviceConfig = new Configuration();
    serviceConfig.setProperty(AM_RESTART_MAX, "1");
    exampleApp.setConfiguration(serviceConfig);
    client.actionCreate(exampleApp);
    waitForServiceToBeStable(client, exampleApp);
    // Check service is stable and all 3 containers are running
    Service service = client.getStatus(exampleApp.getName());
    Component component = service.getComponent("compa");
    Assert.assertEquals("Service state should be STABLE", ServiceState.STABLE,
        service.getState());
    Assert.assertEquals("3 containers are expected to be running", 3,
        component.getContainers().size());
    // Flex compa up to 4 - will make health 75% (3 out of 4 running), but still
    // above threshold of 65%, so service will continue to run.
    Map<String, Long> compCounts = new HashMap<>();
    compCounts.put("compa", 4L);
    exampleApp.getComponent("compa").setNumberOfContainers(4L);
    client.flexByRestService(exampleApp.getName(), compCounts);
    try {
      // Wait for 6 secs (window 3 secs + 1 for next poll + 2 for buffer). Since
      // the service will never go to stable state (because of anti-affinity the
      // 4th container will never be allocated) it will timeout. However, after
      // the timeout the service should continue to run since health is 75%
      // which is above the threshold of 65%.
      waitForServiceToBeStable(client, exampleApp, 6000);
      Assert.fail("Service should not be in a stable state. It should throw "
          + "a timeout exception.");
    } catch (Exception e) {
      // Check that service state is STARTED and only 3 containers are running
      service = client.getStatus(exampleApp.getName());
      component = service.getComponent("compa");
      Assert.assertEquals("Service state should be STARTED",
          ServiceState.STARTED, service.getState());
      Assert.assertEquals("Component state should be FLEXING",
          ComponentState.FLEXING, component.getState());
      Assert.assertEquals("3 containers are expected to be running", 3,
          component.getContainers().size());
    }
    // Flex compa up to 5 - will make health 60% (3 out of 5 running), so
    // service will stop since it is below threshold of 65%.
    compCounts.put("compa", 5L);
    exampleApp.getComponent("compa").setNumberOfContainers(5L);
    client.flexByRestService(exampleApp.getName(), compCounts);
    try {
      // Wait for 14 secs (window 3 secs + 1 for next poll + 2 for buffer + 5
      // secs of service wait before shutting down + 3 secs app cleanup so that
      // API returns that service is in FAILED state). Note, because of
      // anti-affinity the 4th and 5th container will never be allocated.
      waitForServiceToBeInState(client, exampleApp, ServiceState.FAILED, 14000);
    } catch (Exception e) {
      Assert.fail("Should not have thrown exception");
    }
    LOG.info("Destroy service {}", exampleApp);
    client.actionDestroy(exampleApp.getName());
  }

  // Check containers launched are in dependency order
  // Get all containers into a list and sort based on container launch time e.g.
  // compa-c1, compa-c2, compb-c1, compb-c2;
  // check that the container's launch time are align with the dependencies.
  private void checkContainerLaunchDependencies(ServiceClient client,
      Service exampleApp, String... compOrder)
      throws IOException, YarnException {
    Service retrievedApp = client.getStatus(exampleApp.getName());
    List<Container> containerList = new ArrayList<>();
    for (Component component : retrievedApp.getComponents()) {
      containerList.addAll(component.getContainers());
    }
    // sort based on launchTime
    containerList
        .sort((o1, o2) -> o1.getLaunchTime().compareTo(o2.getLaunchTime()));
    LOG.info("containerList: " + containerList);
    // check the containers are in the dependency order.
    int index = 0;
    for (String comp : compOrder) {
      long num = retrievedApp.getComponent(comp).getNumberOfContainers();
      for (int i = 0; i < num; i++) {
        // Instance names look like "<comp>-<n>"; strip the trailing index.
        String compInstanceName =
            containerList.get(index).getComponentInstanceName();
        String compName =
            compInstanceName.substring(0, compInstanceName.lastIndexOf('-'));
        Assert.assertEquals(comp, compName);
        index++;
      }
    }
  }

  // Flexes both example components ("compa" and "compb") to the given count
  // and returns the count map passed to the REST flex call.
  private Map<String, Long> flexComponents(ServiceClient client,
      Service exampleApp, long count) throws YarnException, IOException {
    Map<String, Long> compCounts = new HashMap<>();
    compCounts.put("compa", count);
    compCounts.put("compb", count);
    // flex will update the persisted conf to reflect latest number of
    // containers.
    exampleApp.getComponent("compa").setNumberOfContainers(count);
    exampleApp.getComponent("compb").setNumberOfContainers(count);
    client.flexByRestService(exampleApp.getName(), compCounts);
    return compCounts;
  }

  // Check each component's comp instances name are in sequential order.
  // E.g. If there are two instances compA-1 and compA-2
  // When flex up to 4 instances, it should be compA-1 , compA-2, compA-3,
  // compA-4
  // When flex down to 3 instances, it should be compA-1 , compA-2, compA-3.
  private void checkCompInstancesInOrder(ServiceClient client,
      Service exampleApp) throws IOException, YarnException,
      TimeoutException, InterruptedException {
    waitForContainers(client, exampleApp);
    Service service = client.getStatus(exampleApp.getName());
    for (Component comp : service.getComponents()) {
      checkEachCompInstancesInOrder(comp, exampleApp.getName());
    }
  }

  // Blocks until every component reports exactly its desired number of
  // containers (polls every 2s, up to 200s).
  private void waitForContainers(ServiceClient client, Service exampleApp)
      throws TimeoutException, InterruptedException {
    GenericTestUtils.waitFor(() -> {
      try {
        Service service = client.getStatus(exampleApp.getName());
        for (Component comp : service.getComponents()) {
          if (comp.getContainers().size() != comp.getNumberOfContainers()) {
            return false;
          }
        }
        return true;
      } catch (Exception e) {
        return false;
      }
    }, 2000, 200000);
  }

  // Verifies instance names are "<comp>-0", "<comp>-1", ... with no gaps, and
  // that each container's ZK registry path exists.
  private void checkEachCompInstancesInOrder(Component component,
      String serviceName) throws TimeoutException, InterruptedException {
    TreeSet<String> instances = new TreeSet<>();
    for (Container container : component.getContainers()) {
      instances.add(container.getComponentInstanceName());
      String componentZKPath = RegistryUtils.componentPath(RegistryUtils
          .currentUser(), YarnServiceConstants.APP_TYPE, serviceName,
          RegistryPathUtils.encodeYarnID(container.getId()));
      GenericTestUtils.waitFor(() -> {
        try {
          return getCuratorService().zkPathExists(componentZKPath);
        } catch (IOException e) {
          return false;
        }
      }, 1000, 60000);
    }
    int i = 0;
    for (String s : instances) {
      assertThat(s).isEqualTo(component.getName() + "-" + i);
      i++;
    }
  }

  /**
   * With RM_MAX_COMPLETED_APPLICATIONS=0 the RM forgets the app immediately;
   * stop may throw ApplicationNotFoundException, which is tolerated, and the
   * service must still restart to a STABLE state.
   */
  @Test (timeout = 200000)
  public void testRestartServiceForNonExistingInRM() throws Exception {
    YarnConfiguration conf = new YarnConfiguration();
    conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 0);
    setConf(conf);
    setupInternal(NUM_NMS);
    ServiceClient client = createClient(getConf());
    Service exampleApp = createExampleApplication();
    client.actionCreate(exampleApp);
    waitForServiceToBeStable(client, exampleApp);
    try {
      client.actionStop(exampleApp.getName(), true);
    } catch (ApplicationNotFoundException e) {
      LOG.info("ignore ApplicationNotFoundException during stopping");
    }
    client.actionStart(exampleApp.getName());
    waitForServiceToBeStable(client, exampleApp);
    Service service = client.getStatus(exampleApp.getName());
    Assert.assertEquals("Restarted service state should be STABLE",
        ServiceState.STABLE, service.getState());
  }
}
package com.gmail.St3venAU.plugins.ArmorStandTools;

import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.entity.ArmorStand;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.HandlerList;
import org.bukkit.event.Listener;
import org.bukkit.event.inventory.ClickType;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.inventory.InventoryDragEvent;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.SkullMeta;
import org.bukkit.scheduler.BukkitRunnable;

import java.util.HashSet;
import java.util.UUID;

/**
 * A 36-slot chest GUI for editing a single armor stand: equipment slots
 * (raw slots 0, 9, 10, 18, 27) plus one clickable tool item per enabled
 * {@link ArmorStandTool}. The instance registers itself as an event listener
 * on construction and unregisters when the inventory is closed.
 */
class ArmorStandGUI implements Listener {

    // Entity ids of armor stands that currently have a GUI open, so two
    // players cannot edit the same stand at once.
    private static final HashSet<Integer> inUse = new HashSet<Integer>();
    // Raw slots reserved for the stand's equipment (helmet, chest, hand,
    // legs, boots).
    private static final HashSet<Integer> invSlots = new HashSet<Integer>();
    // Shared black-glass placeholder item; lazily created on first use.
    private static ItemStack filler;

    private Inventory i;
    private ArmorStand as;
    private Main plugin;

    /**
     * Builds and opens the GUI for the given armor stand. If another player
     * already has a GUI open for this stand, sends an error and does nothing.
     */
    ArmorStandGUI(Main plugin, ArmorStand as, Player p) {
        if(inUse.contains(as.getEntityId())) {
            p.sendMessage(ChatColor.RED + Config.guiInUse);
            return;
        }
        if(filler == null) {
            // Lazy one-time init of the filler item and the equipment slots.
            filler = new ItemStack(Material.STAINED_GLASS_PANE, 1, (short) 15);
            ItemMeta im = filler.getItemMeta();
            im.setDisplayName(" ");
            filler.setItemMeta(im);
            invSlots.add(0);
            invSlots.add(9);
            invSlots.add(10);
            invSlots.add(18);
            invSlots.add(27);
        }
        this.plugin = plugin;
        plugin.getServer().getPluginManager().registerEvents(this, plugin);
        this.as = as;
        String name = as.getCustomName();
        if(name == null) {
            name = Config.armorStand;
        } else if(name.length() > 32) {
            // Inventory titles are limited to 32 characters.
            name = name.substring(0, 32);
        }
        i = Bukkit.createInventory(null, 36, name);
        for(int slot = 0; slot < i.getSize(); slot++) {
            i.setItem(slot, filler);
        }
        for(ArmorStandTool tool : ArmorStandTool.values()) {
            if(tool.isForGui() && tool.isEnabled()) {
                i.setItem(tool.getSlot(), updateLore(tool));
            }
        }
        // Mirror the stand's current equipment into the reserved slots.
        i.setItem(0, as.getHelmet());
        i.setItem(9, as.getChestplate());
        i.setItem(10, as.getItemInHand());
        i.setItem(18, as.getLeggings());
        i.setItem(27, as.getBoots());
        inUse.add(as.getEntityId());
        p.openInventory(i);
    }

    /**
     * Returns the tool's GUI item with its lore refreshed to show the armor
     * stand's current state for that tool (visibility, size, base plate, ...).
     */
    private ItemStack updateLore(ArmorStandTool tool) {
        ItemStack item = tool.getItem();
        switch (tool) {
            case INVIS:
                return Utils.setLore(item, ChatColor.AQUA + Config.asVisible + ": " + (as.isVisible() ? (ChatColor.GREEN + Config.isTrue) : (ChatColor.RED + Config.isFalse)));
            case SIZE:
                return Utils.setLore(item, ChatColor.AQUA + Config.size + ": " + (as.isSmall() ? (ChatColor.BLUE + Config.small) : (ChatColor.GREEN + Config.normal)));
            case BASE:
                return Utils.setLore(item, ChatColor.AQUA + Config.basePlate + ": " + (as.hasBasePlate() ? (ChatColor.GREEN + Config.isOn) : (ChatColor.RED + Config.isOff)));
            case GRAV:
                return Utils.setLore(item, ChatColor.AQUA + Config.gravity + ": " + (as.hasGravity() ? (ChatColor.GREEN + Config.isOn) : (ChatColor.RED + Config.isOff)));
            case ARMS:
                return Utils.setLore(item, ChatColor.AQUA + Config.arms + ": " + (as.hasArms() ? (ChatColor.GREEN + Config.isOn) : (ChatColor.RED + Config.isOff)));
            case INVUL:
                return Utils.setLore(item, ChatColor.AQUA + Config.invul + ": " + (NBT.isInvulnerable(as) ? (ChatColor.GREEN + Config.isOn) : (ChatColor.RED + Config.isOff)));
            case SLOTS:
                // 2039583 is the NBT DisabledSlots bitmask meaning "all slots locked".
                return Utils.setLore(item, ChatColor.AQUA + Config.equip + ": " + (NBT.getDisabledSlots(as) == 2039583 ? (ChatColor.GREEN + Config.locked) : (ChatColor.RED + Config.unLocked)));
            case NODEL:
                // Max health 50 marks a deletion-protected stand (developer tool).
                return Utils.setLore(item, ChatColor.AQUA + "Deletion Protection: " + (as.getMaxHealth() == 50 ? (ChatColor.GREEN + Config.enabled) : (ChatColor.RED + Config.disabled)));
            case NAME:
                return Utils.setLore(item, ChatColor.AQUA + Config.currently + ": " + (as.getCustomName() == null ? (ChatColor.BLUE + Config.none) : (ChatColor.GREEN + as.getCustomName())));
            case PHEAD:
                String name = plrHeadName(as);
                return Utils.setLore(item, ChatColor.AQUA + Config.currently + ": " + (name == null ? (ChatColor.BLUE + Config.none) : (ChatColor.GREEN + name)));
            default:
                return item;
        }
    }

    /**
     * Returns the owner name of the player head the stand is wearing, or null
     * if it is not wearing an owned player head.
     */
    private String plrHeadName(ArmorStand as) {
        if(as.getHelmet() == null) return null;
        if(!(as.getHelmet().getItemMeta() instanceof SkullMeta)) return null;
        SkullMeta meta = (SkullMeta) as.getHelmet().getItemMeta();
        if(!meta.hasOwner()) return null;
        return meta.getOwner();
    }

    /**
     * Unregisters this listener and frees the armor stand for other editors
     * when the GUI inventory is closed.
     */
    @EventHandler
    public void inInventoryClose(InventoryCloseEvent event) {
        if(!event.getInventory().equals(i)) return;
        HandlerList.unregisterAll(this);
        inUse.remove(as.getEntityId());
    }

    /**
     * Handles clicks inside the GUI: equipment-slot clicks sync back to the
     * stand (permission permitting), tool clicks toggle the matching armor
     * stand property, everything else is cancelled.
     */
    @EventHandler
    public void onInventoryClick(InventoryClickEvent event) {
        if(!event.getInventory().equals(i)) return;
        // FIX: guard before casting. The original cast getWhoClicked() to
        // Player first and only checked instanceof later, which could throw
        // ClassCastException for non-player clickers.
        if(!(event.getWhoClicked() instanceof Player)) return;
        Player p = (Player) event.getWhoClicked();
        if(event.getClick() == ClickType.SHIFT_LEFT || event.getClick() == ClickType.SHIFT_RIGHT || event.getClick() == ClickType.NUMBER_KEY) {
            // Disallow shortcuts that move items between inventories.
            event.setCancelled(true);
            return;
        }
        int slot = event.getRawSlot();
        // FIX: raw slots 0..getSize()-1 belong to the GUI; anything >= size
        // is the player's own inventory. The original used '>' which wrongly
        // swallowed clicks on the first slot of the player's inventory.
        if(slot >= i.getSize()) return;
        if(invSlots.contains(slot)) {
            // Equipment slot: allow the edit and sync it to the stand, but
            // only if the player may build here.
            if(plugin.checkBlockPermission(p, as.getLocation().getBlock())) {
                updateInventory();
            } else {
                event.setCancelled(true);
                p.sendMessage(ChatColor.RED + Config.wgNoPerm);
            }
            return;
        }
        event.setCancelled(true);
        ArmorStandTool t = ArmorStandTool.get(event.getCurrentItem());
        if(t == null) return;
        if (!plugin.playerHasPermission(p, as.getLocation().getBlock(), t)) {
            p.sendMessage(ChatColor.RED + Config.generalNoPerm);
            return;
        }
        switch (t) {
            case INVIS:
                as.setVisible(!as.isVisible());
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.asVisible + ": " + (as.isVisible() ? Config.isTrue : Config.isFalse));
                break;
            case CLONE:
                p.closeInventory();
                plugin.pickUpArmorStand(plugin.clone(as), p, true);
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.carrying);
                break;
            case SAVE:
                plugin.generateCmdBlock(p.getLocation(), as);
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.cbCreated);
                break;
            case SIZE:
                as.setSmall(!as.isSmall());
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.size + ": " + (as.isSmall() ? Config.small : Config.normal));
                break;
            case BASE:
                as.setBasePlate(!as.hasBasePlate());
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.basePlate + ": " + (as.hasBasePlate() ? Config.isOn : Config.isOff));
                break;
            case GRAV:
                as.setGravity(!as.hasGravity());
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.gravity + ": " + (as.hasGravity() ? Config.isOn : Config.isOff));
                break;
            case ARMS:
                as.setArms(!as.hasArms());
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.arms + ": " + (as.hasArms() ? Config.isOn : Config.isOff));
                break;
            case NAME:
                p.closeInventory();
                plugin.setName(p, as);
                break;
            case PHEAD:
                p.closeInventory();
                plugin.setPlayerSkull(p, as);
                break;
            case INVUL:
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.invul + ": " + (NBT.toggleInvulnerability(as) ? Config.isOn : Config.isOff));
                break;
            case SLOTS:
                Utils.actionBarMsg(p, ChatColor.GREEN + Config.equip + ": " + (NBT.toggleSlotsDisabled(as) ? Config.locked : Config.unLocked));
                break;
            case MOVE:
                p.closeInventory();
                UUID uuid = p.getUniqueId();
                if(plugin.carryingArmorStand.containsKey(uuid)) {
                    plugin.carryingArmorStand.remove(uuid);
                    Utils.actionBarMsg(p, Config.asDropped);
                } else {
                    plugin.pickUpArmorStand(as, p, false);
                    Utils.actionBarMsg(p, ChatColor.GREEN + Config.carrying);
                }
                break;
            case NODEL:
                // Developer tool - do not use
                if(as.getMaxHealth() == 50) {
                    as.setMaxHealth(20);
                    Utils.actionBarMsg(p, ChatColor.GREEN + "Deletion Protection: Disabled");
                } else {
                    as.setMaxHealth(50);
                    Utils.actionBarMsg(p, ChatColor.GREEN + "Deletion Protection: Enabled");
                }
                break;
            default:
                return;
        }
        // Refresh the clicked tool's lore so the GUI shows the new state.
        i.setItem(t.getSlot(), updateLore(t));
    }

    /**
     * Handles drag events: drags entirely within equipment slots sync to the
     * stand (permission permitting); any drag touching a non-equipment GUI
     * slot is cancelled.
     */
    @EventHandler
    public void onInventoryDrag(InventoryDragEvent event) {
        if(!event.getInventory().equals(i) || !(event.getWhoClicked() instanceof Player)) return;
        Player p = (Player) event.getWhoClicked();
        boolean invModified = false;
        for(int slot : event.getRawSlots()) {
            if(slot < i.getSize()) {
                if(invSlots.contains(slot)) {
                    invModified = true;
                } else {
                    event.setCancelled(true);
                    return;
                }
            }
        }
        if(invModified) {
            if(plugin.checkBlockPermission(p, as.getLocation().getBlock())) {
                updateInventory();
            } else {
                event.setCancelled(true);
                p.sendMessage(ChatColor.RED + Config.wgNoPerm);
            }
        }
    }

    /**
     * Copies the equipment slots of the GUI back onto the armor stand one
     * tick later, so the click/drag event has finished mutating the
     * inventory before we read it.
     */
    private void updateInventory() {
        new BukkitRunnable() {
            @Override
            public void run() {
                if(as == null || i == null) return;
                as.setHelmet(i.getItem(0));
                as.setChestplate(i.getItem(9));
                as.setItemInHand(i.getItem(10));
                as.setLeggings(i.getItem(18));
                as.setBoots(i.getItem(27));
            }
        }.runTaskLater(plugin, 1L);
    }
}