gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.internal.cache.snapshot;

import static com.gemstone.gemfire.distributed.internal.InternalDistributedSystem.getLoggerI18n;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;

import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionEvent;
import com.gemstone.gemfire.cache.RegionMembershipListener;
import com.gemstone.gemfire.cache.util.RegionMembershipListenerAdapter;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.DistributionManager;
import com.gemstone.gemfire.distributed.internal.DistributionMessage;
import com.gemstone.gemfire.distributed.internal.ProcessorKeeper21;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.InternalDataSerializer;

/**
 * Provides flow control using permits based on the sliding window algorithm.
 * The sender should invoke {@link #create(Region, DistributedMember, int)}
 * while the recipient should respond with {@link #sendAck(DM, DistributedMember, int, String)}
 * or {@link #sendAbort(DM, int, DistributedMember)}.
 *
 * <p>A {@link Window} holds a {@link Semaphore} whose permits represent the
 * remaining window capacity: the sender blocks in {@link Window#waitForOpening()}
 * (one permit per packet) and each ACK from the recipient releases one permit.
 *
 * @author bakera
 */
public class FlowController {
  // Watch out for rollover problems with MAX_VALUE: close()/abort() release
  // this many permits on top of whatever is already available, so the cap
  // must leave headroom below Integer.MAX_VALUE (Semaphore counts overflow).
  private static final int MAX_PERMITS = Integer.MAX_VALUE / 2;

  /**
   * Provides a callback interface for sliding window flow control.
   */
  public interface Window {
    /**
     * Returns the window id.
     * @return the window id
     */
    int getWindowId();

    /**
     * Returns true if the operation has been aborted.
     * @return true if aborted
     */
    boolean isAborted();

    /**
     * Returns true if the window is open and {{@link #waitForOpening()} will
     * return immediately.
     *
     * @return true if open
     */
    boolean isOpen();

    /**
     * Blocks until the window is open.
     * @throws InterruptedException Interrupted while waiting
     */
    void waitForOpening() throws InterruptedException;

    /**
     * Closes the window and releases resources.
     */
    void close();
  }

  /** the singleton */
  private static final FlowController instance = new FlowController();

  public static FlowController getInstance() {
    return instance;
  }

  /** keeps a weak ref to {@link WindowImpl} implementations */
  private final ProcessorKeeper21 processors;

  private FlowController() {
    processors = new ProcessorKeeper21();
  }

  /**
   * Creates and registers a {@link Window} that provides flow control.
   * The window is registered with the processor keeper so that incoming
   * ACK/ABORT messages can find it by id.
   *
   * @param region the region
   * @param sink the data recipient
   * @param windowSize the size of the sliding window
   *
   * @see #sendAbort(DM, int, DistributedMember)
   * @see #sendAck(DM, DistributedMember, int, String)
   */
  public <K, V> Window create(Region<K, V> region, DistributedMember sink, int windowSize) {
    WindowImpl<K, V> w = new WindowImpl<K, V>(region, sink, windowSize);
    // Register first, then record the id on the window so incoming messages
    // can route to it via processors.retrieve(windowId).
    int id = processors.put(w);
    w.setWindowId(id);
    return w;
  }

  /**
   * Sends an ACK to allow the source to continue sending messages.
   * If the source is this same member, the local window is ACK'd directly
   * instead of sending a distribution message.
   *
   * @param dmgr the distribution manager
   * @param member the data source
   * @param windowId the window
   * @param packetId the packet being ACK'd
   */
  public void sendAck(DM dmgr, DistributedMember member, int windowId, String packetId) {
    if (getLoggerI18n().fineEnabled())
      getLoggerI18n().fine("SNP: Sending ACK for packet " + packetId + " on window " + windowId + " to member " + member);

    if (dmgr.getDistributionManagerId().equals(member)) {
      // Local short-circuit: the window may already be gone (weak ref / closed).
      WindowImpl<?, ?> win = (WindowImpl<?, ?>) processors.retrieve(windowId);
      if (win != null) {
        win.ack(packetId);
      }
    } else {
      FlowControlAckMessage ack = new FlowControlAckMessage(windowId, packetId);
      ack.setRecipient((InternalDistributedMember) member);
      dmgr.putOutgoing(ack);
    }
  }

  /**
   * Aborts further message processing.
   * If the source is this same member, the local window is aborted directly
   * instead of sending a distribution message.
   *
   * @param dmgr the distribution manager
   * @param windowId the window
   * @param member the data source
   */
  public void sendAbort(DM dmgr, int windowId, DistributedMember member) {
    if (getLoggerI18n().fineEnabled())
      getLoggerI18n().fine("SNP: Sending ABORT to member " + member + " for window " + windowId);

    if (dmgr.getDistributionManagerId().equals(member)) {
      WindowImpl<?, ?> win = (WindowImpl<?, ?>) processors.retrieve(windowId);
      if (win != null) {
        win.abort();
      }
    } else {
      FlowControlAbortMessage abort = new FlowControlAbortMessage(windowId);
      abort.setRecipient((InternalDistributedMember) member);
      dmgr.putOutgoing(abort);
    }
  }

  /**
   * Sender-side window state. Not exposed outside this class; looked up by
   * id from the processor keeper when ACK/ABORT messages arrive.
   */
  private static class WindowImpl<K, V> implements Window {
    /** controls access to the window */
    private final Semaphore permits;

    /** true if aborted */
    private final AtomicBoolean abort;

    /** the region (used to manage membership) */
    private final Region<K, V> region;

    /** the membership listener */
    private final RegionMembershipListener<K, V> crash;

    /** the window id; volatile because it is set after construction (see create()) */
    private volatile int windowId;

    public WindowImpl(Region<K, V> region, final DistributedMember sink, int size) {
      permits = new Semaphore(size);
      abort = new AtomicBoolean(false);
      this.region = region;

      // Abort the window if the recipient crashes, so the sender does not
      // block forever waiting for ACKs that will never come.
      crash = new RegionMembershipListenerAdapter<K, V>() {
        @Override
        public void afterRemoteRegionCrash(RegionEvent<K, V> event) {
          if (event.getDistributedMember().equals(sink)) {
            if (getLoggerI18n().fineEnabled())
              getLoggerI18n().fine("SNP: " + sink + " has crashed, closing window");
            abort();
          }
        }
      };
      region.getAttributesMutator().addCacheListener(crash);
    }

    @Override
    public void close() {
      instance.processors.remove(windowId);
      region.getAttributesMutator().removeCacheListener(crash);
      // Flood the semaphore so any thread blocked in waitForOpening() wakes up.
      permits.release(MAX_PERMITS);
    }

    @Override
    public int getWindowId() {
      return windowId;
    }

    @Override
    public boolean isAborted() {
      return abort.get();
    }

    @Override
    public boolean isOpen() {
      return permits.availablePermits() > 0;
    }

    @Override
    public void waitForOpening() throws InterruptedException {
      permits.acquire();
    }

    // packetId is currently unused here; it is only carried for logging on
    // the sending side. Each ACK opens the window by exactly one permit.
    private void ack(String packetId) {
      permits.release();
    }

    private void abort() {
      abort.set(true);
      // Wake any blocked sender; callers must check isAborted() after waking.
      permits.release(MAX_PERMITS);
    }

    private void setWindowId(int id) {
      windowId = id;
    }
  }

  /**
   * Sent to abort message processing.
   *
   * @see Window#isAborted()
   * @see FlowController#sendAbort(DM, int, DistributedMember)
   */
  public static class FlowControlAbortMessage extends DistributionMessage {
    /** the window id */
    private int windowId;

    public FlowControlAbortMessage(int windowId) {
      this.windowId = windowId;
    }

    /** for deserialization */
    public FlowControlAbortMessage() {
    }

    @Override
    public int getDSFID() {
      // NOTE(review): this returns FLOW_CONTROL_ACK even though this is the
      // ABORT message — looks like a copy-paste from FlowControlAckMessage.
      // Confirm whether a distinct FLOW_CONTROL_ABORT DSFID exists and is
      // registered before changing it; both messages sharing an id would
      // break wire-level deserialization routing.
      return FLOW_CONTROL_ACK;
    }

    @Override
    public int getProcessorType() {
      return DistributionManager.STANDARD_EXECUTOR;
    }

    @Override
    protected void process(DistributionManager dm) {
      if (getLoggerI18n().fineEnabled())
        getLoggerI18n().fine("SNP: Received ABORT on window " + windowId + " from member " + getSender());

      // Window may have been closed/GC'd already; silently ignore in that case.
      WindowImpl<?, ?> win = (WindowImpl<?, ?>) FlowController.getInstance().processors.retrieve(windowId);
      if (win != null) {
        win.abort();
      }
    }

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      super.fromData(in);
      windowId = in.readInt();
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      out.writeInt(windowId);
    }
  }

  /**
   * Sent to acknowledge receipt of a message packet.
   *
   * @see FlowController#sendAck(DM, DistributedMember, int, String)
   */
  public static class FlowControlAckMessage extends DistributionMessage {
    /** the window id */
    private int windowId;

    /** the packet id */
    private String packetId;

    public FlowControlAckMessage(int windowId, String packetId) {
      this.windowId = windowId;
      this.packetId = packetId;
    }

    /** for deserialization */
    public FlowControlAckMessage() {
    }

    @Override
    public int getDSFID() {
      return FLOW_CONTROL_ACK;
    }

    @Override
    public int getProcessorType() {
      return DistributionManager.STANDARD_EXECUTOR;
    }

    @Override
    protected void process(DistributionManager dm) {
      if (getLoggerI18n().fineEnabled())
        getLoggerI18n().fine("SNP: Received ACK for packet " + packetId + " on window " + windowId + " from member " + getSender());

      WindowImpl<?, ?> win = (WindowImpl<?, ?>) FlowController.getInstance().processors.retrieve(windowId);
      if (win != null) {
        win.ack(packetId);
      }
    }

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      super.fromData(in);
      windowId = in.readInt();
      packetId = InternalDataSerializer.readString(in);
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      out.writeInt(windowId);
      InternalDataSerializer.writeString(packetId, out);
    }
  }
}
package org.hisp.dhis.validation;

/*
 * Copyright (c) 2004-2016, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.common.DimensionItemType;
import org.hisp.dhis.common.DimensionalItemObject;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.common.MapMap;
import org.hisp.dhis.common.SetMap;
import org.hisp.dhis.constant.ConstantService;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataelement.DataElementCategoryService;
import org.hisp.dhis.dataelement.DataElementOperand;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.datavalue.DataValue;
import org.hisp.dhis.datavalue.DataValueService;
import org.hisp.dhis.expression.ExpressionService;
import org.hisp.dhis.i18n.I18nFormat;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.program.Program;
import org.hisp.dhis.program.ProgramDataElementDimensionItem;
import org.hisp.dhis.program.ProgramIndicator;
import org.hisp.dhis.program.ProgramTrackedEntityAttributeDimensionItem;
import org.hisp.dhis.setting.SettingKey;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.system.util.Clock;
import org.hisp.dhis.trackedentity.TrackedEntityAttribute;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.hisp.dhis.validation.notification.ValidationNotificationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.transaction.annotation.Transactional;

import java.util.*;
import java.util.stream.Collectors;

import static org.hisp.dhis.common.DimensionItemType.*;
import static org.hisp.dhis.common.DimensionalObjectUtils.COMPOSITE_DIM_OBJECT_ESCAPED_SEP;
import static org.hisp.dhis.commons.util.TextUtils.splitSafe;

/**
 * Default implementation of {@link ValidationService}. Builds a
 * {@link ValidationRunContext} (periods, rules, org units, dimensional items)
 * and delegates the actual evaluation to {@link Validator#validate}.
 *
 * @author Jim Grace
 * @author Stian Sandvold
 */
@Transactional
public class DefaultValidationService
    implements ValidationService
{
    private static final Log log = LogFactory.getLog( DefaultValidationService.class );

    // Dimension item types whose values come from the event (tracker) side
    // rather than aggregate data values; used to split items in getEventItems().
    private static final ImmutableSet<DimensionItemType> EVENT_DIM_ITEM_TYPES = ImmutableSet.of(
        PROGRAM_DATA_ELEMENT, PROGRAM_ATTRIBUTE, PROGRAM_INDICATOR );

    @Autowired
    private PeriodService periodService;

    @Autowired
    private ExpressionService expressionService;

    @Autowired
    private IdentifiableObjectManager idObjectManager;

    @Autowired
    private DataValueService dataValueService;

    @Autowired
    private DataElementCategoryService categoryService;

    @Autowired
    private ConstantService constantService;

    @Autowired
    private OrganisationUnitService organisationUnitService;

    @Autowired
    private ValidationNotificationService notificationService;

    @Autowired
    private SystemSettingManager systemSettingManager;

    @Autowired
    private ValidationRuleService validationRuleService;

    @Autowired
    private ApplicationContext applicationContext;

    // Setter-injected (not @Autowired) — presumably to allow a test double;
    // verify against the Spring bean configuration.
    private CurrentUserService currentUserService;

    public void setCurrentUserService( CurrentUserService currentUserService )
    {
        this.currentUserService = currentUserService;
    }

    // -------------------------------------------------------------------------
    // ValidationRule business logic
    // -------------------------------------------------------------------------

    @Override
    public Collection<ValidationResult> startInteractiveValidationAnalysis( Date startDate, Date endDate,
        List<OrganisationUnit> orgUnits, DataElementCategoryOptionCombo attributeOptionCombo,
        ValidationRuleGroup group, boolean sendNotifications, I18nFormat format )
    {
        Clock clock = new Clock( log ).startClock().logTime( "Starting interactive validation run." );

        Collection<Period> periods = periodService.getPeriodsBetweenDates( startDate, endDate );

        // No group given means validate against every rule in the system.
        Collection<ValidationRule> rules = group != null ?
            group.getMembers() : validationRuleService.getAllValidationRules();

        ValidationRunContext context = getValidationContext( orgUnits, periods, rules )
            .withAttributeCombo( attributeOptionCombo )
            .withMaxResults( MAX_INTERACTIVE_ALERTS )
            .withSendNotifications( sendNotifications )
            .build();

        clock.logTime( "Initialized interactive validation run." );

        Collection<ValidationResult> validationResults = startValidationAnalysis( context );

        clock.logTime( "Finished interactive validation run." );

        return validationResults;
    }

    @Override
    public Collection<ValidationResult> startInteractiveValidationAnalysis( DataSet dataSet, Period period,
        OrganisationUnit orgUnit, DataElementCategoryOptionCombo attributeOptionCombo )
    {
        // Only rules touching the data elements of this data set are relevant.
        Collection<ValidationRule> rules = validationRuleService
            .getValidationRulesForDataElements( dataSet.getDataElements() );

        List<OrganisationUnit> orgUnits = Lists.newArrayList( orgUnit );

        Collection<Period> periods = Sets.newHashSet( period );

        ValidationRunContext context = getValidationContext( orgUnits, periods, rules )
            .withAttributeCombo( attributeOptionCombo )
            .withMaxResults( MAX_INTERACTIVE_ALERTS )
            .build();

        Collection<ValidationResult> validationResults = startValidationAnalysis( context );

        return validationResults;
    }

    @Override
    public void startScheduledValidationAnalysis()
    {
        List<OrganisationUnit> orgUnits = organisationUnitService.getAllOrganisationUnits();

        // Find all rules which might generate notifications
        Set<ValidationRule> rules = getValidationRulesWithNotificationTemplates();

        Set<Period> periods = extractNotificationPeriods( rules );

        Clock clock = new Clock( log ).startClock().logTime( "Starting scheduled validation run, orgUnits: "
            + orgUnits.size() + ", periods: " + periods.size() + ", rules:" + rules.size() );

        // TODO: We are not actively using LAST_MONITORING_RUN anymore, remove when sure we don't need it.
        systemSettingManager.saveSystemSetting( SettingKey.LAST_MONITORING_RUN, new Date() );

        // NOTE(review): notifications are disabled here and results persisted
        // instead — confirm whether a downstream job sends the notifications.
        ValidationRunContext context = getValidationContext( orgUnits, periods, rules )
            .withPersistResults( true )
            .withMaxResults( MAX_SCHEDULED_ALERTS )
            .withSendNotifications( false )
            .build();

        clock.logTime( "Initialized scheduled validation run." );

        startValidationAnalysis( context );

        clock.logTime( "Finished scheduled validation run." );
    }

    /**
     * Runs the analysis for a fully-built context and optionally dispatches
     * notifications for the resulting violations.
     */
    private Collection<ValidationResult> startValidationAnalysis( ValidationRunContext context )
    {
        Collection<ValidationResult> results = Validator.validate( context, applicationContext );

        log.info( "Send Notifications: " + context.isSendNotifications() );
        log.info( "Violations: " + context.getValidationResults().size() );

        if ( context.isSendNotifications() )
        {
            notificationService.sendNotifications( Sets.newHashSet( results ) );
        }

        return results;
    }

    @Override
    public List<DataElementOperand> validateRequiredComments( DataSet dataSet, Period period,
        OrganisationUnit organisationUnit, DataElementCategoryOptionCombo attributeOptionCombo )
    {
        List<DataElementOperand> violations = new ArrayList<>();

        if ( dataSet.isNoValueRequiresComment() )
        {
            for ( DataElement de : dataSet.getDataElements() )
            {
                for ( DataElementCategoryOptionCombo co : de.getCategoryOptionCombos() )
                {
                    DataValue dv = dataValueService
                        .getDataValue( de, period, organisationUnit, co, attributeOptionCombo );

                    // A violation is a cell with neither a (non-blank) value
                    // nor a (non-blank) comment.
                    boolean missingValue = dv == null || StringUtils.trimToNull( dv.getValue() ) == null;
                    boolean missingComment = dv == null || StringUtils.trimToNull( dv.getComment() ) == null;

                    if ( missingValue && missingComment )
                    {
                        violations.add( new DataElementOperand( de, co ) );
                    }
                }
            }
        }

        return violations;
    }

    // -------------------------------------------------------------------------
    // Supportive methods
    // -------------------------------------------------------------------------

    private Set<ValidationRule> getValidationRulesWithNotificationTemplates()
    {
        return Sets.newHashSet( validationRuleService.getValidationRulesWithNotificationTemplates() );
    }

    /**
     * Get the current and most recent periods to use when performing validation
     * for generating notifications (previously 'alerts'). The periods are
     * filtered against existing (persisted) periods.
     * <p>
     * TODO Consider:
     * This method assumes that the last successful validation run was one day ago.
     * If this is not the case (more than one day ago) adding additional (daily)
     * periods to 'fill the gap' could be considered.
     */
    private Set<Period> extractNotificationPeriods( Set<ValidationRule> rules )
    {
        return rules.stream()
            // Re-fetch each period type by name so we work with the persisted instance.
            .map( rule -> periodService.getPeriodTypeByName( rule.getPeriodType().getName() ) )
            .map( periodType -> {
                // Span from the start of the previous period to the end of the
                // current one, then keep only persisted periods in that range.
                Period current = periodType.createPeriod(), previous = periodType.getPreviousPeriod( current );
                Date start = previous.getStartDate(), end = current.getEndDate();
                return periodService.getIntersectingPeriodsByPeriodType( periodType, start, end );
            } )
            .flatMap( Collection::stream )
            .collect( Collectors.toSet() );
    }

    /**
     * Gets the event dimension item for the validation rules.
     *
     * @param dimensionItemMap map from UIDs to all dimension items.
     * @return Set with all event dimension items.
     */
    private Set<DimensionalItemObject> getEventItems( Map<String, DimensionalItemObject> dimensionItemMap )
    {
        return dimensionItemMap.values().stream()
            .filter( di -> EVENT_DIM_ITEM_TYPES.contains( di.getDimensionItemType() ) )
            .collect( Collectors.toSet() );
    }

    /**
     * Returns a new Builder with basic configuration based on the input parameters.
     *
     * @param orgUnits organisation units to include in analysis.
     * @param periods periods to include in analysis.
     * @param validationRules rules to include in analysis.
     * @return Builder with basic configuration based on input.
     */
    private ValidationRunContext.Builder getValidationContext( List<OrganisationUnit> orgUnits,
        Collection<Period> periods, Collection<ValidationRule> validationRules )
    {
        User currentUser = currentUserService.getCurrentUser();

        Map<PeriodType, PeriodTypeExtended> periodTypeExtendedMap = new HashMap<>();

        // Order matters: periods first (rules are skipped for period types
        // with no periods), then rules, then prune empty period types, then
        // org units are matched against the surviving period types.
        addPeriodsToContext( periodTypeExtendedMap, periods );

        Map<String, DimensionalItemObject> dimensionItemMap = addRulesToContext( periodTypeExtendedMap, validationRules );

        removeAnyUnneededPeriodTypes( periodTypeExtendedMap );

        addOrgUnitsToContext( periodTypeExtendedMap, orgUnits );

        ValidationRunContext.Builder builder = ValidationRunContext.newBuilder()
            .withPeriodTypeExtendedMap( periodTypeExtendedMap )
            .withOrgUnits( orgUnits )
            .withEventItems( getEventItems( dimensionItemMap ) )
            .withConstantMap( constantService.getConstantMap() );

        // Dimension constraints only apply when there is a logged-in user
        // (scheduled runs may execute without one).
        if ( currentUser != null )
        {
            builder
                .withCoDimensionConstraints( categoryService.getCoDimensionConstraints( currentUser.getUserCredentials() ) )
                .withCogDimensionConstraints( categoryService.getCogDimensionConstraints( currentUser.getUserCredentials() ) );
        }

        return builder;
    }

    /**
     * Adds Periods to the context, grouped by period type.
     *
     * @param periodTypeExtendedMap period type map to extended period types.
     * @param periods periods to group and add.
     */
    private void addPeriodsToContext( Map<PeriodType, PeriodTypeExtended> periodTypeExtendedMap,
        Collection<Period> periods )
    {
        for ( Period period : periods )
        {
            PeriodTypeExtended periodTypeX = getOrCreatePeriodTypeExtended( periodTypeExtendedMap,
                period.getPeriodType() );
            periodTypeX.getPeriods().add( period );
        }
    }

    /**
     * Adds validation rules to the context.
     *
     * @param periodTypeExtendedMap period type map to extended period types.
     * @param rules validation rules to add.
     * @return map from dimension item UID to the resolved dimensional object.
     */
    private Map<String, DimensionalItemObject> addRulesToContext( Map<PeriodType, PeriodTypeExtended> periodTypeExtendedMap,
        Collection<ValidationRule> rules )
    {
        // 1. Find all dimensional object IDs in the expressions of the validation rules.
        SetMap<Class<? extends DimensionalItemObject>, String> allItemIds = new SetMap<>();

        SetMap<ValidationRule, String> ruleItemIds = new SetMap<>();

        for ( ValidationRule rule : rules )
        {
            if ( periodTypeExtendedMap.get( rule.getPeriodType() ) == null )
            {
                continue; // Don't include rules for which there are no periods.
            }

            SetMap<Class<? extends DimensionalItemObject>, String> dimensionItemIdentifiers = expressionService
                .getDimensionalItemIdsInExpression( rule.getLeftSide().getExpression() );

            dimensionItemIdentifiers.putValues(
                expressionService.getDimensionalItemIdsInExpression( rule.getRightSide().getExpression() ) );

            // NOTE(review): Sets.union returns an unmodifiable *view*; the
            // reduce works but chains views over the id sets. Consider a
            // flatMap+toSet if this is ever revisited.
            Set<String> ruleIds = dimensionItemIdentifiers.values().stream()
                .reduce( new HashSet<>(), ( x, y ) -> Sets.union( x, y ) );

            ruleItemIds.putValues( rule, ruleIds );

            allItemIds.putValues( dimensionItemIdentifiers );
        }

        // 2. Get the dimensional objects from the IDs. (Get them all at once for best performance.)
        Map<String, DimensionalItemObject> dimensionItemMap = getDimensionalItemObjects( allItemIds );

        // 3. Save the dimensional objects in the validation context.
        for ( ValidationRule rule : rules )
        {
            PeriodTypeExtended periodTypeX = periodTypeExtendedMap.get( rule.getPeriodType() );

            if ( periodTypeX == null )
            {
                continue;
            }

            ValidationRuleExtended ruleX = new ValidationRuleExtended( rule );

            // May contain nulls for ids that did not resolve; filtered below.
            Set<DimensionalItemObject> ruleDimensionItemObjects = ruleItemIds.get( rule ).stream()
                .map( id -> dimensionItemMap.get( id ) )
                .collect( Collectors.toSet() );

            // NOTE(review): collect() never returns null, so this check (and
            // the one on ruleDataElementOperands below) always passes — dead
            // guards kept as-is for behavior parity.
            if ( ruleDimensionItemObjects != null )
            {
                ruleX.setDimensionalItemObjects( ruleDimensionItemObjects );

                Set<DataElementOperand> ruleDataElementOperands = ruleDimensionItemObjects.stream()
                    .filter( o -> o != null && o.getDimensionItemType() == DimensionItemType.DATA_ELEMENT_OPERAND )
                    .map( o -> (DataElementOperand) o )
                    .collect( Collectors.toSet() );

                if ( ruleDataElementOperands != null )
                {
                    ruleX.setDataElementOperands( ruleDataElementOperands );

                    Set<DataElement> ruleDataElements = ruleDataElementOperands.stream()
                        .map( o -> o.getDataElement() )
                        .collect (Collectors.toSet() );

                    ruleX.setDataElements( ruleDataElements );
                }
            }

            periodTypeX.getRuleXs().add( ruleX );

            Set<DataElement> ruleDataElements = ruleX.getDataElements();

            // Add data elements of rule to the period extended
            periodTypeX.getDataElements().addAll( ruleDataElements );

            // Add the allowed period types for data elements of rule
            periodTypeX.getAllowedPeriodTypes().addAll(
                getAllowedPeriodTypesForDataElements( ruleDataElements, rule.getPeriodType() ) );
        }

        return dimensionItemMap;
    }

    /**
     * Gets all required DimensionalItemObjects from their UIDs.
     *
     * @param expressionIdMap UIDs of DimensionalItemObjects to get.
     * @return map of the DimensionalItemObjects.
     */
    private Map<String, DimensionalItemObject> getDimensionalItemObjects(
        SetMap<Class<? extends DimensionalItemObject>, String> expressionIdMap )
    {
        // 1. Get ids for all the individual IdentifiableObjects within the DimensionalItemObjects:
        SetMap<Class<? extends IdentifiableObject>, String> idsToGet = new SetMap<>();

        getIdentifiableObjectIds( idsToGet, expressionIdMap, DataElementOperand.class,
            DataElement.class, DataElementCategoryOptionCombo.class );
        getIdentifiableObjectIds( idsToGet, expressionIdMap, ProgramDataElementDimensionItem.class,
            Program.class, DataElement.class );
        getIdentifiableObjectIds( idsToGet, expressionIdMap, ProgramTrackedEntityAttributeDimensionItem.class,
            Program.class, TrackedEntityAttribute.class );
        getIdentifiableObjectIds( idsToGet, expressionIdMap, ProgramIndicator.class,
            ProgramIndicator.class );

        // 2. Look up all the IdentifiableObjects (each class all together, for best performance):
        MapMap<Class<? extends IdentifiableObject>, String, IdentifiableObject> idMap = new MapMap<>();

        for ( Map.Entry<Class<? extends IdentifiableObject>, Set<String>> e : idsToGet.entrySet() )
        {
            idMap.putEntries( e.getKey(), idObjectManager.get( e.getKey(), e.getValue() ).stream()
                .collect( Collectors.toMap( o -> o.getUid(), o -> o ) ) );
        }

        // 3. Build the map of DimensionalItemObjects:
        Map<String, DimensionalItemObject> dimObjects = new HashMap<>();

        for ( Map.Entry<Class<? extends DimensionalItemObject>, Set<String>> e : expressionIdMap.entrySet() )
        {
            for ( String id : e.getValue() )
            {
                if ( e.getKey() == DataElementOperand.class )
                {
                    // Composite id "dataElementUid.cocUid"; the coc part may be absent.
                    DataElementOperand deo = new DataElementOperand(
                        (DataElement)idMap.getValue( DataElement.class, getIdPart( id, 0 ) ),
                        (DataElementCategoryOptionCombo)idMap.getValue( DataElementCategoryOptionCombo.class, getIdPart( id, 1) ) );

                    if ( deo.getDataElement() != null &&
                        ( deo.getCategoryOptionCombo() != null || getIdPart( id, 1 ) == null ) )
                    {
                        dimObjects.put( id, deo );
                    }
                }
                else if ( e.getKey() == ProgramDataElementDimensionItem.class )
                {
                    ProgramDataElementDimensionItem pde = new ProgramDataElementDimensionItem(
                        (Program)idMap.getValue( Program.class, getIdPart( id, 0 ) ),
                        (DataElement)idMap.getValue( DataElement.class, getIdPart( id, 1) ) );

                    if ( pde.getProgram() != null && pde.getDataElement() != null )
                    {
                        dimObjects.put( id, pde );
                    }
                }
                else if ( e.getKey() == ProgramTrackedEntityAttributeDimensionItem.class )
                {
                    ProgramTrackedEntityAttributeDimensionItem pa = new ProgramTrackedEntityAttributeDimensionItem(
                        (Program)idMap.getValue( Program.class, getIdPart( id, 0 ) ),
                        (TrackedEntityAttribute)idMap.getValue( TrackedEntityAttribute.class, getIdPart( id, 1) ) );

                    if ( pa.getProgram() != null && pa.getAttribute() != null )
                    {
                        dimObjects.put( id, pa );
                    }
                }
                else if ( e.getKey() == ProgramIndicator.class )
                {
                    // Program indicators use a plain (non-composite) UID.
                    ProgramIndicator pi = (ProgramIndicator)idMap.getValue( ProgramIndicator.class, id );

                    if ( pi != null )
                    {
                        dimObjects.put( id, pi );
                    }
                }
            }
        }

        return dimObjects;
    }

    /**
     * Takes all the identifiers within a dimensional object class, and splits
     * them into identifiers for the identifiable objects that make up
     * the dimensional object.
     *
     * @param idsToGet To add to: identifiable object IDs to look up.
     * @param expressionIdMap Dimensional object IDs from expression.
     * @param dimClass Class of dimensional object
     * @param idClasses Component class(es) of identifiable objects
     */
    @SafeVarargs
    private final void getIdentifiableObjectIds( SetMap<Class<? extends IdentifiableObject>, String> idsToGet,
        SetMap<Class<? extends DimensionalItemObject>, String> expressionIdMap,
        Class<? extends DimensionalItemObject> dimClass, Class<? extends IdentifiableObject>... idClasses )
    {
        Set<String> expressionIds = expressionIdMap.get( dimClass );

        if ( expressionIds == null )
        {
            return;
        }

        // Part i of each composite id belongs to component class idClasses[i].
        for ( int i = 0; i < idClasses.length; i++ )
        {
            for ( String expressionId : expressionIds )
            {
                String objectId = getIdPart( expressionId, i );

                if ( objectId != null )
                {
                    idsToGet.putValue( idClasses[ i ], objectId );
                }
            }
        }
    }

    /**
     * Gets part of an object identifier which may be composite.
     *
     * @param id The identifier to parse.
     * @param index Index of the part to return.
     * @return The identifier part.
     */
    private String getIdPart( String id, int index )
    {
        return splitSafe( id, COMPOSITE_DIM_OBJECT_ESCAPED_SEP, index );
    }

    /**
     * Removes any period types that don't have rules assigned to them.
     *
     * @param periodTypeExtendedMap period type map to extended period types.
     */
    private void removeAnyUnneededPeriodTypes( Map<PeriodType, PeriodTypeExtended> periodTypeExtendedMap )
    {
        // Copy values first to avoid mutating the map while iterating it.
        Set<PeriodTypeExtended> periodTypeXs = new HashSet<>( periodTypeExtendedMap.values() );

        for ( PeriodTypeExtended periodTypeX : periodTypeXs )
        {
            if ( periodTypeX.getRuleXs().isEmpty() )
            {
                periodTypeExtendedMap.remove( periodTypeX.getPeriodType() );
            }
        }
    }

    /**
     * Adds a collection of organisation units to the validation run context.
     *
     * @param periodTypeExtendedMap period type map to extended period types.
     * @param orgUnits organisation units to add.
     */
    private void addOrgUnitsToContext( Map<PeriodType, PeriodTypeExtended> periodTypeExtendedMap,
        Collection<OrganisationUnit> orgUnits )
    {
        for ( OrganisationUnit orgUnit : orgUnits )
        {
            Map<PeriodType, Set<DataElement>> orgUnitElementsMap = orgUnit.getDataElementsInDataSetsByPeriodType();

            for ( PeriodTypeExtended periodTypeX : periodTypeExtendedMap.values() )
            {
                // For each org unit, collect data elements from data sets
                // whose period type is allowed for this extended period type.
                periodTypeX.getOrgUnitDataElements().put( orgUnit, new HashSet<>() );

                for ( PeriodType allowedType : periodTypeX.getAllowedPeriodTypes() )
                {
                    Set<DataElement> orgUnitDataElements = orgUnitElementsMap.get( allowedType );

                    if ( orgUnitDataElements != null )
                    {
                        periodTypeX.getOrgUnitDataElements().get( orgUnit ).addAll( orgUnitDataElements );
                    }
                }
            }
        }
    }

    /**
     * Gets the PeriodTypeExtended from the context object. If not found,
     * creates a new PeriodTypeExtended object, puts it into the context object,
     * and returns it.
     *
     * @param periodTypeExtendedMap period type map to extended period types.
     * @param periodType period type to search for
     * @return period type extended from the context object
     */
    private PeriodTypeExtended getOrCreatePeriodTypeExtended( Map<PeriodType, PeriodTypeExtended> periodTypeExtendedMap,
        PeriodType periodType )
    {
        PeriodTypeExtended periodTypeX = periodTypeExtendedMap.get( periodType );

        if ( periodTypeX == null )
        {
            periodTypeX = new PeriodTypeExtended( periodType );
            periodTypeExtendedMap.put( periodType, periodTypeX );
        }

        return periodTypeX;
    }

    /**
     * Finds all period types that may contain given data elements, whose period
     * type interval is at least as long as the given period type.
     *
     * @param dataElements data elements to look for
     * @param periodType the minimum-length period type
     * @return all period types that are allowed for these data elements
     */
    private static Set<PeriodType> getAllowedPeriodTypesForDataElements( Collection<DataElement> dataElements,
        PeriodType periodType )
    {
        Set<PeriodType> allowedPeriodTypes = new HashSet<>();

        if ( dataElements != null )
        {
            for ( DataElement dataElement : dataElements )
            {
                for ( DataSet dataSet : dataElement.getDataSets() )
                {
                    // Higher frequency order means a longer (or equal) interval.
                    if ( dataSet.getPeriodType().getFrequencyOrder() >= periodType.getFrequencyOrder() )
                    {
                        allowedPeriodTypes.add( dataSet.getPeriodType() );
                    }
                }
            }
        }

        return allowedPeriodTypes;
    }
}
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.tools.idea.gradle.variant.conflict; import com.android.builder.model.AndroidLibrary; import com.android.tools.idea.gradle.IdeaAndroidProject; import com.android.tools.idea.gradle.messages.Message; import com.android.tools.idea.gradle.messages.ProjectSyncMessages; import com.android.tools.idea.gradle.service.notification.hyperlink.NotificationHyperlink; import com.android.tools.idea.gradle.util.GradleUtil; import com.android.tools.idea.gradle.variant.view.BuildVariantView; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.must.android.module.extension.AndroidModuleExtension; import java.util.Collection; import java.util.List; import java.util.Map; import static com.android.tools.idea.gradle.messages.CommonMessageGroupNames.VARIANT_SELECTION_CONFLICTS; import static com.android.tools.idea.gradle.variant.conflict.ConflictResolution.solveSelectionConflict; /** * Set of all variant-selection-related conflicts. 
We classify these conflicts in 2 groups:
 * <ol>
 * <li>
 * <b>Selection conflicts.</b> These conflicts occur when module A depends on module B/variant X but module B has variant Y selected
 * instead. These conflicts can be easily fixed by selecting the right variant in the "Build Variants" tool window.
 * </li>
 * <li>
 * <b>Structure conflicts.</b> These conflicts occur when there are multiple modules depending on different variants of a single module.
 * For example, module A depends on module E/variant X, module B depends on module E/variant Y and module C depends on module E/variant Z.
 * These conflicts cannot be resolved through the "Build Variants" tool window because regardless of which variant is selected on module E,
 * we will always have a selection conflict. These conflicts can be resolved by importing a subset of modules into the IDE (i.e. project
 * profiles.)
 * </li>
 * </ol>
 */
public class ConflictSet {
  /**
   * Walks every Android library module in the project and compares the variant each dependent
   * module expects against the variant currently selected, collecting both selection and
   * structure conflicts.
   */
  @NotNull
  public static ConflictSet findConflicts(@NotNull Project project) {
    Map<String, Conflict> selectionConflicts = Maps.newHashMap();
    Map<String, Conflict> structureConflicts = Maps.newHashMap();
    ModuleManager moduleManager = ModuleManager.getInstance(project);
    for (Module module : moduleManager.getModules()) {
      IdeaAndroidProject currentProject = getAndroidProject(module);
      // Only library modules can be the target of a variant dependency.
      if (currentProject == null || !currentProject.isLibrary()) {
        continue;
      }
      String gradlePath = GradleUtil.getGradlePath(module);
      if (gradlePath == null) {
        continue;
      }
      String selectedVariant = currentProject.getSelectedVariant().getName();
      for (Module dependent : ModuleUtilCore.getAllDependentModules(module)) {
        IdeaAndroidProject dependentProject = getAndroidProject(dependent);
        if (dependentProject == null) {
          continue;
        }
        String expectedVariant = getExpectedVariant(dependentProject, gradlePath);
        if (StringUtil.isEmpty(expectedVariant)) {
          continue;
        }
        // Every expectation is recorded as a potential structure conflict; only
        // mismatches are recorded as selection conflicts.
        addConflict(structureConflicts, module, selectedVariant, dependent, expectedVariant);
        if (!selectedVariant.equals(expectedVariant)) {
          addConflict(selectionConflicts, module, selectedVariant, dependent, expectedVariant);
        }
      }
    }

    // Structural conflicts are the ones that have more than one group of modules depending on different variants of another module.
    List<Conflict> filteredStructureConflicts = Lists.newArrayList();
    for (Conflict conflict : structureConflicts.values()) {
      if (conflict.getVariants().size() > 1) {
        filteredStructureConflicts.add(conflict);
      }
    }

    return new ConflictSet(project, selectionConflicts.values(), filteredStructureConflicts);
  }

  /** Returns the Android-Gradle model for the module, or null if the module is not a Gradle Android module. */
  @Nullable
  private static IdeaAndroidProject getAndroidProject(@NotNull Module module) {
    AndroidModuleExtension facet = ModuleUtilCore.getExtension(module, AndroidModuleExtension.class);
    if (facet == null || !facet.isGradleProject()) {
      return null;
    }
    return facet.getIdeaAndroidProject();
  }

  /** Records that {@code affected} expects {@code expectedVariant} of {@code source}, merging into any existing conflict for the same source. */
  private static void addConflict(@NotNull Map<String, Conflict> allConflicts,
                                  @NotNull Module source,
                                  @NotNull String selectedVariant,
                                  @NotNull Module affected,
                                  @NotNull String expectedVariant) {
    String causeName = source.getName();
    Conflict conflict = allConflicts.get(causeName);
    if (conflict == null) {
      conflict = new Conflict(source, selectedVariant);
      allConflicts.put(causeName, conflict);
    }
    conflict.addAffectedModule(affected, expectedVariant);
  }

  /** Returns the variant of {@code dependencyGradlePath} that {@code dependentProject}'s selected variant depends on, or null if none. */
  @Nullable
  private static String getExpectedVariant(@NotNull IdeaAndroidProject dependentProject, @NotNull String dependencyGradlePath) {
    List<AndroidLibrary> dependencies =
      GradleUtil.getDirectLibraryDependencies(dependentProject.getSelectedVariant(), dependentProject);
    for (AndroidLibrary dependency : dependencies) {
      if (!dependencyGradlePath.equals(dependency.getProject())) {
        continue;
      }
      return dependency.getProjectVariant();
    }
    return null;
  }

  @NotNull private final Project myProject;
  @NotNull private final ImmutableList<Conflict> mySelectionConflicts;
  @NotNull private final ImmutableList<Conflict> myStructureConflicts;

  ConflictSet(@NotNull Project project,
              @NotNull Collection<Conflict> selectionConflicts,
              @NotNull Collection<Conflict> structureConflicts) {
    myProject = project;
    mySelectionConflicts = ImmutableList.copyOf(selectionConflicts);
    myStructureConflicts = ImmutableList.copyOf(structureConflicts);
  }

  @NotNull
  public Project getProject() {
    return myProject;
  }

  @NotNull
  public List<Conflict> getSelectionConflicts() {
    return mySelectionConflicts;
  }

  @NotNull
  public List<Conflict> getStructureConflicts() {
    return myStructureConflicts;
  }

  /**
   * Shows the "variant selection" conflicts in the "Build Variant" and "Messages" windows.
   */
  public void showSelectionConflicts() {
    ProjectSyncMessages messages = ProjectSyncMessages.getInstance(myProject);
    String groupName = VARIANT_SELECTION_CONFLICTS;
    // Replace any messages from a previous analysis before re-reporting.
    messages.removeMessages(groupName);

    for (final Conflict conflict : mySelectionConflicts) {
      // Creates the "Select in 'Build Variants' window" hyperlink.
      final Module source = conflict.getSource();
      String hyperlinkText = String.format("Select '%1$s' in \"Build Variants\" window", source.getName());
      NotificationHyperlink selectInBuildVariantsWindowHyperlink =
        new NotificationHyperlink("select.conflict.in.variants.window", hyperlinkText) {
          @Override
          protected void execute(@NotNull Project project) {
            BuildVariantView.getInstance(project).findAndSelect(source);
          }
        };

      // Creates the "Fix problem" hyperlink.
      NotificationHyperlink quickFixHyperlink = new NotificationHyperlink("fix.conflict", "Fix problem") {
        @Override
        protected void execute(@NotNull Project project) {
          boolean solved = solveSelectionConflict(conflict);
          if (solved) {
            // Re-run the analysis so the windows reflect the new state.
            ConflictSet conflicts = findConflicts(project);
            conflicts.showSelectionConflicts();
          }
        }
      };

      Message msg = new Message(groupName, Message.Type.ERROR, conflict.toString());
      messages.add(msg, selectInBuildVariantsWindowHyperlink, quickFixHyperlink);
    }
    BuildVariantView.getInstance(myProject).updateContents(mySelectionConflicts);
  }
}
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.common.hash; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.annotations.Beta; import com.google.common.primitives.UnsignedInts; import java.io.Serializable; /** * Static factories for creating {@link HashCode} instances; most users should never have to use * this. All returned instances are {@link Serializable}. * * @author Dimitris Andreou * @since 12.0 */ @Beta public final class HashCodes { private HashCodes() {} /** * Creates a 32-bit {@code HashCode}, of which the bytes will form the passed int, interpreted * in little endian order. 
*/
public static HashCode fromInt(int hash) {
  return new IntHashCode(hash);
}

/** 32-bit hash code backed by a single int; bytes are exposed in little-endian order. */
private static final class IntHashCode extends HashCode implements Serializable {
  final int hash;

  IntHashCode(int hash) {
    this.hash = hash;
  }

  @Override public int bits() {
    return 32;
  }

  @Override public byte[] asBytes() {
    // Least significant byte first (little endian).
    return new byte[] {
        (byte) hash,
        (byte) (hash >> 8),
        (byte) (hash >> 16),
        (byte) (hash >> 24)};
  }

  @Override public int asInt() {
    return hash;
  }

  @Override public long asLong() {
    throw new IllegalStateException("this HashCode only has 32 bits; cannot create a long");
  }

  @Override public long padToLong() {
    // Zero-extend (treat the int as unsigned) rather than sign-extend.
    return UnsignedInts.toLong(hash);
  }

  private static final long serialVersionUID = 0;
}

/**
 * Creates a 64-bit {@code HashCode}, of which the bytes will form the passed long, interpreted
 * in little endian order.
 */
public static HashCode fromLong(long hash) {
  return new LongHashCode(hash);
}

/** 64-bit hash code backed by a single long; bytes are exposed in little-endian order. */
private static final class LongHashCode extends HashCode implements Serializable {
  final long hash;

  LongHashCode(long hash) {
    this.hash = hash;
  }

  @Override public int bits() {
    return 64;
  }

  @Override public byte[] asBytes() {
    // Least significant byte first (little endian).
    return new byte[] {
        (byte) hash,
        (byte) (hash >> 8),
        (byte) (hash >> 16),
        (byte) (hash >> 24),
        (byte) (hash >> 32),
        (byte) (hash >> 40),
        (byte) (hash >> 48),
        (byte) (hash >> 56)};
  }

  @Override public int asInt() {
    // Truncates to the low 32 bits.
    return (int) hash;
  }

  @Override public long asLong() {
    return hash;
  }

  @Override public long padToLong() {
    return hash;
  }

  private static final long serialVersionUID = 0;
}

/**
 * Creates a {@code HashCode} from a byte array. The array is defensively copied to preserve
 * the immutability contract of {@code HashCode}. The array cannot be empty.
 */
public static HashCode fromBytes(byte[] bytes) {
  checkArgument(bytes.length >= 1, "A HashCode must contain at least 1 byte.");
  return fromBytesNoCopy(bytes.clone());
}

/**
 * Creates a {@code HashCode} from a byte array.
The array is <i>not</i> copied defensively,
 * so it must be handed-off so as to preserve the immutability contract of {@code HashCode}.
 */
static HashCode fromBytesNoCopy(byte[] bytes) {
  return new BytesHashCode(bytes);
}

/** Hash code backed by an arbitrary-length byte array; bytes are interpreted little-endian. */
private static final class BytesHashCode extends HashCode implements Serializable {
  final byte[] bytes;

  BytesHashCode(byte[] bytes) {
    this.bytes = checkNotNull(bytes);
  }

  @Override public int bits() {
    return bytes.length * 8;
  }

  @Override public byte[] asBytes() {
    // Defensive copy: the internal array must stay immutable.
    return bytes.clone();
  }

  @Override public int asInt() {
    checkState(bytes.length >= 4,
        "HashCode#asInt() requires >= 4 bytes (it only has %s bytes).", bytes.length);
    // First four bytes, little endian; mask with 0xFF to undo sign extension.
    return (bytes[0] & 0xFF)
        | ((bytes[1] & 0xFF) << 8)
        | ((bytes[2] & 0xFF) << 16)
        | ((bytes[3] & 0xFF) << 24);
  }

  @Override public long asLong() {
    checkState(bytes.length >= 8,
        "HashCode#asLong() requires >= 8 bytes (it only has %s bytes).", bytes.length);
    // First eight bytes, little endian; 0xFFL masks keep each byte unsigned in the long.
    return (bytes[0] & 0xFFL)
        | ((bytes[1] & 0xFFL) << 8)
        | ((bytes[2] & 0xFFL) << 16)
        | ((bytes[3] & 0xFFL) << 24)
        | ((bytes[4] & 0xFFL) << 32)
        | ((bytes[5] & 0xFFL) << 40)
        | ((bytes[6] & 0xFFL) << 48)
        | ((bytes[7] & 0xFFL) << 56);
  }

  @Override public long padToLong() {
    // Shorter hashes are zero-extended to 64 bits.
    return (bytes.length < 8) ? UnsignedInts.toLong(asInt()) : asLong();
  }

  @Override public int hashCode() {
    if (bytes.length >= 4) {
      return asInt();
    } else {
      // Fewer than 4 bytes: pack whatever bytes exist, little endian.
      int val = (bytes[0] & 0xFF);
      for (int i = 1; i < bytes.length; i++) {
        val |= ((bytes[i] & 0xFF) << (i * 8));
      }
      return val;
    }
  }

  private static final long serialVersionUID = 0;
}
}
/** * Copyright 2010 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.catalog; import java.io.IOException; import java.net.ConnectException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HServerInfo; import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.ipc.HRegionInterface; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Writables; /** * Writes region and assignment information to <code>.META.</code>. * <p> * Uses the {@link CatalogTracker} to obtain locations and connections to * catalogs. */ public class MetaEditor { private static final Log LOG = LogFactory.getLog(MetaEditor.class); /** * Adds a META row for the specified new region. 
* @param regionInfo region information
 * @throws IOException if problem connecting or updating meta
 */
public static void addRegionToMeta(CatalogTracker catalogTracker,
    HRegionInfo regionInfo)
throws IOException {
  Put put = new Put(regionInfo.getRegionName());
  put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER,
    Writables.getBytes(regionInfo));
  // Blocks until a connection to the server hosting .META. is available.
  catalogTracker.waitForMetaServerConnectionDefault().put(
    CatalogTracker.META_REGION, put);
  LOG.info("Added region " + regionInfo.getRegionNameAsString() + " to META");
}

/**
 * Offline parent in meta.
 * Used when splitting.
 * @param catalogTracker catalog tracker
 * @param parent region to mark offline and split in META
 * @param a Split daughter region A
 * @param b Split daughter region B
 * @throws NotAllMetaRegionsOnlineException if .META. is not available
 * @throws IOException if problem connecting or updating meta
 */
public static void offlineParentInMeta(CatalogTracker catalogTracker,
    HRegionInfo parent, final HRegionInfo a, final HRegionInfo b)
throws NotAllMetaRegionsOnlineException, IOException {
  // Work on a copy so the caller's HRegionInfo is not mutated.
  HRegionInfo copyOfParent = new HRegionInfo(parent);
  copyOfParent.setOffline(true);
  copyOfParent.setSplit(true);
  Put put = new Put(copyOfParent.getRegionName());
  addRegionInfo(put, copyOfParent);
  // Record both daughters on the parent row so the split can be recovered.
  put.add(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER,
    Writables.getBytes(a));
  put.add(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER,
    Writables.getBytes(b));
  catalogTracker.waitForMetaServerConnectionDefault().put(CatalogTracker.META_REGION, put);
  LOG.info("Offlined parent region " + parent.getRegionNameAsString() + " in META");
}

/**
 * Adds a META row for the daughter region; location columns are written only when
 * a hosting server is known (serverInfo may be null).
 */
public static void addDaughter(final CatalogTracker catalogTracker,
    final HRegionInfo regionInfo, final HServerInfo serverInfo)
throws NotAllMetaRegionsOnlineException, IOException {
  HRegionInterface server = catalogTracker.waitForMetaServerConnectionDefault();
  byte [] catalogRegionName = CatalogTracker.META_REGION;
  Put put = new Put(regionInfo.getRegionName());
  addRegionInfo(put, regionInfo);
  if (serverInfo != null) addLocation(put, serverInfo);
  server.put(catalogRegionName, put);
  LOG.info("Added daughter " + regionInfo.getRegionNameAsString() +
    " in region " + Bytes.toString(catalogRegionName) +
    (serverInfo == null? ", serverInfo=null": ", serverInfo=" + serverInfo.getServerName()));
}

/**
 * Updates the location of the specified META region in ROOT to be the
 * specified server hostname and startcode.
 * <p>
 * Uses passed catalog tracker to get a connection to the server hosting
 * ROOT and makes edits to that region.
 *
 * @param catalogTracker catalog tracker
 * @param regionInfo region to update location of
 * @param serverInfo server the region is located on
 * @throws IOException if problem connecting or updating
 * @throws ConnectException Usually because the regionserver carrying .META.
 * is down.
 * @throws NullPointerException Because no -ROOT- server connection
 */
public static void updateMetaLocation(CatalogTracker catalogTracker,
    HRegionInfo regionInfo, HServerInfo serverInfo)
throws IOException, ConnectException {
  HRegionInterface server = catalogTracker.waitForRootServerConnectionDefault();
  if (server == null) throw new IOException("No server for -ROOT-");
  updateLocation(server, CatalogTracker.ROOT_REGION, regionInfo, serverInfo);
}

/**
 * Updates the location of the specified region in META to be the specified
 * server hostname and startcode.
 * <p>
 * Uses passed catalog tracker to get a connection to the server hosting
 * META and makes edits to that region.
 *
 * @param catalogTracker catalog tracker
 * @param regionInfo region to update location of
 * @param serverInfo server the region is located on
 * @throws IOException if problem connecting or updating
 */
public static void updateRegionLocation(CatalogTracker catalogTracker,
    HRegionInfo regionInfo, HServerInfo serverInfo)
throws IOException {
  updateLocation(catalogTracker.waitForMetaServerConnectionDefault(),
    CatalogTracker.META_REGION, regionInfo, serverInfo);
}

/**
 * Updates the location of the specified region to be the specified server.
 * <p>
 * Connects to the specified server which should be hosting the specified
 * catalog region name to perform the edit.
 *
 * @param server connection to server hosting catalog region
 * @param catalogRegionName name of catalog region being updated
 * @param regionInfo region to update location of
 * @param serverInfo server the region is located on
 * @throws IOException In particular could throw {@link java.net.ConnectException}
 * if the server is down on other end.
 */
private static void updateLocation(HRegionInterface server,
    byte [] catalogRegionName, HRegionInfo regionInfo, HServerInfo serverInfo)
throws IOException {
  Put put = new Put(regionInfo.getRegionName());
  addLocation(put, serverInfo);
  server.put(catalogRegionName, put);
  LOG.info("Updated row " + regionInfo.getRegionNameAsString() +
    " in region " + Bytes.toString(catalogRegionName) + " with " +
    "server=" + serverInfo.getHostnamePort() + ", " +
    "startcode=" + serverInfo.getStartCode());
}

/**
 * Deletes the specified region from META.
 * @param catalogTracker catalog tracker
 * @param regionInfo region to be deleted from META
 * @throws IOException if problem connecting or updating meta
 */
public static void deleteRegion(CatalogTracker catalogTracker,
    HRegionInfo regionInfo)
throws IOException {
  Delete delete = new Delete(regionInfo.getRegionName());
  catalogTracker.waitForMetaServerConnectionDefault().
    delete(CatalogTracker.META_REGION, delete);
  LOG.info("Deleted region " + regionInfo.getRegionNameAsString() + " from META");
}

/**
 * Deletes daughter reference in offlined split parent.
 * @param catalogTracker catalog tracker
 * @param parent Parent row we're to remove daughter reference from
 * @param qualifier SplitA or SplitB daughter to remove
 * @param daughter daughter region whose reference is removed (used for logging)
 * @throws NotAllMetaRegionsOnlineException if .META. is not available
 * @throws IOException if problem connecting or updating meta
 */
public static void deleteDaughterReferenceInParent(CatalogTracker catalogTracker,
    final HRegionInfo parent, final byte [] qualifier,
    final HRegionInfo daughter)
throws NotAllMetaRegionsOnlineException, IOException {
  Delete delete = new Delete(parent.getRegionName());
  delete.deleteColumns(HConstants.CATALOG_FAMILY, qualifier);
  catalogTracker.waitForMetaServerConnectionDefault().
    delete(CatalogTracker.META_REGION, delete);
  LOG.info("Deleted daughter reference " + daughter.getRegionNameAsString() +
    ", qualifier=" + Bytes.toString(qualifier) + ", from parent " +
    parent.getRegionNameAsString());
}

/**
 * Updates the region information for the specified region in META.
 * @param catalogTracker catalog tracker
 * @param regionInfo region to be updated in META
 * @throws IOException if problem connecting or updating meta
 */
public static void updateRegionInfo(CatalogTracker catalogTracker,
    HRegionInfo regionInfo)
throws IOException {
  Put put = new Put(regionInfo.getRegionName());
  addRegionInfo(put, regionInfo);
  catalogTracker.waitForMetaServerConnectionDefault().put(
    CatalogTracker.META_REGION, put);
  LOG.info("Updated region " + regionInfo.getRegionNameAsString() + " in META");
}

// Adds the serialized HRegionInfo to the Put's info column; returns the same Put for chaining.
private static Put addRegionInfo(final Put p, final HRegionInfo hri)
throws IOException {
  p.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER,
    Writables.getBytes(hri));
  return p;
}

// Adds server host:port and startcode columns to the Put; returns the same Put for chaining.
private static Put addLocation(final Put p, final HServerInfo hsi) {
  p.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
    Bytes.toBytes(hsi.getHostnamePort()));
  p.add(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
    Bytes.toBytes(hsi.getStartCode()));
  return p;
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.federation.resolver.order; import static org.apache.hadoop.hdfs.server.federation.resolver.order.AvailableSpaceResolver.BALANCER_PREFERENCE_DEFAULT; import static org.apache.hadoop.hdfs.server.federation.resolver.order.AvailableSpaceResolver.BALANCER_PREFERENCE_KEY; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.server.federation.resolver.MultipleDestinationMountTableResolver; import org.apache.hadoop.hdfs.server.federation.resolver.PathLocation; import org.apache.hadoop.hdfs.server.federation.resolver.order.AvailableSpaceResolver.SubclusterAvailableSpace; import org.apache.hadoop.hdfs.server.federation.resolver.order.AvailableSpaceResolver.SubclusterSpaceComparator; import org.apache.hadoop.hdfs.server.federation.router.Router; 
import org.apache.hadoop.hdfs.server.federation.store.MembershipStore;
import org.apache.hadoop.hdfs.server.federation.store.StateStoreService;
import org.apache.hadoop.hdfs.server.federation.store.protocol.GetNamenodeRegistrationsRequest;
import org.apache.hadoop.hdfs.server.federation.store.protocol.GetNamenodeRegistrationsResponse;
import org.apache.hadoop.hdfs.server.federation.store.records.MembershipState;
import org.apache.hadoop.hdfs.server.federation.store.records.MembershipStats;
import org.apache.hadoop.hdfs.server.federation.store.records.MountTable;
import org.apache.hadoop.hdfs.server.federation.store.records.impl.pb.MembershipStatsPBImpl;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;

/**
 * Test the {@link AvailableSpaceResolver}.
 */
public class TestAvailableSpaceResolver {

  // Number of mocked subclusters; subcluster i reports i bytes of available space.
  private static final int SUBCLUSTER_NUM = 10;

  @Test
  public void testResolverWithNoPreference() throws IOException {
    MultipleDestinationMountTableResolver mountTableResolver =
        mockAvailableSpaceResolver(1.0f);

    // Since we don't have any preference, it will
    // always choose the maximum-available-space subcluster.
    PathLocation loc = mountTableResolver.getDestinationForPath("/space");
    assertEquals("subcluster9",
        loc.getDestinations().get(0).getNameserviceId());
    loc = mountTableResolver.getDestinationForPath("/space/subdir");
    assertEquals("subcluster9",
        loc.getDestinations().get(0).getNameserviceId());
  }

  @Test
  public void testResolverWithDefaultPreference() throws IOException {
    MultipleDestinationMountTableResolver mountTableResolver =
        mockAvailableSpaceResolver(BALANCER_PREFERENCE_DEFAULT);

    int retries = 10;
    int retryTimes = 0;
    // There is chance we won't always choose the
    // maximum-available-space subcluster.
    for (retryTimes = 0; retryTimes < retries; retryTimes++) {
      PathLocation loc = mountTableResolver.getDestinationForPath("/space");
      if (!"subcluster9"
          .equals(loc.getDestinations().get(0).getNameserviceId())) {
        break;
      }
    }
    // If every retry picked subcluster9, the default preference behaved
    // like a pure max-space choice, which is not expected.
    assertNotEquals(retries, retryTimes);
  }

  /**
   * Mock the available space based resolver.
   *
   * @param balancerPreference The balancer preference for the resolver.
   * @throws IOException if the mock mount table cannot be built.
   * @return MultipleDestinationMountTableResolver instance.
   */
  @SuppressWarnings("unchecked")
  private MultipleDestinationMountTableResolver mockAvailableSpaceResolver(
      float balancerPreference) throws IOException {
    Configuration conf = new Configuration();
    conf.setFloat(BALANCER_PREFERENCE_KEY, balancerPreference);
    Router router = mock(Router.class);
    StateStoreService stateStore = mock(StateStoreService.class);
    MembershipStore membership = mock(MembershipStore.class);
    when(router.getStateStore()).thenReturn(stateStore);
    when(stateStore.getRegisteredRecordStore(any(Class.class)))
        .thenReturn(membership);
    GetNamenodeRegistrationsResponse response =
        GetNamenodeRegistrationsResponse.newInstance();
    // Set the mapping for each client
    List<MembershipState> records = new LinkedList<>();
    for (int i = 0; i < SUBCLUSTER_NUM; i++) {
      // Subcluster i advertises i bytes of available space.
      records.add(newMembershipState("subcluster" + i, i));
    }
    response.setNamenodeMemberships(records);
    when(membership
        .getNamenodeRegistrations(any(GetNamenodeRegistrationsRequest.class)))
            .thenReturn(response);

    // construct available space resolver
    AvailableSpaceResolver resolver = new AvailableSpaceResolver(conf, router);
    MultipleDestinationMountTableResolver mountTableResolver =
        new MultipleDestinationMountTableResolver(conf, router);
    mountTableResolver.addResolver(DestinationOrder.SPACE, resolver);

    // We point /space to subclusters [0,..9] with the SPACE order
    Map<String, String> destinations = new HashMap<>();
    for (int i = 0; i < SUBCLUSTER_NUM; i++) {
      destinations.put("subcluster" + i, "/space");
    }
    MountTable spaceEntry = MountTable.newInstance("/space", destinations);
    spaceEntry.setDestOrder(DestinationOrder.SPACE);
    mountTableResolver.addEntry(spaceEntry);

    return mountTableResolver;
  }

  /** Builds a membership record with the given nameservice id and available space. */
  public static MembershipState newMembershipState(String nameservice,
      long availableSpace) {
    MembershipState record = MembershipState.newInstance();
    record.setNameserviceId(nameservice);
    MembershipStats stats = new MembershipStatsPBImpl();
    stats.setAvailableSpace(availableSpace);
    record.setStats(stats);
    return record;
  }

  @Test
  public void testSubclusterSpaceComparator() {
    // preference 0.0/1.0 yield a fully ordered rank; 0.5 and the default do not.
    verifyRank(0.0f, true, false);
    verifyRank(1.0f, true, true);
    verifyRank(0.5f, false, false);
    verifyRank(BALANCER_PREFERENCE_DEFAULT, false, false);

    // test for illegal cases
    try {
      verifyRank(2.0f, false, false);
      fail("Subcluster comparison should be failed.");
    } catch (IllegalArgumentException e) {
      GenericTestUtils.assertExceptionContains(
          "The balancer preference value should be in the range 0.0 - 1.0", e);
    }

    try {
      verifyRank(-1.0f, false, false);
      fail("Subcluster comparison should be failed.");
    } catch (IllegalArgumentException e) {
      GenericTestUtils.assertExceptionContains(
          "The balancer preference value should be in the range 0.0 - 1.0", e);
    }
  }

  /**
   * Verify result rank with {@link SubclusterSpaceComparator}.
   * @param balancerPreference The balancer preference used
   *          in {@link SubclusterSpaceComparator}.
   * @param shouldOrdered The result rank should be ordered.
   * @param isDesc If the rank result is in a descending order.
   */
  private void verifyRank(float balancerPreference, boolean shouldOrdered,
      boolean isDesc) {
    List<SubclusterAvailableSpace> subclusters = new LinkedList<>();
    for (int i = 0; i < SUBCLUSTER_NUM; i++) {
      subclusters.add(new SubclusterAvailableSpace("subcluster" + i, i));
    }

    // shuffle the cluster list if we expect rank to be ordered
    if (shouldOrdered) {
      Collections.shuffle(subclusters);
    }

    SubclusterSpaceComparator comparator = new SubclusterSpaceComparator(
        balancerPreference);
    Collections.sort(subclusters, comparator);

    int i = SUBCLUSTER_NUM - 1;
    for (; i >= 0; i--) {
      SubclusterAvailableSpace cluster = subclusters
          .get(SUBCLUSTER_NUM - 1 - i);

      if (shouldOrdered) {
        if (isDesc) {
          assertEquals("subcluster" + i, cluster.getNameserviceId());
          assertEquals(i, cluster.getAvailableSpace());
        } else {
          assertEquals("subcluster" + (SUBCLUSTER_NUM - 1 - i),
              cluster.getNameserviceId());
          assertEquals(SUBCLUSTER_NUM - 1 - i, cluster.getAvailableSpace());
        }
      } else {
        // If catch one cluster is not in ordered, that's expected behavior.
        if (!cluster.getNameserviceId().equals("subcluster" + i)
            && cluster.getAvailableSpace() != i) {
          break;
        }
      }
    }

    // The var i won't reach to 0 since cluster list won't be completely
    // ordered.
    if (!shouldOrdered) {
      assertNotEquals(0, i);
    }
    subclusters.clear();
  }
}
package org.hisp.dhis.system.deletion; /* * Copyright (c) 2004-2017, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import org.hisp.dhis.attribute.Attribute; import org.hisp.dhis.attribute.AttributeValue; import org.hisp.dhis.chart.Chart; import org.hisp.dhis.color.Color; import org.hisp.dhis.color.ColorSet; import org.hisp.dhis.constant.Constant; import org.hisp.dhis.dashboard.Dashboard; import org.hisp.dhis.dashboard.DashboardItem; import org.hisp.dhis.dataapproval.DataApprovalLevel; import org.hisp.dhis.dataapproval.DataApprovalWorkflow; import org.hisp.dhis.dataelement.CategoryOptionGroup; import org.hisp.dhis.dataelement.CategoryOptionGroupSet; import org.hisp.dhis.dataelement.DataElement; import org.hisp.dhis.dataelement.DataElementCategory; import org.hisp.dhis.dataelement.DataElementCategoryCombo; import org.hisp.dhis.dataelement.DataElementCategoryOption; import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo; import org.hisp.dhis.dataelement.DataElementGroup; import org.hisp.dhis.dataelement.DataElementGroupSet; import org.hisp.dhis.dataentryform.DataEntryForm; import org.hisp.dhis.dataset.CompleteDataSetRegistration; import org.hisp.dhis.dataset.DataSet; import org.hisp.dhis.dataset.LockException; import org.hisp.dhis.dataset.Section; import org.hisp.dhis.datavalue.DataValue; import org.hisp.dhis.document.Document; import org.hisp.dhis.eventchart.EventChart; import org.hisp.dhis.eventreport.EventReport; import org.hisp.dhis.expression.Expression; import org.hisp.dhis.i18n.locale.I18nLocale; import org.hisp.dhis.indicator.Indicator; import org.hisp.dhis.indicator.IndicatorGroup; import org.hisp.dhis.indicator.IndicatorGroupSet; import org.hisp.dhis.indicator.IndicatorType; import org.hisp.dhis.interpretation.Interpretation; import org.hisp.dhis.legend.Legend; import org.hisp.dhis.legend.LegendSet; import org.hisp.dhis.mapping.ExternalMapLayer; import org.hisp.dhis.mapping.Map; import org.hisp.dhis.mapping.MapView; import org.hisp.dhis.minmax.MinMaxDataElement; import org.hisp.dhis.option.Option; import org.hisp.dhis.option.OptionGroup; import 
org.hisp.dhis.option.OptionGroupSet; import org.hisp.dhis.option.OptionSet; import org.hisp.dhis.organisationunit.OrganisationUnit; import org.hisp.dhis.organisationunit.OrganisationUnitGroup; import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet; import org.hisp.dhis.organisationunit.OrganisationUnitLevel; import org.hisp.dhis.period.Period; import org.hisp.dhis.period.RelativePeriods; import org.hisp.dhis.program.Program; import org.hisp.dhis.program.ProgramDataElement; import org.hisp.dhis.program.ProgramIndicator; import org.hisp.dhis.program.ProgramIndicatorGroup; import org.hisp.dhis.program.ProgramInstance; import org.hisp.dhis.program.ProgramStage; import org.hisp.dhis.program.ProgramStageDataElement; import org.hisp.dhis.program.ProgramStageInstance; import org.hisp.dhis.program.ProgramStageSection; import org.hisp.dhis.program.ProgramTrackedEntityAttribute; import org.hisp.dhis.program.message.ProgramMessage; import org.hisp.dhis.program.notification.ProgramNotificationTemplate; import org.hisp.dhis.programrule.ProgramRule; import org.hisp.dhis.programrule.ProgramRuleAction; import org.hisp.dhis.programrule.ProgramRuleVariable; import org.hisp.dhis.relationship.Relationship; import org.hisp.dhis.relationship.RelationshipType; import org.hisp.dhis.report.Report; import org.hisp.dhis.reporttable.ReportTable; import org.hisp.dhis.security.oauth2.OAuth2Client; import org.hisp.dhis.sqlview.SqlView; import org.hisp.dhis.trackedentity.TrackedEntity; import org.hisp.dhis.trackedentity.TrackedEntityAttribute; import org.hisp.dhis.trackedentity.TrackedEntityAttributeGroup; import org.hisp.dhis.trackedentity.TrackedEntityInstance; import org.hisp.dhis.trackedentityattributevalue.TrackedEntityAttributeValue; import org.hisp.dhis.trackedentitycomment.TrackedEntityComment; import org.hisp.dhis.trackedentitydatavalue.TrackedEntityDataValue; import org.hisp.dhis.user.User; import org.hisp.dhis.user.UserAuthorityGroup; import org.hisp.dhis.user.UserCredentials; 
import org.hisp.dhis.user.UserGroup; import org.hisp.dhis.user.UserSetting; import org.hisp.dhis.validation.ValidationCriteria; import org.hisp.dhis.validation.ValidationRule; import org.hisp.dhis.validation.ValidationRuleGroup; /** * A DeletionHandler should override methods for objects that, when deleted, * will affect the current object in any way. Eg. a DeletionHandler for * DataElementGroup should override the deleteDataElement(..) method which * should remove the DataElement from all DataElementGroups. Also, it should * override the allowDeleteDataElement() method and return a non-null String value * if there exists objects that are dependent on the DataElement and are * considered not be deleted. The return value could be a hint for which object * is denying the delete, like the name. * * @author Lars Helge Overland */ public abstract class DeletionHandler { protected static final String ERROR = ""; // ------------------------------------------------------------------------- // Abstract methods // ------------------------------------------------------------------------- protected abstract String getClassName(); // ------------------------------------------------------------------------- // Public methods // ------------------------------------------------------------------------- public void deleteAttribute( Attribute attribute ) { } public String allowDeleteAttribute( Attribute attribute ) { return null; } public void deleteAttributeValue( AttributeValue attributeValue ) { } public String allowDeleteAttributeValue( AttributeValue attributeValue ) { return null; } public void deleteChart( Chart chart ) { } public String allowDeleteChart( Chart chart ) { return null; } public void deleteDataApprovalLevel( DataApprovalLevel dataApprovalLevel ) { } public String allowDeleteDataApprovalLevel( DataApprovalLevel dataApprovalLevel ) { return null; } public void deleteDataApprovalWorkflow( DataApprovalWorkflow workflow ) { } public String 
allowDeleteDataApprovalWorkflow( DataApprovalWorkflow workflow ) { return null; } public void deleteDataElement( DataElement dataElement ) { } public String allowDeleteDataElement( DataElement dataElement ) { return null; } public void deleteDataElementGroup( DataElementGroup dataElementGroup ) { } public String allowDeleteDataElementGroup( DataElementGroup dataElementGroup ) { return null; } public void deleteDataElementGroupSet( DataElementGroupSet dataElementGroupSet ) { } public String allowDeleteDataElementGroupSet( DataElementGroupSet dataElementGroupSet ) { return null; } public void deleteDataElementCategory( DataElementCategory category ) { } public String allowDeleteDataElementCategory( DataElementCategory category ) { return null; } public void deleteDataElementCategoryOption( DataElementCategoryOption categoryOption ) { } public String allowDeleteDataElementCategoryOption( DataElementCategoryOption categoryOption ) { return null; } public void deleteDataElementCategoryCombo( DataElementCategoryCombo categoryCombo ) { } public String allowDeleteDataElementCategoryCombo( DataElementCategoryCombo categoryCombo ) { return null; } public void deleteDataElementCategoryOptionCombo( DataElementCategoryOptionCombo categoryOptionCombo ) { } public void deleteProgramMessage( ProgramMessage programMessage ) { } public String allowDeleteProgramMessage( ProgramMessage programMessage ) { return null; } public String allowDeleteDataElementCategoryOptionCombo( DataElementCategoryOptionCombo categoryOptionCombo ) { return null; } public void deleteDataSet( DataSet dataSet ) { } public String allowDeleteDataSet( DataSet dataSet ) { return null; } public void deleteSection( Section section ) { } public String allowDeleteSection( Section section ) { return null; } public void deleteCompleteDataSetRegistration( CompleteDataSetRegistration registration ) { } public String allowDeleteCompleteDataSetRegistration( CompleteDataSetRegistration registration ) { return null; } 
public void deleteDataValue( DataValue dataValue ) { } public String allowDeleteDataValue( DataValue dataValue ) { return null; } public void deleteExpression( Expression expression ) { } public String allowDeleteExpression( Expression expression ) { return null; } public void deleteMinMaxDataElement( MinMaxDataElement minMaxDataElement ) { } public String allowDeleteMinMaxDataElement( MinMaxDataElement minMaxDataElement ) { return null; } public void deleteIndicator( Indicator indicator ) { } public String allowDeleteIndicator( Indicator indicator ) { return null; } public void deleteIndicatorGroup( IndicatorGroup indicatorGroup ) { } public String allowDeleteIndicatorGroup( IndicatorGroup indicatorGroup ) { return null; } public void deleteIndicatorType( IndicatorType indicatorType ) { } public String allowDeleteIndicatorType( IndicatorType indicatorType ) { return null; } public void deleteIndicatorGroupSet( IndicatorGroupSet indicatorGroupSet ) { } public String allowDeleteIndicatorGroupSet( IndicatorGroupSet indicatorGroupSet ) { return null; } public void deletePeriod( Period period ) { } public String allowDeletePeriod( Period period ) { return null; } public void deleteRelativePeriods( RelativePeriods relativePeriods ) { } public String allowDeleteRelativePeriods( RelativePeriods relativePeriods ) { return null; } public void deleteValidationRule( ValidationRule validationRule ) { } public String allowDeleteValidationRule( ValidationRule validationRule ) { return null; } public void deleteValidationRuleGroup( ValidationRuleGroup validationRuleGroup ) { } public String allowDeleteValidationRuleGroup( ValidationRuleGroup validationRuleGroup ) { return null; } public void deleteDataEntryForm( DataEntryForm form ) { } public String allowDeleteDataEntryForm( DataEntryForm form ) { return null; } public void deleteOrganisationUnit( OrganisationUnit unit ) { } public String allowDeleteOrganisationUnit( OrganisationUnit unit ) { return null; } public void 
deleteOrganisationUnitGroup( OrganisationUnitGroup group ) { } public String allowDeleteOrganisationUnitGroup( OrganisationUnitGroup group ) { return null; } public void deleteOrganisationUnitGroupSet( OrganisationUnitGroupSet groupSet ) { } public String allowDeleteOrganisationUnitGroupSet( OrganisationUnitGroupSet groupSet ) { return null; } public void deleteOrganisationUnitLevel( OrganisationUnitLevel level ) { } public String allowDeleteOrganisationUnitLevel( OrganisationUnitLevel level ) { return null; } public void deleteReport( Report report ) { } public String allowDeleteReport( Report report ) { return null; } public void deleteReportTable( ReportTable reportTable ) { } public String allowDeleteReportTable( ReportTable reportTable ) { return null; } public void deleteUser( User user ) { } public String allowDeleteUser( User user ) { return null; } public void deleteUserCredentials( UserCredentials userCredentials ) { } public String allowDeleteUserCredentials( UserCredentials userCredentials ) { return null; } public void deleteUserAuthorityGroup( UserAuthorityGroup authorityGroup ) { } public String allowDeleteUserAuthorityGroup( UserAuthorityGroup authorityGroup ) { return null; } public String allowDeleteUserGroup( UserGroup userGroup ) { return null; } public void deleteUserGroup( UserGroup userGroup ) { } public void deleteUserSetting( UserSetting userSetting ) { } public String allowDeleteUserSetting( UserSetting userSetting ) { return null; } public void deleteDocument( Document document ) { } public String allowDeleteDocument( Document document ) { return null; } public void deleteLegend( Legend mapLegend ) { } public String allowDeleteLegend( Legend mapLegend ) { return null; } public void deleteLegendSet( LegendSet legendSet ) { } public String allowDeleteLegendSet( LegendSet legendSet ) { return null; } public void deleteMap( Map map ) { } public String allowDeleteMap( Map map ) { return null; } public void deleteExternalMapLayer( 
ExternalMapLayer externalMapLayer ) { } public String allowDeleteExternalMapLayer( ExternalMapLayer externalMapLayer ) { return null; } public void deleteMapView( MapView mapView ) { } public String allowDeleteMapView( MapView mapView ) { return null; } public void deleteInterpretation( Interpretation interpretation ) { } public String allowDeleteIntepretation( Interpretation interpretation ) { return null; } public void deleteTrackedEntityInstance( TrackedEntityInstance entityInstance ) { } public String allowDeleteTrackedEntityInstance( TrackedEntityInstance entityInstance ) { return null; } public void deleteTrackedEntityComment( TrackedEntityComment entityComment ) { } public String allowDeleteTrackedEntityComment( TrackedEntityComment entityComment ) { return null; } public String allowDeleteTrackedEntityAttribute( TrackedEntityAttribute attribute ) { return null; } public void deleteTrackedEntityAttribute( TrackedEntityAttribute attribute ) { } public String allowDeleteTrackedEntityAttributeValue( TrackedEntityAttributeValue attributeValue ) { return null; } public void deleteTrackedEntityAttributeValue( TrackedEntityAttributeValue attributeValue ) { } public String allowDeleteTrackedEntityAttributeGroup( TrackedEntityAttributeGroup attributeGroup ) { return null; } public void deleteTrackedEntityAttributeGroup( TrackedEntityAttributeGroup attributeGroup ) { } public String allowDeleteRelationship( Relationship relationship ) { return null; } public void deleteRelationship( Relationship relationship ) { } public String allowDeleteRelationshipType( RelationshipType relationshipType ) { return null; } public void deleteRelationshipType( RelationshipType relationshipType ) { } public String allowDeleteProgram( Program program ) { return null; } public void deleteProgram( Program program ) { } public String allowDeleteProgramInstance( ProgramInstance programInstance ) { return null; } public void deleteProgramInstance( ProgramInstance programInstance ) { } public 
String allowDeleteProgramStage( ProgramStage programStage ) { return null; } public void deleteProgramStage( ProgramStage programStage ) { } public void deleteProgramStageSection( ProgramStageSection programStageSection ) { } public String allowDeleteProgramStageSection( ProgramStageSection programStageSection ) { return null; } public String allowDeleteProgramStageInstance( ProgramStageInstance programStageInstance ) { return null; } public void deleteProgramStageInstance( ProgramStageInstance programStageInstance ) { } public void allowDeleteProgramNotificationTemplate( ProgramNotificationTemplate programNotificationTemplate ) { } public void deleteProgramNotificationTemplate( ProgramNotificationTemplate programNotificationTemplate ) { } public String allowDeleteProgramRule( ProgramRule programRule ) { return null; } public void deleteProgramRule( ProgramRule programRule ) { } public String allowDeleteProgramRuleVariable( ProgramRuleVariable programRuleVariable ) { return null; } public void deleteProgramRuleVariable( ProgramRuleVariable programRuleVariable ) { } public String allowDeleteProgramRuleAction( ProgramRuleAction programRuleAction ) { return null; } public void deleteProgramRuleAction( ProgramRuleAction programRuleAction ) { } public String allowDeleteProgramStageDataElement( ProgramStageDataElement programStageDataElement ) { return null; } public void deleteProgramStageDataElement( ProgramStageDataElement programStageDataElement ) { } public String allowDeleteTrackedEntityDataValue( TrackedEntityDataValue dataValue ) { return null; } public void deleteTrackedEntityDataValue( TrackedEntityDataValue dataValue ) { } public void deleteProgramIndicator( ProgramIndicator programIndicator ) { } public String allowDeleteProgramIndicator( ProgramIndicator programIndicator ) { return null; } public void deleteProgramIndicatorGroup( ProgramIndicatorGroup programIndicatorGroup ) { } public String allowDeleteProgramIndicatorGroup( ProgramIndicatorGroup 
programIndicatorGroup ) { return null; } public String allowDeleteValidationCriteria( ValidationCriteria validationCriteria ) { return null; } public void deleteValidationCriteria( ValidationCriteria validationCriteria ) { } public String allowDeleteConstant( Constant constant ) { return null; } public void deleteConstant( Constant constant ) { } public String allowDeleteOptionSet( OptionSet optionSet ) { return null; } public void deleteOptionSet( OptionSet optionSet ) { } public String allowDeleteOptionGroupSet( OptionGroupSet optionGroupSet ) { return null; } public void deleteOptionGroupSet( OptionGroupSet optionGroupSet ) { } public String allowDeleteOptionGroup( OptionGroup optionGroup ) { return null; } public void deleteOptionGroup( OptionGroup optionGroup ) { } public String allowDeleteOption( Option option ) { return null; } public void deleteOption( Option optionSet ) { } public String allowDeleteLockException( LockException lockException ) { return null; } public void deleteLockException( LockException lockException ) { } public void deleteIntepretation( Interpretation interpretation ) { } public String allowDeleteInterpretation( Interpretation interpretation ) { return null; } public void deleteI18nLocale( I18nLocale i18nLocale ) { } public String allowDeleteI18nLocale( I18nLocale i18nLocale ) { return null; } public void deleteSqlView( SqlView sqlView ) { } public String allowDeleteSqlView( SqlView sqlView ) { return null; } public void deleteDashboard( Dashboard dashboard ) { } public String allowDeleteDashboard( Dashboard dashboard ) { return null; } public void deleteDashboardItem( DashboardItem dashboardItem ) { } public String allowDeleteDashboardItem( DashboardItem dashboardItem ) { return null; } public void deleteCategoryOptionGroup( CategoryOptionGroup categoryOptionGroup ) { } public String allowDeleteCategoryOptionGroup( CategoryOptionGroup categoryOptionGroup ) { return null; } public void deleteCategoryOptionGroupSet( 
CategoryOptionGroupSet categoryOptionGroupSet ) { } public String allowDeleteCategoryOptionGroupSet( CategoryOptionGroupSet categoryOptionGroupSet ) { return null; } public void deleteTrackedEntity( TrackedEntity trackedEntity ) { } public String allowDeleteTrackedEntity( TrackedEntity trackedEntity ) { return null; } public void deleteEventReport( EventReport eventReport ) { } public String allowDeleteEventReport( EventReport eventReport ) { return null; } public void deleteEventChart( EventChart eventChart ) { } public String allowDeleteEventChart( EventChart eventChart ) { return null; } public void deleteOAuth2Client( OAuth2Client oAuth2Client ) { } public String allowDeleteOAuth2Client( OAuth2Client oAuth2Client ) { return null; } public String allowDeleteProgramDataElement( ProgramDataElement programDataElement ) { return null; } public void deleteProgramDataElement( ProgramDataElement programDataElement ) { } public void deleteProgramTrackedEntityAttribute( ProgramTrackedEntityAttribute attribute ) { } public String allowDeleteProgramTrackedEntityAttribute( ProgramTrackedEntityAttribute attribute ) { return null; } public String allowDeleteColorSet( ColorSet colorSet ) { return null; } public void deleteColorSet( ColorSet colorSet ) { } public String allowDeleteColor( Color color ) { return null; } public void deleteColor( Color color ) { } }
package analyzer.level2;

import static org.junit.Assert.assertEquals;

import org.junit.Before;
import org.junit.Test;
import util.logging.L2Logger;

import java.util.logging.Logger;

/**
 * Success tests for tracking multi-dimensional arrays in the object map of
 * {@link HandleStmt}. Each test mirrors the instrumentation calls that would
 * be emitted for the Jimple representation of the Java source quoted in the
 * inline comments.
 */
public class MultiArraySuccess {

    Logger logger = L2Logger.getLogger();

    @Before
    public void init() {
        HandleStmt.init();
    }

    /**
     * Registers a 2D array in the object map and builds a 3D array the way
     * Jimple would, element by element.
     */
    @Test
    public void createArray() {
        logger.info("createArray success test started");
        HandleStmt hs = new HandleStmt();
        hs.initHandleStmtUtils(false, 0);

        String[][] twoD = new String[2][2];
        hs.addArrayToObjectMap(twoD);
        assertEquals(1, hs.getNumberOfElementsInObjectMap());
        assertEquals(2, hs.getNumberOfFieldsInObjectMap(twoD));

        /*
         * The following is the Jimple representation of:
         * String[][][] threeD = new String[][][] {{{"e"}},{{"f"}},{{"g"}}};
         */
        String[][][] threeD = new String[3][][];
        String[][] tmp1 = new String[1][];
        String[][] tmp2 = new String[1][];
        String[][] tmp3 = new String[1][];
        String[] tmp4 = new String[1];
        String[] tmp5 = new String[1];
        String[] tmp6 = new String[1];
        tmp4[0] = "e";
        tmp1[0] = tmp4;
        threeD[0] = tmp1;
        tmp5[0] = "f";
        tmp2[0] = tmp5;
        threeD[1] = tmp2;
        tmp6[0] = "g";
        tmp3[0] = tmp6;
        threeD[2] = tmp3;

        hs.close();
        logger.info("createArray success test finished");
    }

    /**
     * Verifies that inner arrays assigned into a registered outer array are
     * tracked, and that the outer local stays at the bottom security level.
     */
    @Test
    public void findNewInstancesOfElements() {
        // FIX: log messages previously said "createArray" (copy-paste).
        logger.info("findNewInstancesOfElements success test started");
        HandleStmt hs = new HandleStmt();
        hs.initHandleStmtUtils(false, 0);

        String[][] twoD = new String[2][2];
        hs.addArrayToObjectMap(twoD);
        hs.addLocal("String[][]_twoD");
        assertEquals(1, hs.getNumberOfElementsInObjectMap());
        assertEquals(2, hs.getNumberOfFieldsInObjectMap(twoD));

        /*
         * The following is the Jimple representation of:
         * twoD[0][0] = "first element";
         * twoD[0][1] = "second element";
         */
        hs.addLocal("String[]_tmp1");
        String[] tmp1 = new String[1];
        hs.addArrayToObjectMap(tmp1);
        hs.checkArrayWithGlobalPC(tmp1, Integer.toString(0), "String[]_tmp1");
        hs.setLevelOfArrayField(tmp1, Integer.toString(0), "String[]_tmp1");
        tmp1[0] = "first element";

        hs.addLocal("String[]_tmp2");
        String[] tmp2 = new String[1];
        hs.addArrayToObjectMap(tmp2);
        // FIX: these two calls previously passed tmp1 (copy-paste) while naming
        // "String[]_tmp2"; the pattern used everywhere else in this file is
        // check/set on the same array that is written on the next line.
        hs.checkArrayWithGlobalPC(tmp2, Integer.toString(0), "String[]_tmp2");
        hs.setLevelOfArrayField(tmp2, Integer.toString(0), "String[]_tmp2");
        tmp2[0] = "second element";

        hs.joinLevelOfLocalAndAssignmentLevel("String[]_tmp1");
        hs.checkArrayWithGlobalPC(twoD, Integer.toString(0), "String[][]_twoD");
        hs.setLevelOfArrayField(twoD, Integer.toString(0), "String[][]_twoD");
        twoD[0] = tmp1;

        hs.joinLevelOfLocalAndAssignmentLevel("String[]_tmp2");
        hs.checkArrayWithGlobalPC(twoD, Integer.toString(1), "String[][]_twoD");
        hs.setLevelOfArrayField(twoD, Integer.toString(1), "String[][]_twoD");
        twoD[1] = tmp2;

        assertEquals(CurrentSecurityDomain.bottom(), hs.getLocalLevel("String[][]_twoD"));

        hs.close();
        logger.info("findNewInstancesOfElements success test finished");
    }

    /**
     * Reads an element of a nested array, joining field levels into the
     * assigned local.
     */
    @Test
    public void readArray() {
        logger.info("readArray success test started");
        HandleStmt hs = new HandleStmt();
        hs.initHandleStmtUtils(false, 0);

        /*
         * x = a[i]
         * check x >= lpc
         * level(x) = (i, a, gpc, a_i)
         *
         * The following is the Jimple representation of:
         * String[][] arr = new String[][]{{"a"},{"b"}};
         * String x = arr[1][0];
         */
        String[][] arr = new String[2][];
        hs.addArrayToObjectMap(arr);
        hs.addLocal("String[][]_arr");
        String[] inner1 = new String[1];
        hs.addArrayToObjectMap(inner1);
        hs.addLocal("String[]_inner1");
        String[] inner2 = new String[1];
        hs.addArrayToObjectMap(inner2);
        hs.addLocal("String[]_inner2");

        hs.checkArrayWithGlobalPC(inner1, Integer.toString(0), "String[]_inner1");
        hs.setLevelOfArrayField(inner1, Integer.toString(0), "String[]_inner1");
        inner1[0] = "a";
        hs.checkArrayWithGlobalPC(inner2, Integer.toString(0), "String[]_inner2");
        hs.setLevelOfArrayField(inner2, Integer.toString(0), "String[]_inner2");
        inner2[0] = "b";

        hs.joinLevelOfLocalAndAssignmentLevel("String[]_inner1");
        hs.checkArrayWithGlobalPC(arr, Integer.toString(0), "String[][]_arr");
        hs.setLevelOfArrayField(arr, Integer.toString(0), "String[][]_arr");
        arr[0] = inner1;
        hs.joinLevelOfLocalAndAssignmentLevel("String[]_inner2");
        hs.checkArrayWithGlobalPC(arr, Integer.toString(1), "String[][]_arr");
        hs.setLevelOfArrayField(arr, Integer.toString(1), "String[][]_arr");
        arr[1] = inner2;

        hs.addLocal("String[]_tmp");
        hs.joinLevelOfArrayFieldAndAssignmentLevel(arr, Integer.toString(1));
        hs.checkLocalPC("String[]_tmp");
        hs.setLocalToCurrentAssingmentLevel("String[]_tmp");
        String[] tmp = arr[1];

        hs.addLocal("String_x");
        hs.joinLevelOfArrayFieldAndAssignmentLevel(tmp, Integer.toString(0));
        hs.checkLocalPC("String_x");
        hs.setLocalToCurrentAssingmentLevel("String_x");
        @SuppressWarnings("unused")
        String x = tmp[0];

        hs.close();
        logger.info("readArray success test finished");
    }

    /**
     * Writes an element of a nested array, checking the field level against
     * the global PC before the store.
     */
    @Test
    public void writeArray() {
        logger.info("writeArray success test started");
        HandleStmt hs = new HandleStmt();
        hs.initHandleStmtUtils(false, 0);

        /*
         * a[i] = x;
         * check a_i >= join(gpc, a, i)
         * level(a[i]) = join(a,i,x)
         *
         * The following is the Jimple representation of:
         * String[][] arr = new String[][]{{"a"},{"b"}};
         * String x = arr[1][0];
         */
        String[][] arr = new String[2][];
        hs.addLocal("String[][]_arr");
        hs.addArrayToObjectMap(arr);
        String[] inner1 = new String[1];
        hs.addLocal("String[]_inner1");
        hs.addArrayToObjectMap(inner1);
        String[] inner2 = new String[1];
        hs.addLocal("String[]_inner2");
        hs.addArrayToObjectMap(inner2);

        hs.checkArrayWithGlobalPC(inner1, Integer.toString(0), "String[]_inner1");
        hs.setLevelOfArrayField(inner1, Integer.toString(0), "String[]_inner1");
        inner1[0] = "a";
        hs.checkArrayWithGlobalPC(inner2, Integer.toString(0), "String[]_inner2");
        hs.setLevelOfArrayField(inner2, Integer.toString(0), "String[]_inner2");
        inner2[0] = "b";

        hs.joinLevelOfLocalAndAssignmentLevel("String[]_inner1");
        hs.checkArrayWithGlobalPC(arr, Integer.toString(0), "String[][]_arr");
        hs.setLevelOfArrayField(arr, Integer.toString(0), "String[][]_arr");
        arr[0] = inner1;
        hs.joinLevelOfLocalAndAssignmentLevel("String[]_inner2");
        hs.checkArrayWithGlobalPC(arr, Integer.toString(1), "String[][]_arr");
        hs.setLevelOfArrayField(arr, Integer.toString(1), "String[][]_arr");
        arr[1] = inner2;

        hs.joinLevelOfArrayFieldAndAssignmentLevel(arr, Integer.toString(0));
        hs.addLocal("String[]_tmp");
        hs.checkLocalPC("String[]_tmp");
        hs.setLocalToCurrentAssingmentLevel("String[]_tmp");
        String[] tmp = arr[0];

        hs.checkArrayWithGlobalPC(tmp, Integer.toString(0), "String[]_tmp");
        hs.setLevelOfArrayField(tmp, Integer.toString(0), "String[]_tmp");
        tmp[0] = "a";

        assertEquals("a", arr[0][0]);

        hs.close();
        logger.info("writeArray success test finished");
    }
}
package org.axonframework.spring.config;

import org.axonframework.commandhandling.AsynchronousCommandBus;
import org.axonframework.commandhandling.CommandBus;
import org.axonframework.commandhandling.CommandHandler;
import org.axonframework.commandhandling.SimpleCommandBus;
import org.axonframework.commandhandling.callbacks.FutureCallback;
import org.axonframework.commandhandling.model.AggregateIdentifier;
import org.axonframework.eventhandling.*;
import org.axonframework.eventhandling.saga.AssociationValue;
import org.axonframework.eventhandling.saga.SagaEventHandler;
import org.axonframework.eventhandling.saga.StartSaga;
import org.axonframework.eventhandling.saga.repository.SagaStore;
import org.axonframework.eventhandling.saga.repository.inmemory.InMemorySagaStore;
import org.axonframework.eventsourcing.EventSourcingHandler;
import org.axonframework.eventsourcing.eventstore.EventStorageEngine;
import org.axonframework.eventsourcing.eventstore.EventStore;
import org.axonframework.eventsourcing.eventstore.inmemory.InMemoryEventStorageEngine;
import org.axonframework.spring.stereotype.Aggregate;
import org.axonframework.spring.stereotype.Saga;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static org.axonframework.commandhandling.GenericCommandMessage.asCommandMessage;
import static org.axonframework.commandhandling.model.AggregateLifecycle.apply;
import static org.axonframework.eventhandling.GenericEventMessage.asEventMessage;
import static org.junit.Assert.*;

/**
 * Integration test verifying that the Axon auto-configurer wires the main
 * infrastructure components (event store, command bus, saga stores) and
 * registers aggregates, sagas, command handlers and event handlers declared in
 * the nested {@link Context} configuration. Components are autowired with
 * {@code required = false} so individual assertions below pinpoint which one
 * failed to be configured.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class SpringAxonAutoConfigurerTest {

    @Autowired(required = false)
    private EventStore eventStore;

    @Autowired(required = false)
    private EventBus eventBus;

    @Autowired(required = false)
    private CommandBus commandBus;

    // Two distinct SagaStore beans exist; qualifiers disambiguate them.
    // MySaga is bound to "customSagaStore", so events must land there only.
    @Qualifier("customSagaStore")
    @Autowired(required = false)
    private SagaStore<Object> customSagaStore;

    @Qualifier("sagaStore")
    @Autowired(required = false)
    private SagaStore<Object> sagaStore;

    @Autowired
    private org.axonframework.config.Configuration axonConfig;

    @Autowired
    private Context.MyEventHandler myEventHandler;

    @Autowired
    private Context.MyOtherEventHandler myOtherEventHandler;

    @Autowired
    private Context.MyListenerInvocationErrorHandler myListenerInvocationErrorHandler;

    @Autowired
    private ApplicationContext applicationContext;

    /** Smoke-checks that all core Axon components were created and wired. */
    @Test
    public void contextWiresMainComponents() throws Exception {
        assertNotNull(axonConfig);
        assertNotNull(axonConfig.eventBus());
        assertNotNull(eventBus);
        assertNotNull(eventStore);
        assertNotNull(commandBus);
        // The configured EventBus must double as the EventStore.
        assertTrue("Expected Axon to have configured an EventStore", eventBus instanceof EventStore);
        // The @Primary AsynchronousCommandBus bean must win over simpleCommandBus.
        assertTrue("Expected provided commandbus implementation", commandBus instanceof AsynchronousCommandBus);
        assertNotNull(axonConfig.repository(Context.MyAggregate.class));
    }

    /** Published events must reach both @EventHandler components. */
    @Test
    public void testEventHandlerIsRegistered() {
        eventBus.publish(asEventMessage("Testing 123"));
        assertNotNull("Expected EventBus to be wired", myEventHandler.eventBus);
        assertTrue(myEventHandler.received.contains("Testing 123"));
        assertTrue(myOtherEventHandler.received.contains("Testing 123"));
    }

    /** The saga must start on SomeEvent and persist only to its custom store. */
    @Test
    public void testSagaManagerIsRegistered() {
        eventBus.publish(asEventMessage(new SomeEvent("id")));
        assertTrue(Context.MySaga.events.contains("id"));
        assertEquals(1, customSagaStore.findSagas(Context.MySaga.class, new AssociationValue("id", "id")).size());
        assertEquals(0, sagaStore.findSagas(Context.MySaga.class, new AssociationValue("id", "id")).size());
    }

    @Test
    public void testWiresCommandHandler() {
        FutureCallback<Object, Object> callback = new FutureCallback<>();
        commandBus.dispatch(asCommandMessage("test"), callback);
        callback.getResult(1, TimeUnit.SECONDS);

        FutureCallback<Object, Object> callback2 = new FutureCallback<>();
        commandBus.dispatch(asCommandMessage("test"), callback2);
        commandBus.dispatch(asCommandMessage(1L), callback2);
        // NOTE(review): this awaits `callback` a second time instead of
        // `callback2` — looks like a copy-paste; the results of the second and
        // third dispatch are never awaited. Confirm intent before changing.
        callback.getResult(1, TimeUnit.SECONDS);

        Context.MyCommandHandler ch = applicationContext.getBean(Context.MyCommandHandler.class);
        assertTrue(ch.getCommands().contains("test"));
    }

    /** Exceptions from FailingEventHandler must reach the custom error handler. */
    @Test
    public void testListenerInvocationErrorHandler() {
        eventBus.publish(asEventMessage("Testing 123"));
        assertNotNull("Expected EventBus to be wired", myEventHandler.eventBus);
        assertFalse(myListenerInvocationErrorHandler.received.isEmpty());
    }

    // NOTE(review): the bare @Scope here uses the default (singleton) scope and
    // appears to be a no-op on a @Configuration class — confirm it is intended.
    @EnableAxon
    @Scope
    @Configuration
    public static class Context {

        // Marked @Primary so it is the CommandBus injected above; shut down on
        // context close via destroyMethod.
        @Primary
        @Bean(destroyMethod = "shutdown")
        public CommandBus commandBus() {
            return new AsynchronousCommandBus();
        }

        // Secondary bus, present only to prove @Primary selection works.
        @Bean
        public CommandBus simpleCommandBus() {
            return new SimpleCommandBus();
        }

        @Bean
        public EventStorageEngine eventStorageEngine() {
            return new InMemoryEventStorageEngine();
        }

        @Bean
        public SagaStore sagaStore() {
            return new InMemorySagaStore();
        }

        @Bean
        public SagaStore customSagaStore() {
            return new InMemorySagaStore();
        }

        /**
         * Event-sourced aggregate. Handlers take an extra Spring bean
         * parameter to verify parameter resolution/bean injection works.
         */
        @Aggregate
        public static class MyAggregate {
            @AggregateIdentifier
            private String id;

            @CommandHandler
            public void handle(Long command, MyEventHandler beanInjection) {
                assertNotNull(beanInjection);
                apply(command);
            }

            @EventSourcingHandler
            public void on(Long event, MyEventHandler beanInjection) {
                assertNotNull(beanInjection);
                this.id = Long.toString(event);
            }

            // String events are routed to external handlers, never sourced here.
            @EventSourcingHandler
            public void on(String event) {
                fail("Event Handler on aggregate shouldn't be invoked");
            }
        }

        @Component
        public static class MyCommandHandler {
            private List<String> commands = new ArrayList<>();

            @CommandHandler
            public void handle(String command) {
                commands.add(command);
            }

            public List<String> getCommands() {
                return commands;
            }
        }

        /** Saga explicitly bound to the "customSagaStore" bean. */
        @Saga(sagaStore = "customSagaStore")
        public static class MySaga {
            // static so the test can observe handled event ids.
            private static List<String> events = new ArrayList<>();

            @StartSaga
            @SagaEventHandler(associationProperty = "id")
            public void handle(SomeEvent event, MyEventHandler beanInjection) {
                assertNotNull(beanInjection);
                events.add(event.getId());
            }
        }

        @Component
        public static class MyEventHandler {
            public List<String> received = new ArrayList<>();
            private EventBus eventBus;

            @Autowired
            public MyEventHandler(EventBus eventBus) {
                this.eventBus = eventBus;
            }

            @EventHandler
            public void handle(String event, MyOtherEventHandler beanInjectionCheck) {
                assertNotNull(eventBus);
                assertNotNull(beanInjectionCheck);
                received.add(event);
            }
        }

        @Component
        public static class MyOtherEventHandler {
            public List<String> received = new ArrayList<>();

            @EventHandler
            public void handle(String event, MyEventHandler beanInjection) {
                assertNotNull(beanInjection);
                received.add(event);
            }
        }

        /** Always throws, to drive the ListenerInvocationErrorHandler test. */
        @Component
        public static class FailingEventHandler {
            @EventHandler
            public void handle(String event) {
                throw new RuntimeException();
            }
        }

        @Component
        public static class MyListenerInvocationErrorHandler implements ListenerInvocationErrorHandler {
            public List<Exception> received = new ArrayList<>();

            @Override
            public void onError(Exception exception, EventMessage<?> event, EventListener eventListener) throws Exception {
                received.add(exception);
            }
        }
    }

    /** Simple event payload carrying the saga association id. */
    public static class SomeEvent {
        private final String id;

        public SomeEvent(String id) {
            this.id = id;
        }

        public String getId() {
            return id;
        }
    }
}
/**
 * Copyright 2015-2018 The OpenZipkin Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package zipkin.storage.mysql;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.sql.DataSource;
import org.jooq.Condition;
import org.jooq.Cursor;
import org.jooq.DSLContext;
import org.jooq.Record;
import org.jooq.Row3;
import org.jooq.SelectConditionStep;
import org.jooq.SelectField;
import org.jooq.SelectOffsetStep;
import org.jooq.TableField;
import org.jooq.TableOnConditionStep;
import zipkin.Annotation;
import zipkin.BinaryAnnotation;
import zipkin.BinaryAnnotation.Type;
import zipkin.DependencyLink;
import zipkin.Endpoint;
import zipkin.internal.DependencyLinker;
import zipkin.internal.GroupByTraceId;
import zipkin.internal.Nullable;
import zipkin.internal.Pair;
import zipkin.storage.QueryRequest;
import zipkin.storage.SpanStore;
import zipkin.storage.mysql.internal.generated.tables.ZipkinAnnotations;
import zipkin2.Span;

import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.groupingBy;
import static org.jooq.impl.DSL.row;
import static zipkin.BinaryAnnotation.Type.STRING;
import static zipkin.Constants.CLIENT_ADDR;
import static zipkin.Constants.CLIENT_SEND;
import static zipkin.Constants.ERROR;
import static zipkin.Constants.SERVER_ADDR;
import static zipkin.Constants.SERVER_RECV;
import static zipkin.internal.Util.UTF_8;
import static zipkin.internal.Util.getDays;
import static zipkin.storage.mysql.internal.generated.tables.ZipkinAnnotations.ZIPKIN_ANNOTATIONS;
import static zipkin.storage.mysql.internal.generated.tables.ZipkinDependencies.ZIPKIN_DEPENDENCIES;
import static zipkin.storage.mysql.internal.generated.tables.ZipkinSpans.ZIPKIN_SPANS;

/**
 * jOOQ-backed {@link SpanStore} over the zipkin_spans / zipkin_annotations
 * MySQL schema. Queries are built dynamically from a {@link QueryRequest}.
 */
final class MySQLSpanStore implements SpanStore {

  private final DataSource datasource;
  // DSLContexts is a project helper — presumably creates a DSLContext per
  // connection; confirm against its declaration elsewhere in this package.
  private final DSLContexts context;
  private final Schema schema;
  // When false, trace ids are matched on the low 64 bits only — TODO confirm.
  private final boolean strictTraceId;

  MySQLSpanStore(DataSource datasource, DSLContexts context, Schema schema, boolean strictTraceId) {
    this.datasource = datasource;
    this.context = context;
    this.schema = schema;
    this.strictTraceId = strictTraceId;
  }

  /**
   * Builds an Endpoint from an annotation row, or returns null when the row
   * has no service name (i.e. no endpoint was recorded).
   */
  private Endpoint endpoint(Record a) {
    String serviceName = a.getValue(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME);
    if (serviceName == null) return null;
    return Endpoint.builder()
        .serviceName(serviceName)
        .port(a.getValue(ZIPKIN_ANNOTATIONS.ENDPOINT_PORT))
        .ipv4(a.getValue(ZIPKIN_ANNOTATIONS.ENDPOINT_IPV4))
        // IPV6 column may be absent in older schemas; maybeGet defaults to null.
        .ipv6(maybeGet(a, ZIPKIN_ANNOTATIONS.ENDPOINT_IPV6, null)).build();
  }

  /**
   * Translates a QueryRequest into a jOOQ query selecting distinct trace ids
   * ordered by most recent start timestamp. Each annotation / binary
   * annotation constraint adds a self-join on zipkin_annotations.
   */
  SelectOffsetStep<? extends Record> toTraceIdQuery(DSLContext context, QueryRequest request) {
    // Timestamps are stored in microseconds; endTs in the request is millis.
    long endTs = (request.endTs > 0 && request.endTs != Long.MAX_VALUE) ? request.endTs * 1000
        : System.currentTimeMillis() * 1000;

    TableOnConditionStep<?> table = ZIPKIN_SPANS.join(ZIPKIN_ANNOTATIONS)
        .on(schema.joinCondition(ZIPKIN_ANNOTATIONS));

    // One aliased join per required annotation key.
    int i = 0;
    for (String key : request.annotations) {
      ZipkinAnnotations aTable = ZIPKIN_ANNOTATIONS.as("a" + i++);
      table = maybeOnService(table.join(aTable)
          .on(schema.joinCondition(aTable))
          .and(aTable.A_KEY.eq(key)), aTable, request.serviceName);
    }

    // One aliased join per required binary annotation key=value pair.
    for (Map.Entry<String, String> kv : request.binaryAnnotations.entrySet()) {
      ZipkinAnnotations aTable = ZIPKIN_ANNOTATIONS.as("a" + i++);
      table = maybeOnService(table.join(aTable)
          .on(schema.joinCondition(aTable))
          .and(aTable.A_TYPE.eq(STRING.value))
          .and(aTable.A_KEY.eq(kv.getKey()))
          .and(aTable.A_VALUE.eq(kv.getValue().getBytes(UTF_8))), aTable, request.serviceName);
    }

    List<SelectField<?>> distinctFields = new ArrayList<>(schema.spanIdFields);
    distinctFields.add(ZIPKIN_SPANS.START_TS.max());
    SelectConditionStep<Record> dsl = context.selectDistinct(distinctFields)
        .from(table)
        .where(ZIPKIN_SPANS.START_TS.between(endTs - request.lookback * 1000, endTs));

    // jOOQ's and(..) mutates the condition step in place here, so the return
    // values are intentionally unused.
    if (request.serviceName != null) {
      dsl.and(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME.eq(request.serviceName));
    }

    if (request.spanName != null) {
      dsl.and(ZIPKIN_SPANS.NAME.eq(request.spanName));
    }

    if (request.minDuration != null && request.maxDuration != null) {
      dsl.and(ZIPKIN_SPANS.DURATION.between(request.minDuration, request.maxDuration));
    } else if (request.minDuration != null) {
      dsl.and(ZIPKIN_SPANS.DURATION.greaterOrEqual(request.minDuration));
    }
    return dsl
        .groupBy(schema.spanIdFields)
        .orderBy(ZIPKIN_SPANS.START_TS.max().desc()).limit(request.limit);
  }

  /** Adds a service-name filter to the joined annotation table when requested. */
  static TableOnConditionStep<?> maybeOnService(TableOnConditionStep<Record> table,
      ZipkinAnnotations aTable, String serviceName) {
    if (serviceName == null) return table;
    return table.and(aTable.ENDPOINT_SERVICE_NAME.eq(serviceName));
  }

  // NOTE: getTraces continues beyond this excerpt of the file.
  List<List<zipkin.Span>> getTraces(@Nullable QueryRequest request, @Nullable Long traceIdHigh, @Nullable Long
traceIdLow, boolean raw) { if (traceIdHigh != null && !strictTraceId) traceIdHigh = null; final Map<Pair<Long>, List<zipkin.Span>> spansWithoutAnnotations; final Map<Row3<Long, Long, Long>, List<Record>> dbAnnotations; try (Connection conn = datasource.getConnection()) { Condition traceIdCondition = request != null ? schema.spanTraceIdCondition(toTraceIdQuery(context.get(conn), request)) : schema.spanTraceIdCondition(traceIdHigh, traceIdLow); spansWithoutAnnotations = context.get(conn) .select(schema.spanFields) .from(ZIPKIN_SPANS).where(traceIdCondition) .stream() .map(r -> zipkin.Span.builder() .traceIdHigh(maybeGet(r, ZIPKIN_SPANS.TRACE_ID_HIGH, 0L)) .traceId(r.getValue(ZIPKIN_SPANS.TRACE_ID)) .name(r.getValue(ZIPKIN_SPANS.NAME)) .id(r.getValue(ZIPKIN_SPANS.ID)) .parentId(r.getValue(ZIPKIN_SPANS.PARENT_ID)) .timestamp(r.getValue(ZIPKIN_SPANS.START_TS)) .duration(r.getValue(ZIPKIN_SPANS.DURATION)) .debug(r.getValue(ZIPKIN_SPANS.DEBUG)) .build()) .collect( groupingBy((zipkin.Span s) -> Pair.create(s.traceIdHigh, s.traceId), LinkedHashMap::new, Collectors.<zipkin.Span>toList())); dbAnnotations = context.get(conn) .select(schema.annotationFields) .from(ZIPKIN_ANNOTATIONS) .where(schema.annotationsTraceIdCondition(spansWithoutAnnotations.keySet())) .orderBy(ZIPKIN_ANNOTATIONS.A_TIMESTAMP.asc(), ZIPKIN_ANNOTATIONS.A_KEY.asc()) .stream() .collect(groupingBy((Record a) -> row( maybeGet(a, ZIPKIN_ANNOTATIONS.TRACE_ID_HIGH, 0L), a.getValue(ZIPKIN_ANNOTATIONS.TRACE_ID), a.getValue(ZIPKIN_ANNOTATIONS.SPAN_ID) ), LinkedHashMap::new, Collectors.<Record>toList())); // LinkedHashMap preserves order while grouping } catch (SQLException e) { throw new RuntimeException("Error querying for " + request + ": " + e.getMessage()); } List<zipkin.Span> allSpans = new ArrayList<>(spansWithoutAnnotations.size()); for (List<zipkin.Span> spans : spansWithoutAnnotations.values()) { for (zipkin.Span s : spans) { zipkin.Span.Builder span = s.toBuilder(); Row3<Long, Long, Long> key = 
row(s.traceIdHigh, s.traceId, s.id); if (dbAnnotations.containsKey(key)) { for (Record a : dbAnnotations.get(key)) { Endpoint endpoint = endpoint(a); int type = a.getValue(ZIPKIN_ANNOTATIONS.A_TYPE); if (type == -1) { span.addAnnotation(Annotation.create( a.getValue(ZIPKIN_ANNOTATIONS.A_TIMESTAMP), a.getValue(ZIPKIN_ANNOTATIONS.A_KEY), endpoint)); } else { span.addBinaryAnnotation(BinaryAnnotation.create( a.getValue(ZIPKIN_ANNOTATIONS.A_KEY), a.getValue(ZIPKIN_ANNOTATIONS.A_VALUE), Type.fromValue(type), endpoint)); } } } allSpans.add(span.build()); } } return GroupByTraceId.apply(allSpans, strictTraceId, !raw); } static <T> T maybeGet(Record record, TableField<Record, T> field, T defaultValue) { if (record.fieldsRow().indexOf(field) < 0) { return defaultValue; } else { return record.get(field); } } @Override public List<List<zipkin.Span>> getTraces(QueryRequest request) { return getTraces(request, null, null, false); } @Override public List<zipkin.Span> getTrace(long traceId) { return getTrace(0L, traceId); } @Override public List<zipkin.Span> getTrace(long traceIdHigh, long traceIdLow) { List<List<zipkin.Span>> result = getTraces(null, traceIdHigh, traceIdLow, false); return result.isEmpty() ? null : result.get(0); } @Override public List<zipkin.Span> getRawTrace(long traceId) { return getRawTrace(0L, traceId); } @Override public List<zipkin.Span> getRawTrace(long traceIdHigh, long traceIdLow) { List<List<zipkin.Span>> result = getTraces(null, traceIdHigh, traceIdLow, true); return result.isEmpty() ? 
null : result.get(0); } @Override public List<String> getServiceNames() { try (Connection conn = datasource.getConnection()) { return context.get(conn) .selectDistinct(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME) .from(ZIPKIN_ANNOTATIONS) .where(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME.isNotNull() .and(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME.ne(""))) .fetch(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME); } catch (SQLException e) { throw new RuntimeException("Error querying for " + e + ": " + e.getMessage()); } } @Override public List<String> getSpanNames(String serviceName) { if (serviceName == null) return emptyList(); serviceName = serviceName.toLowerCase(); // service names are always lowercase! try (Connection conn = datasource.getConnection()) { return context.get(conn) .selectDistinct(ZIPKIN_SPANS.NAME) .from(ZIPKIN_SPANS) .join(ZIPKIN_ANNOTATIONS) .on(schema.joinCondition(ZIPKIN_ANNOTATIONS)) .where(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME.eq(serviceName)) .orderBy(ZIPKIN_SPANS.NAME) .fetch(ZIPKIN_SPANS.NAME); } catch (SQLException e) { throw new RuntimeException("Error querying for " + serviceName + ": " + e.getMessage()); } } @Override public List<DependencyLink> getDependencies(long endTs, @Nullable Long lookback) { try (Connection conn = datasource.getConnection()) { if (schema.hasPreAggregatedDependencies) { List<Date> days = getDays(endTs, lookback); List<DependencyLink> unmerged = context.get(conn) .select(schema.dependencyLinkFields) .from(ZIPKIN_DEPENDENCIES) .where(ZIPKIN_DEPENDENCIES.DAY.in(days)) .fetch((Record l) -> DependencyLink.builder() .parent(l.get(ZIPKIN_DEPENDENCIES.PARENT)) .child(l.get(ZIPKIN_DEPENDENCIES.CHILD)) .callCount(l.get(ZIPKIN_DEPENDENCIES.CALL_COUNT)) .errorCount(maybeGet(l, ZIPKIN_DEPENDENCIES.ERROR_COUNT, 0L)) .build() ); return DependencyLinker.merge(unmerged); } else { return aggregateDependencies(endTs, lookback, conn); } } catch (SQLException e) { throw new RuntimeException("Error querying dependencies for endTs " + endTs + " 
and lookback " + lookback + ": " + e.getMessage()); } } List<DependencyLink> aggregateDependencies(long endTs, @Nullable Long lookback, Connection conn) { endTs = endTs * 1000; // Lazy fetching the cursor prevents us from buffering the whole dataset in memory. Cursor<Record> cursor = context.get(conn) .selectDistinct(schema.dependencyLinkerFields) // left joining allows us to keep a mapping of all span ids, not just ones that have // special annotations. We need all span ids to reconstruct the trace tree. We need // the whole trace tree so that we can accurately skip local spans. .from(ZIPKIN_SPANS.leftJoin(ZIPKIN_ANNOTATIONS) // NOTE: we are intentionally grouping only on the low-bits of trace id. This buys time // for applications to upgrade to 128-bit instrumentation. .on(ZIPKIN_SPANS.TRACE_ID.eq(ZIPKIN_ANNOTATIONS.TRACE_ID).and( ZIPKIN_SPANS.ID.eq(ZIPKIN_ANNOTATIONS.SPAN_ID))) .and(ZIPKIN_ANNOTATIONS.A_KEY.in(CLIENT_SEND, CLIENT_ADDR, SERVER_RECV, SERVER_ADDR, ERROR))) .where(lookback == null ? ZIPKIN_SPANS.START_TS.lessOrEqual(endTs) : ZIPKIN_SPANS.START_TS.between(endTs - lookback * 1000, endTs)) // Grouping so that later code knows when a span or trace is finished. .groupBy(schema.dependencyLinkerGroupByFields).fetchLazy(); Iterator<Iterator<Span>> traces = new DependencyLinkV2SpanIterator.ByTraceId(cursor.iterator(), schema.hasTraceIdHigh); if (!traces.hasNext()) return Collections.emptyList(); DependencyLinker linker = new DependencyLinker(); while (traces.hasNext()) { linker.putTrace(traces.next()); } return linker.link(); } }
/**
 * Copyright (C) 2009-2015 Dell, Inc.
 * See annotations for authorship information
 *
 * ====================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ====================================================================
 */
package org.dasein.cloud.network;

import org.dasein.cloud.Taggable;

import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.HashMap;
import java.util.Map;

/**
 * Identifies a subnet of a VLAN in clouds that allow VLAN subnetting.
 * <p>
 * Review fix: this class overrode {@link #equals(Object)} without {@link #hashCode()}, breaking
 * the {@link Object} contract — equal subnets could land in different hash buckets. A
 * {@code hashCode()} consistent with the four identity fields compared by {@code equals()} has
 * been added; no other behavior changed.
 * @version 2013.04 Simplified the construction of subnets and annoted methods
 * @version 2013.02 added Networkable interface
 * @since unknown
 */
public class Subnet implements Networkable, Taggable {
    /**
     * Constructs a subnet from the required identity/state fields. Traffic defaults to IPv4;
     * use {@link #supportingTraffic(IPVersion...)} to override.
     */
    static public @Nonnull Subnet getInstance(@Nonnull String ownerId, @Nonnull String regionId, @Nonnull String vlanId, @Nonnull String subnetId, @Nonnull SubnetState currentState, @Nonnull String name, @Nonnull String description, @Nonnull String cidr) {
        Subnet subnet = new Subnet();

        subnet.providerOwnerId = ownerId;
        subnet.providerRegionId = regionId;
        subnet.providerVlanId = vlanId;
        subnet.providerSubnetId = subnetId;
        subnet.currentState = currentState;
        subnet.name = name;
        subnet.description = description;
        subnet.cidr = cidr;
        subnet.supportedTraffic = new IPVersion[] { IPVersion.IPV4 };
        return subnet;
    }

    private AllocationPool[]   allocationPools;
    private int                availableIpAddresses;
    private String             cidr;
    private SubnetState        currentState;
    private String             description;
    private RawAddress         gateway;
    private String             name;
    private String             providerDataCenterId;
    private String             providerOwnerId;
    private String             providerRegionId;
    private String             providerSubnetId;
    private String             providerVlanId;
    private IPVersion[]        supportedTraffic;
    private Map<String,String> tags;

    public Subnet() { }

    /**
     * In clouds where the subnet is constrained to a data center, this enables you to specify the data center to
     * which this subnet is constrained
     * @param dataCenterId the data center to which this subnet is constrained
     * @return this
     */
    public @Nonnull Subnet constrainedToDataCenter(@Nonnull String dataCenterId) {
        this.providerDataCenterId = dataCenterId;
        return this;
    }

    @Override
    public boolean equals(Object ob) {
        if( ob == null ) {
            return false;
        }
        if( ob == this ) {
            return true;
        }
        if( !getClass().getName().equals(ob.getClass().getName()) ) {
            return false;
        }
        Subnet other = (Subnet)ob;

        // Identity is the (owner, region, vlan, subnet) tuple; keep hashCode() in sync.
        if( !providerSubnetId.equals(other.providerSubnetId) ) {
            return false;
        }
        if( !providerVlanId.equals(other.providerVlanId) ) {
            return false;
        }
        //noinspection SimplifiableIfStatement
        if( !providerRegionId.equals(other.providerRegionId) ) {
            return false;
        }
        return providerOwnerId.equals(other.providerOwnerId);
    }

    /**
     * Hash code consistent with {@link #equals(Object)}: derived from the same four identity
     * fields. Null-safe so that partially-populated subnets (built via the deprecated setters)
     * can still be hashed.
     * @return a hash code based on owner, region, VLAN, and subnet IDs
     */
    @Override
    public int hashCode() {
        int result = (providerOwnerId == null ? 0 : providerOwnerId.hashCode());

        result = 31 * result + (providerRegionId == null ? 0 : providerRegionId.hashCode());
        result = 31 * result + (providerVlanId == null ? 0 : providerVlanId.hashCode());
        result = 31 * result + (providerSubnetId == null ? 0 : providerSubnetId.hashCode());
        return result;
    }

    /**
     * @return a list of IP address ranges from which IP addresses may be allocated
     */
    public @Nonnull AllocationPool[] getAllocationPools() {
        if( allocationPools == null ) {
            allocationPools = new AllocationPool[0];
        }
        return allocationPools;
    }

    /**
     * @return the count of available IP addresses
     */
    public @Nonnegative int getAvailableIpAddresses() {
        return availableIpAddresses;
    }

    /**
     * @return the CIDR block associated with this subnet
     */
    public String getCidr() {
        return cidr;
    }

    /**
     * @return the current state for the subnet
     */
    public SubnetState getCurrentState() {
        return currentState;
    }

    /**
     * @return a user-friendly description for the subnet
     */
    public @Nonnull String getDescription() {
        return description;
    }

    /**
     * @return the gateway IP address to be used in routing out of the subnet
     */
    public @Nullable RawAddress getGateway() {
        return gateway;
    }

    /**
     * @return the name of the subnet
     */
    public @Nonnull String getName() {
        return name;
    }

    /**
     * @return the data center, if any, to which this subnet is constrained
     */
    public @Nullable String getProviderDataCenterId() {
        return providerDataCenterId;
    }

    /**
     * @return the account that owns this subnet or an empty string if it is a publicly shared subnet
     */
    public @Nonnull String getProviderOwnerId() {
        return providerOwnerId;
    }

    /**
     * @return the region to which this subnet is constrained
     */
    public @Nonnull String getProviderRegionId() {
        return providerRegionId;
    }

    /**
     * @return the unique ID for this subnet
     */
    public @Nonnull String getProviderSubnetId() {
        return providerSubnetId;
    }

    /**
     * @return the ID for the VLAN of which this subnet is part
     */
    public @Nonnull String getProviderVlanId() {
        return providerVlanId;
    }

    /**
     * @return a list of IP versions supported on this subnet
     */
    public @Nonnull IPVersion[] getSupportedTraffic() {
        return ((supportedTraffic == null || supportedTraffic.length < 1) ? new IPVersion[] { IPVersion.IPV4 } : supportedTraffic);
    }

    /**
     * Specifies the allocation pools associated with the subnet.
     * @param pools one or more IP address ranges from which IPs may be allocated
     * @return this
     */
    public @Nonnull Subnet havingAllocationPools(@Nonnull AllocationPool ... pools) {
        this.allocationPools = pools;
        return this;
    }

    @Override
    public @Nonnull Map<String,String> getTags() {
        if( tags == null ) {
            tags = new HashMap<String, String>();
        }
        return tags;
    }

    @Override
    public void setTag(@Nonnull String key, @Nonnull String value) {
        if( tags == null ) {
            tags = new HashMap<String,String>();
        }
        tags.put(key, value);
    }

    /**
     * Sets the meta-data tags and overwrites any existing values.
     * @param tags the tags to be set
     */
    public void setTags(@Nonnull Map<String,String> tags) {
        this.tags = tags;
    }

    /**
     * Indicates that this subnet will support the specified kind of traffic.
     * @param traffic the traffic supported in this subnet
     * @return this
     */
    public Subnet supportingTraffic(@Nonnull IPVersion ... traffic) {
        supportedTraffic = traffic;
        return this;
    }

    @Override
    public String toString() {
        return (cidr + " [" + providerOwnerId + "/" + providerSubnetId + "]");
    }

    /**
     * Specifies the IP address of the gateway for this subnet.
     * @param gatewayIp the gateway IP address for the subnet
     * @return this
     */
    public @Nonnull Subnet usingGateway(@Nonnull RawAddress gatewayIp) {
        this.gateway = gatewayIp;
        return this;
    }

    /**
     * Specifies the number of IP addresses currently available in this subnet.
     * @param count the number of IP addresses available in this subnet
     * @return this
     */
    public @Nonnull Subnet withAvailableIpAddresses(int count) {
        this.availableIpAddresses = count;
        return this;
    }

    /******************************* DEPRECATED METHODS ***********************************/

    /**
     * Sets the count of available IP addresses.
     * @param availableIpAddresses the number of available IP addresses
     * @deprecated Use the static factory methods
     */
    public void setAvailableIpAddresses(@Nonnegative int availableIpAddresses) {
        this.availableIpAddresses = availableIpAddresses;
    }

    /**
     * Sets the CIDR associated with this subnet.
     * @param cidr the CIDR block for the subnet
     * @deprecated Use the static factory methods
     */
    public void setCidr(@Nonnull String cidr) {
        this.cidr = cidr;
    }

    /**
     * Sets the current state of the subnet.
     * @param currentState the subnet's current state
     * @deprecated Use the static factory methods
     */
    public void setCurrentState(@Nonnull SubnetState currentState) {
        this.currentState = currentState;
    }

    /**
     * Sets a description for the subnet.
     * @param description the subnet's description
     * @deprecated Use the static factory methods
     */
    public void setDescription(@Nonnull String description) {
        this.description = description;
    }

    /**
     * Sets the name of the subnet.
     * @param name the name of the subnet
     * @deprecated Use the static factory methods
     */
    public void setName(@Nonnull String name) {
        this.name = name;
    }

    /**
     * Sets the data center to which this subnet is constrained
     * @param providerDataCenterId the data center to which the subnet is constrained
     * @deprecated Use the static factory methods
     */
    public void setProviderDataCenterId(@Nullable String providerDataCenterId) {
        this.providerDataCenterId = providerDataCenterId;
    }

    /**
     * Sets the account number that owns this subnet or an empty string for a shared subnet.
     * @param providerOwnerId the account number owning this subnet
     * @deprecated Use the static factory methods
     */
    public void setProviderOwnerId(String providerOwnerId) {
        this.providerOwnerId = providerOwnerId;
    }

    /**
     * Sets the region to which this subnet is constrained.
     * @param providerRegionId the region to which the subnet is constrained
     * @deprecated Use the static factory methods
     */
    public void setProviderRegionId(String providerRegionId) {
        this.providerRegionId = providerRegionId;
    }

    /**
     * Sets the unique ID for identifying this subnet with the cloud provider.
     * @param providerSubnetId the unique ID of the subnet
     * @deprecated Use the static factory methods
     */
    public void setProviderSubnetId(@Nonnull String providerSubnetId) {
        this.providerSubnetId = providerSubnetId;
    }

    /**
     * Sets the ID of the VLAN in which this subnet sits.
     * @param providerVlanId the ID of the VLAN for the subnet
     * @deprecated Use the static factory methods
     */
    public void setProviderVlanId(@Nonnull String providerVlanId) {
        this.providerVlanId = providerVlanId;
    }
}
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.plugin.use.internal;

import com.google.common.collect.Iterables;
import org.gradle.api.Action;
import org.gradle.api.GradleException;
import org.gradle.api.Transformer;
import org.gradle.api.artifacts.dsl.RepositoryHandler;
import org.gradle.api.artifacts.repositories.ArtifactRepository;
import org.gradle.api.artifacts.repositories.MavenArtifactRepository;
import org.gradle.api.internal.initialization.ClassLoaderScope;
import org.gradle.api.internal.initialization.ScriptHandlerInternal;
import org.gradle.api.internal.plugins.ClassloaderBackedPluginDescriptorLocator;
import org.gradle.api.internal.plugins.PluginDescriptorLocator;
import org.gradle.api.internal.plugins.PluginImplementation;
import org.gradle.api.internal.plugins.PluginInspector;
import org.gradle.api.internal.plugins.PluginManagerInternal;
import org.gradle.api.internal.plugins.PluginRegistry;
import org.gradle.api.plugins.InvalidPluginException;
import org.gradle.api.plugins.UnknownPluginException;
import org.gradle.internal.classpath.CachedClasspathTransformer;
import org.gradle.internal.classpath.ClassPath;
import org.gradle.internal.exceptions.LocationAwareException;
import org.gradle.plugin.management.internal.PluginRequestInternal;
import org.gradle.plugin.management.internal.PluginRequests;
import org.gradle.plugin.management.internal.PluginResolutionStrategyInternal;
import org.gradle.plugin.use.PluginId;
import org.gradle.plugin.use.resolve.internal.AlreadyOnClasspathPluginResolver;
import org.gradle.plugin.use.resolve.internal.PluginResolution;
import org.gradle.plugin.use.resolve.internal.PluginResolutionResult;
import org.gradle.plugin.use.resolve.internal.PluginResolveContext;
import org.gradle.plugin.use.resolve.internal.PluginResolver;
import org.gradle.util.TextUtil;

import javax.annotation.Nullable;
import java.util.Collections;
import java.util.Formatter;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static com.google.common.collect.Maps.newLinkedHashMap;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.collect.Sets.newLinkedHashSet;
import static org.gradle.util.CollectionUtils.collect;

/**
 * Resolves a script's {@code plugins {}} requests and applies the results to a target
 * {@link PluginManagerInternal}.
 *
 * <p>The sequencing in {@link #applyPlugins} is order-sensitive: requests are resolved, any
 * required repositories/classpath dependencies are registered on the script handler, the
 * script's classloader scope is defined and locked, and only then are plugins actually applied
 * (legacy id-based applications first, then typed implementations). Failures are wrapped in
 * {@link LocationAwareException} so the user sees the requesting script and line number.
 */
public class DefaultPluginRequestApplicator implements PluginRequestApplicator {
    private final PluginRegistry pluginRegistry;
    private final PluginResolverFactory pluginResolverFactory;
    private final PluginRepositoriesProvider pluginRepositoriesProvider;
    private final PluginResolutionStrategyInternal pluginResolutionStrategy;
    private final PluginInspector pluginInspector;
    private final CachedClasspathTransformer cachedClasspathTransformer;

    public DefaultPluginRequestApplicator(PluginRegistry pluginRegistry, PluginResolverFactory pluginResolver, PluginRepositoriesProvider pluginRepositoriesProvider, PluginResolutionStrategyInternal pluginResolutionStrategy, PluginInspector pluginInspector, CachedClasspathTransformer cachedClasspathTransformer) {
        this.pluginRegistry = pluginRegistry;
        this.pluginResolverFactory = pluginResolver;
        this.pluginRepositoriesProvider = pluginRepositoriesProvider;
        this.pluginResolutionStrategy = pluginResolutionStrategy;
        this.pluginInspector = pluginInspector;
        this.cachedClasspathTransformer = cachedClasspathTransformer;
    }

    @Override
    public void applyPlugins(final PluginRequests requests, final ScriptHandlerInternal scriptHandler, @Nullable final PluginManagerInternal target, final ClassLoaderScope classLoaderScope) {
        // No target or no requests: still define (and lock) the script classloader scope so
        // downstream code sees a consistent scope, then bail out.
        if (target == null || requests.isEmpty()) {
            defineScriptHandlerClassScope(scriptHandler, classLoaderScope, Collections.<PluginImplementation<?>>emptyList());
            return;
        }

        final PluginResolver effectivePluginResolver = wrapInAlreadyInClasspathResolver(classLoaderScope);

        // Resolve each request (after letting the resolution strategy mutate it); failures throw.
        List<Result> results = collect(requests, new Transformer<Result, PluginRequestInternal>() {
            @Override
            public Result transform(PluginRequestInternal request) {
                PluginRequestInternal configuredRequest = pluginResolutionStrategy.applyTo(request);
                return resolveToFoundResult(effectivePluginResolver, configuredRequest);
            }
        });

        // Could be different to ids in the requests as they may be unqualified
        final Map<Result, PluginId> legacyActualPluginIds = newLinkedHashMap();
        final Map<Result, PluginImplementation<?>> pluginImpls = newLinkedHashMap();
        final Map<Result, PluginImplementation<?>> pluginImplsFromOtherLoaders = newLinkedHashMap();

        if (!results.isEmpty()) {
            final RepositoryHandler repositories = scriptHandler.getRepositories();

            addPluginArtifactRepositories(repositories);

            final Set<String> repoUrls = newLinkedHashSet();

            // Each resolution result registers itself via one of the PluginResolveContext
            // callbacks below; nothing is applied to the target yet.
            for (final Result result : results) {
                applyPlugin(result.request, result.found.getPluginId(), new Runnable() {
                    @Override
                    public void run() {
                        result.found.execute(new PluginResolveContext() {
                            @Override
                            public void addLegacy(PluginId pluginId, final String m2RepoUrl, Object dependencyNotation) {
                                // Remember the repo URL, then delegate to the two-arg overload.
                                repoUrls.add(m2RepoUrl);
                                addLegacy(pluginId, dependencyNotation);
                            }

                            @Override
                            public void addLegacy(PluginId pluginId, Object dependencyNotation) {
                                legacyActualPluginIds.put(result, pluginId);
                                scriptHandler.addScriptClassPathDependency(dependencyNotation);
                            }

                            @Override
                            public void add(PluginImplementation<?> plugin) {
                                pluginImpls.put(result, plugin);
                            }

                            @Override
                            public void addFromDifferentLoader(PluginImplementation<?> plugin) {
                                pluginImplsFromOtherLoaders.put(result, plugin);
                            }
                        });
                    }
                });
            }

            addMissingMavenRepositories(repositories, repoUrls);
        }

        defineScriptHandlerClassScope(scriptHandler, classLoaderScope, pluginImplsFromOtherLoaders.values());

        // We're making an assumption here that the target's plugin registry is backed by the
        // classLoaderScope. Because we are currently only handling build.gradle files, this
        // holds. It won't for arbitrary scripts though.
        for (final Map.Entry<Result, PluginId> entry : legacyActualPluginIds.entrySet()) {
            final PluginRequestInternal request = entry.getKey().request;
            final PluginId id = entry.getValue();
            applyPlugin(request, id, new Runnable() {
                @Override
                public void run() {
                    if (request.isApply()) {
                        target.apply(id.toString());
                    }
                }
            });
        }

        for (final Map.Entry<Result, PluginImplementation<?>> entry : Iterables.concat(pluginImpls.entrySet(), pluginImplsFromOtherLoaders.entrySet())) {
            final Result result = entry.getKey();
            applyPlugin(result.request, result.found.getPluginId(), new Runnable() {
                @Override
                public void run() {
                    if (result.request.isApply()) {
                        target.apply(entry.getValue());
                    }
                }
            });
        }
    }

    // Registers the globally-configured plugin repositories on the script's repository handler.
    private void addPluginArtifactRepositories(RepositoryHandler repositories) {
        repositories.addAll(pluginRepositoriesProvider.getPluginRepositories());
    }

    // Adds a maven repository for every legacy plugin repo URL not already present.
    private void addMissingMavenRepositories(RepositoryHandler repositories, Set<String> repoUrls) {
        if (repoUrls.isEmpty()) {
            return;
        }

        final Set<String> existingMavenUrls = existingMavenUrls(repositories);
        for (final String repoUrl : repoUrls) {
            if (!existingMavenUrls.contains(repoUrl)) {
                maven(repositories, repoUrl);
            }
        }
    }

    // Registers a single maven repository at the given URL.
    private void maven(RepositoryHandler repositories, final String m2RepoUrl) {
        repositories.maven(new Action<MavenArtifactRepository>() {
            @Override
            public void execute(MavenArtifactRepository mavenArtifactRepository) {
                mavenArtifactRepository.setUrl(m2RepoUrl);
            }
        });
    }

    // Collects the URLs of all maven repositories currently registered.
    private Set<String> existingMavenUrls(RepositoryHandler repositories) {
        Set<String> mavenUrls = newHashSet();
        for (ArtifactRepository repo : repositories) {
            if (repo instanceof MavenArtifactRepository) {
                mavenUrls.add(((MavenArtifactRepository) repo).getUrl().toString());
            }
        }
        return mavenUrls;
    }

    // Exports the script classpath (and any cross-loader plugin classloaders) into the scope,
    // then locks it. After this point the scope's visible classes are fixed.
    private void defineScriptHandlerClassScope(ScriptHandlerInternal scriptHandler, ClassLoaderScope classLoaderScope, Iterable<PluginImplementation<?>> pluginsFromOtherLoaders) {
        ClassPath classPath = scriptHandler.getScriptClassPath();

        ClassPath cachedJarClassPath = cachedClasspathTransformer.transform(classPath);
        classLoaderScope.export(cachedJarClassPath);

        for (PluginImplementation<?> pluginImplementation : pluginsFromOtherLoaders) {
            classLoaderScope.export(pluginImplementation.asClass().getClassLoader());
        }

        classLoaderScope.lock();
    }

    // Wraps the configured resolver so plugins already visible on the parent scope's classpath
    // are detected and not re-resolved.
    private PluginResolver wrapInAlreadyInClasspathResolver(ClassLoaderScope classLoaderScope) {
        ClassLoaderScope parentLoaderScope = classLoaderScope.getParent();
        PluginDescriptorLocator scriptClasspathPluginDescriptorLocator = new ClassloaderBackedPluginDescriptorLocator(parentLoaderScope.getExportClassLoader());
        PluginResolver pluginResolver = pluginResolverFactory.create();
        return new AlreadyOnClasspathPluginResolver(pluginResolver, pluginRegistry, parentLoaderScope, scriptClasspathPluginDescriptorLocator, pluginInspector);
    }

    // Runs the applicator, translating failures into a LocationAwareException that points at the
    // requesting script line.
    private void applyPlugin(PluginRequestInternal request, PluginId id, Runnable applicator) {
        try {
            try {
                applicator.run();
            } catch (UnknownPluginException e) {
                throw couldNotApply(request, id, e);
            } catch (Exception e) {
                throw exceptionOccurred(request, e);
            }
        } catch (Exception e) {
            throw new LocationAwareException(e, request.getScriptDisplayName(), request.getLineNumber());
        }
    }

    private InvalidPluginException couldNotApply(PluginRequestInternal request, PluginId id, UnknownPluginException cause) {
        return new InvalidPluginException(
            String.format(
                "Could not apply requested plugin %s as it does not provide a plugin with id '%s'."
                    + " This is caused by an incorrect plugin implementation."
                    + " Please contact the plugin author(s).",
                request, id),
            cause);
    }

    private InvalidPluginException exceptionOccurred(PluginRequestInternal request, Exception e) {
        return new InvalidPluginException(String.format("An exception occurred applying plugin request %s", request), e);
    }

    // Resolves a single request; throws a LocationAwareException when resolution errors or when
    // no source could provide the plugin.
    private Result resolveToFoundResult(PluginResolver effectivePluginResolver, PluginRequestInternal request) {
        Result result = new Result(request);
        try {
            effectivePluginResolver.resolve(request, result);
        } catch (Exception e) {
            throw new LocationAwareException(
                new GradleException(String.format("Error resolving plugin %s", request.getDisplayName()), e),
                request.getScriptDisplayName(), request.getLineNumber());
        }

        if (!result.isFound()) {
            String message = buildNotFoundMessage(request, result);
            Exception exception = new UnknownPluginException(message);
            throw new LocationAwareException(exception, request.getScriptDisplayName(), request.getLineNumber());
        }

        return result;
    }

    // Formats the per-source "not found" report shown to the user.
    private String buildNotFoundMessage(PluginRequestInternal pluginRequest, Result result) {
        if (result.notFoundList.isEmpty()) {
            // this shouldn't happen, resolvers should call notFound()
            return String.format("Plugin %s was not found", pluginRequest.getDisplayName());
        } else {
            Formatter sb = new Formatter();
            sb.format("Plugin %s was not found in any of the following sources:%n", pluginRequest.getDisplayName());

            for (NotFound notFound : result.notFoundList) {
                sb.format("%n- %s (%s)", notFound.source, notFound.message);
                if (notFound.detail != null) {
                    sb.format("%n%s", TextUtil.indent(notFound.detail, "  "));
                }
            }

            return sb.toString();
        }
    }

    /** One source that failed to provide a plugin, with an optional detail blob. */
    private static class NotFound {
        private final String source;
        private final String message;
        private final String detail;

        private NotFound(String source, String message, String detail) {
            this.source = source;
            this.message = message;
            this.detail = detail;
        }
    }

    /** Mutable collector for a single request's resolution outcome. */
    private static class Result implements PluginResolutionResult {
        private final List<NotFound> notFoundList = new LinkedList<NotFound>();
        private final PluginRequestInternal request;
        private PluginResolution found;

        public Result(PluginRequestInternal request) {
            this.request = request;
        }

        @Override
        public void notFound(String sourceDescription, String notFoundMessage) {
            notFoundList.add(new NotFound(sourceDescription, notFoundMessage, null));
        }

        @Override
        public void notFound(String sourceDescription, String notFoundMessage, String notFoundDetail) {
            notFoundList.add(new NotFound(sourceDescription, notFoundMessage, notFoundDetail));
        }

        @Override
        public void found(String sourceDescription, PluginResolution pluginResolution) {
            found = pluginResolution;
        }

        @Override
        public boolean isFound() {
            return found != null;
        }
    }
}
package com.planet_ink.coffee_mud.Behaviors;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;

import java.util.*;

/**
 * <p>Title: False Realities Presents FieryRoom</p>
 * <p>Description: False Realities - Discover your true destiny and change history...</p>
 * <p>Company: http://www.falserealities.com</p>
 * @author Tulath (a.k.a.) Jeremy Vyska
 *
 * A room behavior that simulates an ongoing fire: it periodically damages
 * occupants, may set their equipment (and loose room items) alight, emotes
 * flame messages, and -- unless NOSTOP is given -- eventually replaces the
 * room's title/description with a burned-out version and removes itself.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class FieryRoom extends ActiveTicker
{
	@Override
	public String ID()
	{
		return "FieryRoom";
	}

	@Override
	protected int canImproveCode()
	{
		return Behavior.CAN_ROOMS;
	}

	// Replacement room title/description applied once the fire burns out
	// (only when NOSTOP is absent).
	private String		newDisplay		= "";
	private String		newDesc			= "";
	// Hit points of fire damage dealt to each eligible inhabitant per activation.
	private int			directDamage	= 10;
	// Percent chance (0-100) that an inhabitant's equipment is roasted.
	private int			eqChance		= 0;
	// Number of activations before the room is permanently torched (non-NOSTOP).
	private int			burnTicks		= 12;
	// NOSTOP: the fire never consumes the room.
	private boolean		noStop			= false;
	// NONPC: only affect monsters that are grouped with at least one player.
	private boolean		noNpc			= false;
	// NOFIRETEXT: suppress the random flame emotes.
	private boolean		noFireText		= false;
	private String[]	FireTexts		= {"The fire here crackles and burns."};

	public FieryRoom()
	{
		super();
		minTicks = 5;
		maxTicks = 10;
		chance = 100;
		tickReset();
	}

	@Override
	public String accountForYourself()
	{
		return "on fire";
	}

	/**
	 * Parses the behavior parameters: Title, Description, damage, eqchance,
	 * burnticks, plus the NOSTOP/NONPC/NOFIRETEXT flags, then regenerates the
	 * emote texts (which embed the parsed values).
	 */
	@Override
	public void setParms(String newParms)
	{
		super.setParms(newParms);
		newDisplay = CMParms.getParmStr(newParms, "Title", "A Charred Ruin");
		newDesc = CMParms.getParmStr(newParms, "Description", "Whatever was once here is now nothing more than ash.");
		directDamage = CMParms.getParmInt(newParms, "damage", 10);
		eqChance = CMParms.getParmInt(newParms, "eqchance", 0);
		burnTicks = CMParms.getParmInt(newParms, "burnticks", 12);
		final Vector<String> V = CMParms.parse(newParms.toUpperCase());
		noStop = (V.contains("NOSTOP"));
		noNpc = (V.contains("NONPC"));
		noFireText = (V.contains("NOFIRETEXT"));
		setFireTexts();
	}

	// Rebuilds the emote pool; several entries vary with the parsed parameters,
	// so this must run after setParms().
	private void setFireTexts()
	{
		final String[] newFireTexts = {"The fire here crackles and burns.",
									   "The intense heat of the fire here is "+(directDamage>0?"very painful":"very unpleasant")+".",
									   "The flames dance around you"+(eqChance>0?", licking at your clothes.":"."),
									   "The fire is burning out of control. You fear for your safety"+(noStop?".":" as it looks like this place is being completely consumed."),
									   "You hear popping and sizzling as something burns.",
									   "The smoke here is very thick and you worry about whether you will be able to breathe."};
		FireTexts = newFireTexts;
	}

	/**
	 * Per-tick driver: when the behavior's chance/timing allows, damages
	 * eligible occupants, possibly roasts equipment and room items, and emits
	 * a flame emote. When NOSTOP is absent, counts down burnTicks and finally
	 * torches the room and removes this behavior.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		super.tick(ticking, tickID);
		// NOTE(review): super.tick() is also invoked at each return below, as in
		// the original flow -- confirm ActiveTicker tolerates the double call.
		if (!(ticking instanceof Room))
			return super.tick(ticking, tickID);
		final Room room = (Room) ticking;
		if (canAct(ticking, tickID))
		{
			if ((directDamage > 0) || (eqChance > 0))
			{
				// for each inhabitant, deal directDamage and maybe roast equipment
				for (int i = 0; i < room.numInhabitants(); i++)
				{
					final MOB inhab = room.fetchInhabitant(i);
					if (inhab == null)
						continue;
					if (inhab.isMonster())
					{
						boolean reallyAffect = true;
						if (noNpc)
						{
							// NONPC: only hit monsters grouped with a player
							reallyAffect = false;
							final Set<MOB> group = inhab.getGroupMembers(new HashSet<MOB>());
							for (final Object element : group)
							{
								final MOB follower = (MOB) element;
								if (!(follower.isMonster()))
								{
									reallyAffect = true;
									break;
								}
							}
						}
						if (reallyAffect)
						{
							dealDamage(inhab);
							// BUGFIX: was rollPercentage() > eqChance, which burned
							// equipment (100-eqChance)% of the time -- i.e. on every
							// activation at the default eqchance=0. eqChance is the
							// chance TO roast equipment.
							if (CMLib.dice().rollPercentage() <= eqChance)
								eqRoast(inhab);
						}
					}
					else
					{
						// players: spare staff with room-administration privileges
						if ((!CMSecurity.isAllowed(inhab, inhab.location(), CMSecurity.SecFlag.ORDER))
						&& (!CMSecurity.isAllowed(inhab, inhab.location(), CMSecurity.SecFlag.CMDROOMS)))
						{
							dealDamage(inhab);
							// BUGFIX: same inverted comparison as above
							if (CMLib.dice().rollPercentage() <= eqChance)
								eqRoast(inhab);
						}
					}
				}
			}
			// chance of igniting each loose item in the room
			roastRoom(room);
			// the tick happened; unless NOFIRETEXT, emit a random flame emote
			if (!noFireText)
			{
				final String pickedText = FireTexts[CMLib.dice().roll(1, FireTexts.length, 0) - 1];
				room.showHappens(CMMsg.MSG_OK_ACTION, pickedText);
			}
		}
		if (!noStop)
		{
			if (burnTicks == 0)
			{
				// the fire has run its course: apply the torched text and remove
				// this behavior permanently
				room.setDisplayText(newDisplay);
				room.setDescription(newDesc);
				room.delBehavior(this);
			}
			else
				--burnTicks;
		}
		return super.tick(ticking, tickID);
	}

	// Posts directDamage fire damage to the mob from a throwaway "fire" mob.
	private void dealDamage(MOB mob)
	{
		final MOB M = CMLib.map().getFactoryMOB(mob.location());
		M.setName(L("fire"));
		CMLib.combat().postDamage(M, mob, null, directDamage,
								  CMMsg.MASK_ALWAYS | CMMsg.MASK_MALICIOUS | CMMsg.TYP_FIRE,
								  Weapon.TYPE_BURNING, L("The fire here <DAMAGE> <T-NAME>!"));
		M.destroy();
	}

	/**
	 * Roasts one randomly-chosen item the mob carries. Non-flammable materials
	 * (metal, glass, rock, ...) instead heat up, dealing minor damage and
	 * possibly forcing a drop; everything else gains the Burning ability.
	 */
	private void eqRoast(MOB mob)
	{
		final Item target = getSomething(mob);
		if (target != null)
		{
			final MOB M = CMLib.map().getFactoryMOB(mob.location());
			M.setName(L("fire"));
			switch (target.material() & RawMaterial.MATERIAL_MASK)
			{
			case RawMaterial.MATERIAL_GLASS:
			case RawMaterial.MATERIAL_METAL:
			case RawMaterial.MATERIAL_MITHRIL:
			case RawMaterial.MATERIAL_SYNTHETIC:
			case RawMaterial.MATERIAL_PRECIOUS:
			case RawMaterial.MATERIAL_ROCK:
			case RawMaterial.MATERIAL_UNKNOWN:
			{
				// all these get hot and may be dropped rather than burn
				final int damage = CMLib.dice().roll(1, 6, 1);
				CMLib.combat().postDamage(M, mob, null, damage,
										  CMMsg.MASK_ALWAYS | CMMsg.MASK_MALICIOUS | CMMsg.TYP_FIRE,
										  Weapon.TYPE_BURNING, target.name() + " <DAMAGE> <T-NAME>!");
				// NOTE(review): drop chance scales with strength as written --
				// stronger mobs drop MORE often; confirm that is intended
				if (CMLib.dice().rollPercentage() < mob.charStats().getStat(CharStats.STAT_STRENGTH))
				{
					CMLib.commands().postDrop(mob, target, false, false, false);
				}
				break;
			}
			default:
			{
				final Ability burn = CMClass.getAbility("Burning");
				if (burn != null)
				{
					mob.location().showHappens(CMMsg.MSG_OK_ACTION, L("@x1 begins to burn!", target.Name()));
					burn.invoke(M, target, true, 0);
					target.recoverPhyStats();
				}
				break;
			}
			}
			M.destroy();
		}
	}

	// Gives every loose item in the room a chance to catch fire via the
	// Burning ability (ignites when rollPercentage() > 60, i.e. ~40%).
	private static void roastRoom(Room which)
	{
		final MOB mob = CMLib.map().getFactoryMOB(which);
		// BUGFIX: name was un-localized ("fire" without L()); now matches the
		// localization used by every other fire mob in this behavior
		mob.setName(CMLib.lang().L("fire"));
		for (int i = 0; i < which.numItems(); i++)
		{
			final Item target = which.getItem(i);
			if (target == null)
				continue;
			final Ability burn = CMClass.getAbility("Burning");
			if ((burn != null) && (CMLib.dice().rollPercentage() > 60))
			{
				which.showHappens(CMMsg.MSG_OK_ACTION, CMLib.lang().L("@x1 begins to burn!", target.Name()));
				burn.invoke(mob, target, true, 0);
				target.recoverPhyStats();
			}
		}
		mob.destroy();
	}

	/**
	 * Picks a random item carried by the mob, preferring worn/held items
	 * over loose inventory. Returns null if the mob carries nothing.
	 */
	private static Item getSomething(MOB mob)
	{
		final Vector<Item> good = new Vector<Item>();   // loose inventory
		final Vector<Item> great = new Vector<Item>();  // worn/held -- preferred
		Item target = null;
		for (int i = 0; i < mob.numItems(); i++)
		{
			final Item I = mob.getItem(i);
			if (I.amWearingAt(Wearable.IN_INVENTORY))
				good.addElement(I);
			else
				great.addElement(I);
		}
		if (great.size() > 0)
			target = great.elementAt(CMLib.dice().roll(1, great.size(), -1));
		else if (good.size() > 0)
			target = good.elementAt(CMLib.dice().roll(1, good.size(), -1));
		return target;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
import org.apache.hadoop.chukwa.datacollection.adaptor.AbstractAdaptor;
import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorException;
import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;

/**
 * A base class for file tailing adaptors.
 * Intended to mandate as little policy as possible, and to use as
 * few system resources as possible.
 *
 * If the file does not exist, this class will continue to retry quietly
 * forever and will start tailing if it's eventually created.
 */
public class LWFTAdaptor extends AbstractAdaptor {

  /**
   * This is the maximum amount we'll read from any one file before moving on to
   * the next. This way, we get quick response time for other files if one file
   * is growing rapidly.
   */
  public static final int DEFAULT_MAX_READ_SIZE = 128 * 1024;
  public static final String MAX_READ_SIZE_OPT =
      "chukwaAgent.fileTailingAdaptor.maxReadSize";

  int MAX_READ_SIZE = DEFAULT_MAX_READ_SIZE;

  static Logger log;
  /** Shared tailer thread; lazily created by the first adaptor to start. */
  static FileTailer tailer;

  static {
    tailer = null;
    // NOTE(review): the logger is registered under FileTailingAdaptor's
    // category, not LWFTAdaptor's. Left as-is since log configurations may
    // key off that name -- confirm whether this is intentional.
    log = Logger.getLogger(FileTailingAdaptor.class);
  }

  /**
   * next PHYSICAL offset to read
   */
  protected long fileReadOffset;

  /**
   * The logical offset of the first byte of the file
   */
  protected long offsetOfFirstByte = 0;
  protected Configuration conf = null;
  /**
   * The timestamp of last slurping.
   */
  protected long lastSlurpTime = 0l;

  File toWatch;

  @Override
  public void start(long offset) {
    synchronized(LWFTAdaptor.class) {
      if (tailer == null)
        tailer = new FileTailer(control.getConfiguration());
    }
    this.fileReadOffset = offset - offsetOfFirstByte;
    tailer.startWatchingFile(this);
  }

  /**
   * @see org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor#getCurrentStatus()
   */
  public String getCurrentStatus() {
    return type.trim() + " " + offsetOfFirstByte + " " + toWatch.getPath();
  }

  public String toString() {
    return "Lightweight Tailer on " + toWatch;
  }

  public String getStreamName() {
    return toWatch.getPath();
  }

  /**
   * Parses "[offsetOfFirstByte] path". If the leading number is absent the
   * whole argument is treated as the path.
   */
  @Override
  public String parseArgs(String params) {
    conf = control.getConfiguration();
    MAX_READ_SIZE = conf.getInt(MAX_READ_SIZE_OPT, DEFAULT_MAX_READ_SIZE);

    Pattern cmd = Pattern.compile("(\\d+)\\s+(.+)\\s?");
    Matcher m = cmd.matcher(params);
    if (m.matches()) {
      // check for first-byte offset. If absent, assume we just got a path.
      offsetOfFirstByte = Long.parseLong(m.group(1));
      toWatch = new File(m.group(2));
    } else {
      toWatch = new File(params.trim());
    }
    return toWatch.getAbsolutePath();
  }

  @Override
  public long shutdown(AdaptorShutdownPolicy shutdownPolicy)
      throws AdaptorException {
    tailer.stopWatchingFile(this);
    return fileReadOffset + offsetOfFirstByte;
  }

  /**
   * Extract records from a byte sequence
   *
   * @param eq the queue to stick the new chunk[s] in
   * @param buffOffsetInFile the byte offset in the stream at which buf[] begins
   * @param buf the byte buffer to extract records from
   * @return the number of bytes processed
   * @throws InterruptedException
   */
  protected int extractRecords(ChunkReceiver eq, long buffOffsetInFile,
      byte[] buf) throws InterruptedException {
    if (buf.length == 0)
      return 0;

    ChunkImpl chunk = new ChunkImpl(type, toWatch.getAbsolutePath(),
        buffOffsetInFile + buf.length, buf, this);

    eq.add(chunk);
    return buf.length;
  }

  /**
   * Reads up to MAX_READ_SIZE bytes starting at fileReadOffset, hands them to
   * extractRecords(), and advances fileReadOffset by the bytes consumed.
   *
   * @param len current length of the file
   * @param reader an open reader positioned anywhere (we seek first)
   * @return true if there is still unread data past what we just read
   */
  protected boolean slurp(long len, RandomAccessFile reader)
      throws IOException, InterruptedException {
    boolean hasMoreData = false;

    log.debug("Adaptor|" + adaptorID + "|seeking|" + fileReadOffset);
    reader.seek(fileReadOffset);
    long bufSize = len - fileReadOffset;
    if (bufSize > MAX_READ_SIZE) {
      bufSize = MAX_READ_SIZE;
      hasMoreData = true;
    }
    byte[] buf = new byte[(int) bufSize];

    long curOffset = fileReadOffset;
    lastSlurpTime = System.currentTimeMillis();
    int bufferRead = reader.read(buf);
    // BUGFIX: the expected offset used to be string-concatenated
    // (fileReadOffset + bufSize printed as two numbers glued together);
    // parenthesize so the message shows the actual expected offset.
    assert reader.getFilePointer() == fileReadOffset + bufSize :
        " event size arithmetic is broken: " + " pointer is "
        + reader.getFilePointer() + " but offset is "
        + (fileReadOffset + bufSize);

    int bytesUsed = extractRecords(dest, fileReadOffset + offsetOfFirstByte,
        buf);

    // === WARNING ===
    // If we couldn't find a complete record AND we cannot read more
    // (i.e. bufferRead == MAX_READ_SIZE) it's because the record is too BIG.
    // So log.warn, and drop the current buffer so we keep moving instead of
    // being stuck at this point forever.
    if (bytesUsed == 0 && bufferRead == MAX_READ_SIZE) {
      log.warn("bufferRead == MAX_READ_SIZE AND bytesUsed == 0, "
          + "dropping current buffer: startOffset=" + curOffset
          + ", MAX_READ_SIZE=" + MAX_READ_SIZE + ", for " + toWatch.getPath());
      bytesUsed = buf.length;
    }

    fileReadOffset = fileReadOffset + bytesUsed;

    log.debug("Adaptor|" + adaptorID + "|start|" + curOffset + "|end|"
        + fileReadOffset);
    return hasMoreData;
  }

  /**
   * Checks the watched file once: handles shrinkage, otherwise slurps any new
   * bytes. IOExceptions deregister and stop this adaptor.
   *
   * @return true if more unread data remains after this pass
   */
  public boolean tailFile() throws InterruptedException {
    boolean hasMoreData = false;
    try {
      // if file doesn't exist, length == 0 and we just keep waiting for it
      long len = toWatch.length();
      if (len < fileReadOffset) {
        // file shrank; probably some data went missing
        handleShrunkenFile(len);
      } else if (len > fileReadOffset) {
        RandomAccessFile reader = new RandomAccessFile(toWatch, "r");
        try {
          hasMoreData = slurp(len, reader);
        } finally {
          // BUGFIX: the handle used to leak when slurp() threw, because
          // close() was only reached on the success path
          reader.close();
        }
      }
    } catch(IOException e) {
      log.warn("IOException in tailer", e);
      deregisterAndStop();
    }
    return hasMoreData;
  }

  // Resets to the start of a file that got shorter; the bytes already emitted
  // keep their logical offsets by bumping offsetOfFirstByte.
  private void handleShrunkenFile(long measuredLen) {
    // BUGFIX: message previously read "file <name>shrank from" (missing space)
    log.info("file " + toWatch + " shrank from " + fileReadOffset + " to "
        + measuredLen);
    offsetOfFirstByte = measuredLen;
    fileReadOffset = 0;
  }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.resourcemanager.appplatform;

import com.azure.core.management.Region;
import com.azure.core.test.annotation.DoNotRecord;
import com.azure.resourcemanager.appplatform.models.RuntimeVersion;
import com.azure.resourcemanager.appplatform.models.SpringApp;
import com.azure.resourcemanager.appplatform.models.SpringAppDeployment;
import com.azure.resourcemanager.appplatform.models.SpringService;
import com.azure.resourcemanager.appservice.models.AppServiceDomain;
import com.azure.resourcemanager.dns.models.DnsZone;
import com.azure.resourcemanager.keyvault.models.CertificatePermissions;
import com.azure.resourcemanager.keyvault.models.SecretPermissions;
import com.azure.resourcemanager.keyvault.models.Vault;
import com.azure.resourcemanager.resources.fluentcore.arm.CountryIsoCode;
import com.azure.resourcemanager.resources.fluentcore.arm.CountryPhoneCode;
import com.azure.security.keyvault.certificates.CertificateClient;
import com.azure.security.keyvault.certificates.CertificateClientBuilder;
import com.azure.security.keyvault.certificates.models.ImportCertificateOptions;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.compress.utils.IOUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Live-only tests for Azure Spring Cloud: deployment CRUD, and custom domains
 * with SSL certificates sourced from Key Vault. These run against real Azure
 * resources ({@code @DoNotRecord}) and are skipped in playback.
 */
public class SpringCloudLiveOnlyTest extends AppPlatformTest {
    // NOTE(review): currently unreferenced; kept for parity with the sample set.
    private static final String PIGGYMETRICS_CONFIG_URL = "https://github.com/Azure-Samples/piggymetrics-config";
    private static final String GATEWAY_JAR_URL = "https://github.com/weidongxu-microsoft/azure-sdk-for-java-management-tests/raw/master/spring-cloud/gateway.jar";
    private static final String PIGGYMETRICS_TAR_GZ_URL = "https://github.com/weidongxu-microsoft/azure-sdk-for-java-management-tests/raw/master/spring-cloud/piggymetrics.tar.gz";

    // Well-known service principal used by the Spring Cloud resource provider
    // to read certificates out of Key Vault.
    private static final String SPRING_CLOUD_SERVICE_PRINCIPAL = "03b39d0f-4213-4864-a245-b1476ec03169";

    @Test
    @DoNotRecord
    public void canCRUDDeployment() throws Exception {
        if (skipInPlayback()) {
            return;
        }

        String serviceName = generateRandomResourceName("springsvc", 15);
        String appName = "gateway";
        String deploymentName = generateRandomResourceName("deploy", 15);
        String deploymentName1 = generateRandomResourceName("deploy", 15);
        Region region = Region.US_EAST;

        SpringService service = appPlatformManager.springServices().define(serviceName)
            .withRegion(region)
            .withNewResourceGroup(rgName)
            .create();

        // download the sample gateway jar once; cached in the working directory
        File jarFile = new File("gateway.jar");
        downloadIfMissing(jarFile, GATEWAY_JAR_URL);

        // first deployment: jar-based, 2 instances, 2 CPU / 4 GB, Java 11
        SpringApp app = service.apps().define(appName)
            .defineActiveDeployment(deploymentName)
            .withJarFile(jarFile)
            .withInstance(2)
            .withCpu(2)
            .withMemory(4)
            .withRuntime(RuntimeVersion.JAVA_11)
            .attach()
            .withDefaultPublicEndpoint()
            .create();

        Assertions.assertNotNull(app.url());
        Assertions.assertNotNull(app.activeDeploymentName());
        Assertions.assertEquals(1, app.deployments().list().stream().count());
        Assertions.assertTrue(requestSuccess(app.url()));

        SpringAppDeployment deployment = app.getActiveDeployment();
        Assertions.assertEquals(2, deployment.settings().cpu());
        Assertions.assertEquals(4, deployment.settings().memoryInGB());
        Assertions.assertEquals(RuntimeVersion.JAVA_11, deployment.settings().runtimeVersion());
        Assertions.assertEquals(2, deployment.instances().size());

        // second deployment: built from a source tar.gz and activated on create
        File gzFile = new File("piggymetrics.tar.gz");
        downloadIfMissing(gzFile, PIGGYMETRICS_TAR_GZ_URL);

        deployment = app.deployments().define(deploymentName1)
            .withSourceCodeTarGzFile(gzFile)
            .withTargetModule("gateway")
            .withActivation()
            .create();
        app.refresh();

        Assertions.assertEquals(deploymentName1, app.activeDeploymentName());
        Assertions.assertEquals(1, deployment.settings().cpu());
        Assertions.assertNotNull(deployment.getLogFileUrl());
        Assertions.assertTrue(requestSuccess(app.url()));

        app.update()
            .withoutDefaultPublicEndpoint()
            .apply();
        Assertions.assertFalse(app.isPublic());

        // deleting the original deployment leaves only the active one
        app.deployments().deleteByName(deploymentName);
        Assertions.assertEquals(1, app.deployments().list().stream().count());
    }

    @Test
    @DoNotRecord
    public void canCreateCustomDomainWithSsl() throws Exception {
        if (skipInPlayback()) {
            return;
        }

        String domainName = generateRandomResourceName("jsdkdemo-", 20) + ".com";
        String certOrderName = generateRandomResourceName("cert", 15);
        String vaultName = generateRandomResourceName("vault", 15);
        String certName = generateRandomResourceName("cert", 15);
        String serviceName = generateRandomResourceName("springsvc", 15);
        String appName = "gateway";
        Region region = Region.US_EAST;

        allowAllSSL();

        // self-signed certificate covering ssl.<domain>
        String cerPassword = password();
        // NOTE(review): resourcePath has no trailing separator, so the cert
        // files land beside the parent directory with a concatenated name;
        // harmless since the same path is used for both write and read.
        String resourcePath = Paths.get(this.getClass().getResource("/session-records").toURI()).getParent().toString();
        String cerPath = resourcePath + domainName + ".cer";
        String pfxPath = resourcePath + domainName + ".pfx";
        createCertificate(cerPath, pfxPath, domainName, cerPassword, "ssl." + domainName, "ssl." + domainName);

        byte[] certificate = readAllBytes(new FileInputStream(pfxPath));

        appPlatformManager.resourceManager().resourceGroups().define(rgName)
            .withRegion(region)
            .create();

        // create custom domain and certificate
        DnsZone dnsZone = dnsZoneManager.zones().define(domainName)
            .withExistingResourceGroup(rgName)
            .create();
        AppServiceDomain domain = appServiceManager.domains().define(domainName)
            .withExistingResourceGroup(rgName)
            .defineRegistrantContact()
                .withFirstName("Jon")
                .withLastName("Doe")
                .withEmail("jondoe@contoso.com")
                .withAddressLine1("123 4th Ave")
                .withCity("Redmond")
                .withStateOrProvince("WA")
                .withCountry(CountryIsoCode.UNITED_STATES)
                .withPostalCode("98052")
                .withPhoneCountryCode(CountryPhoneCode.UNITED_STATES)
                .withPhoneNumber("4258828080")
                .attach()
            .withDomainPrivacyEnabled(true)
            .withAutoRenewEnabled(false)
            .withExistingDnsZone(dnsZone)
            .create();

        // vault readable by this test principal and by the Spring Cloud RP
        Vault vault = keyVaultManager.vaults().define(vaultName)
            .withRegion(region)
            .withExistingResourceGroup(rgName)
            .defineAccessPolicy()
                .forServicePrincipal(clientIdFromFile())
                .allowSecretAllPermissions()
                .allowCertificateAllPermissions()
                .attach()
            .defineAccessPolicy()
                .forServicePrincipal(SPRING_CLOUD_SERVICE_PRINCIPAL)
                .allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST)
                .allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST)
                .attach()
            .create();

        // upload certificate
        CertificateClient certificateClient = new CertificateClientBuilder()
            .vaultUrl(vault.vaultUri())
            .pipeline(appPlatformManager.httpPipeline())
            .buildClient();
        certificateClient.importCertificate(
            new ImportCertificateOptions(certName, certificate)
                .setPassword(cerPassword)
                .setEnabled(true)
        );

        // get thumbprint (SHA-1 of the certificate, as Azure reports it)
        KeyStore store = KeyStore.getInstance("PKCS12");
        store.load(new ByteArrayInputStream(certificate), cerPassword.toCharArray());
        String alias = Collections.list(store.aliases()).get(0);
        String thumbprint = printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded()));

        SpringService service = appPlatformManager.springServices().define(serviceName)
            .withRegion(region)
            .withExistingResourceGroup(rgName)
            .withCertificate("test", vault.vaultUri(), certName)
            .create();
        service.apps().define(appName).withDefaultActiveDeployment().withDefaultPublicEndpoint().create();
        SpringApp app = service.apps().getByName(appName);

        // point www/ssl at the app and bind the domains (ssl with the cert)
        dnsZone.update()
            .withCNameRecordSet("www", app.fqdn())
            .withCNameRecordSet("ssl", app.fqdn())
            .apply();

        app.update()
            .withoutDefaultPublicEndpoint()
            .withCustomDomain(String.format("www.%s", domainName))
            .withCustomDomain(String.format("ssl.%s", domainName), thumbprint)
            .apply();

        Assertions.assertTrue(app.customDomains().validate(String.format("www.%s", domainName)).isValid());
        Assertions.assertTrue(requestSuccess(String.format("http://www.%s", domainName)));
        Assertions.assertTrue(requestSuccess(String.format("https://ssl.%s", domainName)));

        app.update()
            .withHttpsOnly()
            .apply();
        Assertions.assertTrue(checkRedirect(String.format("http://ssl.%s", domainName)));
    }

    /** Downloads {@code urlString} to {@code file} unless it already exists. */
    private static void downloadIfMissing(File file, String urlString) throws IOException {
        if (file.exists()) {
            return;
        }
        HttpURLConnection connection = (HttpURLConnection) new URL(urlString).openConnection();
        connection.connect();
        try (InputStream inputStream = connection.getInputStream();
             OutputStream outputStream = new FileOutputStream(file)) {
            IOUtils.copy(inputStream, outputStream);
        } finally {
            // always disconnect, even if the copy fails
            connection.disconnect();
        }
    }

    /** Downloads and unpacks a .tar.gz into {@code folder}, preserving paths. */
    private void extraTarGzSource(File folder, URL url) throws IOException {
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.connect();
        try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) {
            TarArchiveEntry entry;
            while ((entry = inputStream.getNextTarEntry()) != null) {
                if (entry.isDirectory()) {
                    continue;
                }
                File file = new File(folder, entry.getName());
                File parent = file.getParentFile();
                if (parent.exists() || parent.mkdirs()) {
                    try (OutputStream outputStream = new FileOutputStream(file)) {
                        IOUtils.copy(inputStream, outputStream);
                    }
                } else {
                    throw new IllegalStateException("Cannot create directory: " + parent.getAbsolutePath());
                }
            }
        } finally {
            connection.disconnect();
        }
    }

    /** Drains the stream into a byte array. The caller owns (and closes) the stream. */
    private byte[] readAllBytes(InputStream inputStream) throws IOException {
        try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
            byte[] data = new byte[4096];
            int size;
            // standard read loop; read() returns -1 only at end of stream
            while ((size = inputStream.read(data)) != -1) {
                outputStream.write(data, 0, size);
            }
            return outputStream.toByteArray();
        }
    }

    /**
     * Creates a self-signed PKCS12 keystore at {@code pfxPath} and exports its
     * public certificate to {@code certPath} using the JDK's keytool. No-op if
     * the pfx already exists. (Test-only: uses SHA1withRSA.)
     */
    public static void createCertificate(String certPath, String pfxPath,
                                         String alias, String password, String cnName, String dnsName) throws IOException {
        if (new File(pfxPath).exists()) {
            return;
        }
        String validityInDays = "3650";
        String keyAlg = "RSA";
        String sigAlg = "SHA1withRSA";
        String keySize = "2048";
        String storeType = "pkcs12";
        String command = "keytool";
        String jdkPath = System.getProperty("java.home");
        if (jdkPath != null && !jdkPath.isEmpty()) {
            // BUGFIX: the bin directory was appended with a hard-coded "\\bin",
            // which never resolved on non-Windows hosts; use File.separator.
            jdkPath = jdkPath.concat(File.separator + "bin");
            if (new File(jdkPath).isDirectory()) {
                command = String.format("%s%s%s", jdkPath, File.separator, command);
            }
        } else {
            // no JAVA_HOME to locate keytool with; silently skip as before
            return;
        }

        // Create Pfx file
        String[] commandArgs = {command, "-genkey", "-alias", alias,
            "-keystore", pfxPath, "-storepass", password, "-validity",
            validityInDays, "-keyalg", keyAlg, "-sigalg", sigAlg, "-keysize", keySize,
            "-storetype", storeType, "-dname", "CN=" + cnName, "-ext", "EKU=1.3.6.1.5.5.7.3.1"};
        if (dnsName != null) {
            List<String> args = new ArrayList<>(Arrays.asList(commandArgs));
            args.add("-ext");
            args.add("san=dns:" + dnsName);
            commandArgs = args.toArray(new String[0]);
        }
        cmdInvocation(commandArgs, true);

        // Create cer file i.e. extract public key from pfx
        File pfxFile = new File(pfxPath);
        if (pfxFile.exists()) {
            String[] certCommandArgs = {command, "-export", "-alias", alias,
                "-storetype", storeType, "-keystore", pfxPath, "-storepass",
                password, "-rfc", "-file", certPath};
            // output of keytool export command is going to error stream
            // although command is executed successfully, hence ignoring error
            // stream in this case
            cmdInvocation(certCommandArgs, true);

            // Check if file got created or not
            // BUGFIX: this used to check pfxPath again (which we already know
            // exists), so a failed export went undetected; check the cer file.
            File cerFile = new File(certPath);
            if (!cerFile.exists()) {
                throw new IOException(
                    "Error occurred while creating certificate"
                        + String.join(" ", certCommandArgs));
            }
        } else {
            throw new IOException("Error occurred while creating certificates"
                + String.join(" ", commandArgs));
        }
    }

    /**
     * Runs a command, returning the first line of stdout. The first line of
     * stderr is treated as an error unless {@code ignoreErrorStream} is set.
     */
    public static String cmdInvocation(String[] command,
                                       boolean ignoreErrorStream) throws IOException {
        String result = "";
        String error = "";

        Process process = new ProcessBuilder(command).start();
        try (
            InputStream inputStream = process.getInputStream();
            InputStream errorStream = process.getErrorStream();
            BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
            BufferedReader ebr = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8));
        ) {
            result = br.readLine();
            process.waitFor();
            error = ebr.readLine();
            if (error != null && (!"".equals(error))) {
                // To do - Log error message
                if (!ignoreErrorStream) {
                    throw new IOException(error, null);
                }
            }
        } catch (IOException e) {
            // BUGFIX: the broad catch(Exception) used to swallow this method's
            // own IOException and re-wrap it in RuntimeException even though
            // the signature declares throws IOException; let it propagate.
            throw e;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status
            throw new RuntimeException("Exception occurred while invoking command", e);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred while invoking command", e);
        }
        return result;
    }

    /**
     * Installs a trust-all SSL context so the test can talk to endpoints whose
     * certificates are self-signed. TEST-ONLY: never use in production code.
     */
    private static void allowAllSSL() throws NoSuchAlgorithmException, KeyManagementException {
        TrustManager[] trustAllCerts = new TrustManager[]{
            new X509TrustManager() {
                public java.security.cert.X509Certificate[] getAcceptedIssuers() {
                    return null;
                }

                public void checkClientTrusted(java.security.cert.X509Certificate[] certs, String authType) {
                }

                public void checkServerTrusted(java.security.cert.X509Certificate[] certs, String authType) {
                }
            }
        };
        SSLContext sslContext = SSLContext.getInstance("SSL");
        sslContext.init(null, trustAllCerts, new SecureRandom());
        HttpsURLConnection.setDefaultSSLSocketFactory(sslContext.getSocketFactory());
    }

    private static final char[] HEX_CODE = "0123456789ABCDEF".toCharArray();

    /** Upper-case hex encoding (thumbprint format expected by Azure). */
    private static String printHexBinary(byte[] data) {
        StringBuilder r = new StringBuilder(data.length * 2);
        for (byte b : data) {
            r.append(HEX_CODE[(b >> 4) & 0xF]);
            r.append(HEX_CODE[(b & 0xF)]);
        }
        return r.toString();
    }
}
/** * Copyright 2010-2012 The PlayN Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package playn.core.gl; import playn.core.AbstractImage; import playn.core.Asserts; import playn.core.Image; import playn.core.InternalTransform; import playn.core.Pattern; import playn.core.Tint; public abstract class ImageGL extends AbstractImage implements Image { protected final GLContext ctx; /** This image's scale factor. This is effectively final, but can't be marked final because it * can be updated post-construction due to asynchronous image loading. */ protected Scale scale; /** The current count of references to this image. */ protected int refs; /** Our texture and repeatable texture handles. */ protected int tex, reptex; /** * Creates a texture for this image (if one does not already exist) and returns it. May return * 0 if the underlying image data is not yet ready. */ public int ensureTexture(boolean repeatX, boolean repeatY) { if (!isReady()) { return 0; } else if (repeatX || repeatY) { scaleTexture(repeatX, repeatY); return reptex; } else { loadTexture(); return tex; } } /** * Releases this image's texture memory. */ public void clearTexture() { if (tex > 0) { ctx.destroyTexture(tex); tex = 0; } if (reptex > 0) { ctx.destroyTexture(reptex); reptex = 0; } } /** * Increments this image's reference count. Called by {@link ImageLayerGL} to let the image know * that it's part of the scene graph. 
 * Note that this reference counting mechanism only exists to
 * make more efficient use of texture memory. Images are also used by things like {@link Pattern}
 * which does not support reference counting, thus images must also provide some fallback
 * mechanism for releasing their texture when no longer needed (like in their finalizer). */
public void reference() {
  refs++; // we still create our texture on demand
}

/**
 * Decrements this image's reference count. Called by {@link ImageLayerGL} to let the image know
 * that may no longer be part of the scene graph. When the count drops to zero the texture is
 * cleared eagerly instead of waiting for finalization.
 */
public void release() {
  Asserts.checkState(refs > 0, "Released an image with no references!");
  if (--refs == 0) {
    clearTexture();
  }
}

/**
 * Draws this image with the supplied transform in the specified target dimensions.
 *
 * On a repeating axis the source extent equals the destination extent ({@code dw}/{@code dh}),
 * so the texture tiles across the quad; otherwise the full image extent is used.
 */
void draw(GLShader shader, InternalTransform xform, float dx, float dy, float dw, float dh,
          boolean repeatX, boolean repeatY, int tint) {
  // ensureTexture yields a non-positive id when no texture is available; we then draw nothing
  int tex = ensureTexture(repeatX, repeatY);
  if (tex > 0) {
    // source rectangle, offset by this image's position within its backing texture
    float sl = x(), st = y();
    float sr = sl + (repeatX ? dw : width()), sb = st + (repeatY ? dh : height());
    // divide by the backing texture dimensions to normalize into texture coordinates
    float texWidth = texWidth(repeatX), texHeight = texHeight(repeatY);
    ctx.quadShader(shader).prepareTexture(tex, tint).addQuad(
      xform, dx, dy, dx + dw, dy + dh,
      sl / texWidth, st / texHeight, sr / texWidth, sb / texHeight);
  }
}

/**
 * Draws this image with the supplied transform, and source and target dimensions.
 */
void draw(GLShader shader, InternalTransform xform, float dx, float dy, float dw, float dh,
          float sx, float sy, float sw, float sh, int tint) {
  // repeating is not supported on this path, so the plain (non-pow2-scaled) texture suffices
  int tex = ensureTexture(false, false);
  if (tex > 0) {
    // translate the caller-supplied source rect by this image's offset in its backing texture
    sx += x();
    sy += y();
    float texWidth = texWidth(false), texHeight = texHeight(false);
    ctx.quadShader(shader).prepareTexture(tex, tint).addQuad(
      xform, dx, dy, dx + dw, dy + dh,
      sx / texWidth, sy / texHeight, (sx + sw) / texWidth, (sy + sh) / texHeight);
  }
}

protected ImageGL(GLContext ctx, Scale scale) {
  this.ctx = ctx;
  this.scale = scale;
}

/**
 * The x offset into our source image at which this image's region starts.
 */
protected float x() {
  return 0;
}

/**
 * The y offset into our source image at which this image's region starts.
 */
protected float y() {
  return 0;
}

/**
 * Returns the width of our underlying texture image.
 */
protected float texWidth(boolean repeatX) {
  return width();
}

/**
 * Returns the height of our underlying texture image.
 */
protected float texHeight(boolean repeatY) {
  return height();
}

/**
 * Copies our current image data into the supplied texture.
 */
protected abstract void updateTexture(int tex);

@Override
protected void finalize() {
  // queue GL resource destruction for the render thread; GL calls cannot be made here
  if (tex > 0) ctx.queueDestroyTexture(tex);
  // NOTE(review): reptex holds a texture id (see scaleTexture) but is queued via
  // queueDeleteFramebuffer — confirm this is intended and not a texture leak
  if (reptex > 0) ctx.queueDeleteFramebuffer(reptex);
}

private void loadTexture() {
  if (tex > 0) return; // already loaded
  tex = ctx.createTexture(false, false);
  updateTexture(tex);
}

// Creates (if needed) the pow2-sized repeatable texture in reptex, scaling the source
// image into it via an offscreen framebuffer when the source is not already pow2-sized.
private void scaleTexture(boolean repeatX, boolean repeatY) {
  if (reptex > 0) return; // already created

  int scaledWidth = scale.scaledCeil(width());
  int scaledHeight = scale.scaledCeil(height());

  // GL requires pow2 on axes that repeat
  int width = GLUtil.nextPowerOfTwo(scaledWidth), height = GLUtil.nextPowerOfTwo(scaledHeight);

  // TODO: if width/height > platform_max_size, repeatedly scale by 0.5 until within bounds
  // platform_max_size = 1024 for iOS, GL10.GL_MAX_TEXTURE_SIZE on android, etc.

  // no need to scale if our source data is already a power of two
  // (nextPowerOfTwo reports 0 for a value that is already a power of two; see below)
  if ((width == 0) && (height == 0)) {
    reptex = ctx.createTexture(scaledWidth, scaledHeight, repeatX, repeatY);
    updateTexture(reptex);
    return;
  }

  // otherwise we need to scale our non-repeated texture, so load that normally
  loadTexture();

  // width/height == 0 => already a power of two.
  if (width == 0) width = scaledWidth;
  if (height == 0) height = scaledHeight;

  // create our texture and point a new framebuffer at it
  reptex = ctx.createTexture(width, height, repeatX, repeatY);
  int fbuf = ctx.createFramebuffer(reptex);
  ctx.pushFramebuffer(fbuf, width, height);
  try {
    // render the non-repeated texture into the framebuffer properly scaled
    ctx.clear(0, 0, 0, 0);
    ctx.quadShader(null).prepareTexture(tex, Tint.NOOP_TINT).addQuad(
      ctx.createTransform(), 0, height, width, 0, 0, 0, 1, 1);
  } finally {
    // we no longer need this framebuffer; rebind the previous framebuffer and delete ours
    ctx.popFramebuffer();
    ctx.deleteFramebuffer(fbuf);
  }
}
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceResult;

/**
 * <p>
 * Contains the output of GetPasswordData.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetPasswordDataResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The ID of the Windows instance. */
    private String instanceId;

    /** The time the data was last updated. */
    private java.util.Date timestamp;

    /** The password of the instance. */
    private String passwordData;

    /**
     * Sets the ID of the Windows instance.
     *
     * @param instanceId
     *        The ID of the Windows instance.
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /**
     * Returns the ID of the Windows instance.
     *
     * @return The ID of the Windows instance.
     */
    public String getInstanceId() {
        return this.instanceId;
    }

    /**
     * Fluent setter for the ID of the Windows instance.
     *
     * @param instanceId
     *        The ID of the Windows instance.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetPasswordDataResult withInstanceId(String instanceId) {
        setInstanceId(instanceId);
        return this;
    }

    /**
     * Sets the time the data was last updated.
     *
     * @param timestamp
     *        The time the data was last updated.
     */
    public void setTimestamp(java.util.Date timestamp) {
        this.timestamp = timestamp;
    }

    /**
     * Returns the time the data was last updated.
     *
     * @return The time the data was last updated.
     */
    public java.util.Date getTimestamp() {
        return this.timestamp;
    }

    /**
     * Fluent setter for the time the data was last updated.
     *
     * @param timestamp
     *        The time the data was last updated.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetPasswordDataResult withTimestamp(java.util.Date timestamp) {
        setTimestamp(timestamp);
        return this;
    }

    /**
     * Sets the password of the instance.
     *
     * @param passwordData
     *        The password of the instance.
     */
    public void setPasswordData(String passwordData) {
        this.passwordData = passwordData;
    }

    /**
     * Returns the password of the instance.
     *
     * @return The password of the instance.
     */
    public String getPasswordData() {
        return this.passwordData;
    }

    /**
     * Fluent setter for the password of the instance.
     *
     * @param passwordData
     *        The password of the instance.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetPasswordDataResult withPasswordData(String passwordData) {
        setPasswordData(passwordData);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * Note: the exact format (including the trailing comma when a later field is null) is
     * preserved from the generated original.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder buf = new StringBuilder("{");
        if (getInstanceId() != null) {
            buf.append("InstanceId: ").append(getInstanceId()).append(",");
        }
        if (getTimestamp() != null) {
            buf.append("Timestamp: ").append(getTimestamp()).append(",");
        }
        if (getPasswordData() != null) {
            buf.append("PasswordData: ").append(getPasswordData());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetPasswordDataResult)) {
            return false;
        }
        GetPasswordDataResult that = (GetPasswordDataResult) obj;
        return java.util.Objects.equals(getInstanceId(), that.getInstanceId())
                && java.util.Objects.equals(getTimestamp(), that.getTimestamp())
                && java.util.Objects.equals(getPasswordData(), that.getPasswordData());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes 31 * h + (e == null ? 0 : e.hashCode()) starting from 1,
        // which is value-identical to the generated prime-accumulator loop it replaces.
        return java.util.Objects.hash(getInstanceId(), getTimestamp(), getPasswordData());
    }

    @Override
    public GetPasswordDataResult clone() {
        try {
            return (GetPasswordDataResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
package org.eclipse.bpel.validator; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; import org.eclipse.bpel.fnmeta.model.FMPackage; import org.eclipse.bpel.model.BPELPackage; import org.eclipse.bpel.model.adapters.AdapterRegistry; import org.eclipse.bpel.validator.factory.BPELValidatorAdapterFactory; import org.eclipse.bpel.validator.factory.FunctionMetaValidatorAdapterFactory; import org.eclipse.bpel.validator.helpers.ModelQueryImpl; import org.eclipse.bpel.validator.model.IFactory; import org.eclipse.bpel.validator.model.IModelQuery; import org.eclipse.bpel.validator.model.RuleFactory; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IConfigurationElement; import org.eclipse.core.runtime.IExtension; import org.eclipse.core.runtime.IExtensionPoint; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Platform; import org.eclipse.core.runtime.Plugin; import org.eclipse.core.runtime.Status; import org.eclipse.core.runtime.jobs.Job; import org.osgi.framework.BundleContext; /** * The activator class controls the plug-in life cycle */ @SuppressWarnings("nls") public class Activator extends Plugin { /** The plug-in ID */ public static final String PLUGIN_ID = "org.eclipse.bpel.validator"; //$NON-NLS-1$ /** The shared instance */ static Activator plugin; /** * The constructor */ public Activator() { // nothing } /** * (non-Javadoc) * @see org.eclipse.core.runtime.Plugin#start(org.osgi.framework.BundleContext) */ public void start (BundleContext context) throws Exception { super.start(context); plugin = this; // hook up Java logging to the Eclipse error log Logger logger = Logger.getLogger( PLUGIN_ID ); Handler handler = new LogHandler(); logger.addHandler( handler ); initializeAndTime(); } void initialize () { // factories String epName = "factories"; IExtensionPoint ep = 
Platform.getExtensionRegistry().getExtensionPoint(PLUGIN_ID,epName); if (ep != null) { for(IExtension e : ep.getExtensions() ) { for(IConfigurationElement ce : e.getConfigurationElements() ) { Object obj = null; try { obj = ce.createExecutableExtension("class"); } catch (CoreException e1) { log(e1); } if (obj != null && obj instanceof IFactory) { RuleFactory.INSTANCE.registerFactory( (IFactory) obj); } } } } else { String name = PLUGIN_ID + "." + epName; log(null,IStatus.ERROR,"Extension point " + name + " is not avaialble."); } // modelQuery epName = "modelQuery"; ep = Platform.getExtensionRegistry().getExtensionPoint(PLUGIN_ID,epName); if (ep != null) { for(IExtension e : ep.getExtensions() ) { for(IConfigurationElement ce : e.getConfigurationElements() ) { Object obj = null; try { obj = ce.createExecutableExtension("class"); } catch (CoreException e1) { log(e1); } if (obj != null && obj instanceof IModelQuery) { ModelQueryImpl.register( (IModelQuery) obj); } } } } else { String name = PLUGIN_ID + "." 
+ epName; log(null,IStatus.ERROR,"Extension point " + name + " is not avaialble."); } // Register our adapter providers AdapterRegistry.INSTANCE.registerAdapterFactory( FMPackage.eINSTANCE, FunctionMetaValidatorAdapterFactory.INSTANCE ); AdapterRegistry.INSTANCE.registerAdapterFactory( BPELPackage.eINSTANCE, BPELValidatorAdapterFactory.INSTANCE ); } IStatus initializeAndTime () { long start = System.currentTimeMillis(); initialize (); long end = System.currentTimeMillis(); IStatus status = new Status(IStatus.INFO, PLUGIN_ID, 0, "Validator Startup " + (end - start) + "ms" ,null); return status; } void initializeAsync ( ) { Job job = new Job ("Registering Validator factories ...") { protected IStatus run (IProgressMonitor monitor) { IStatus status = initializeAndTime(); monitor.done(); done(Job.ASYNC_FINISH); return status; } }; job.setPriority(Job.SHORT); job.schedule(); } /** * (non-Javadoc) * @see org.eclipse.core.runtime.Plugin#stop(org.osgi.framework.BundleContext) */ public void stop(BundleContext context) throws Exception { plugin = null; AdapterRegistry.INSTANCE.unregisterAdapterFactory( FMPackage.eINSTANCE, FunctionMetaValidatorAdapterFactory.INSTANCE ); AdapterRegistry.INSTANCE.registerAdapterFactory( BPELPackage.eINSTANCE, BPELValidatorAdapterFactory.INSTANCE ); super.stop(context); } /** * Returns the shared instance * * @return the shared instance */ public static Activator getDefault() { return plugin; } /** * Utility methods for logging exceptions. * @param e exception to log * @param severity the severity to log the exception as. * @param message */ public static void log (Exception e, int severity, String message ) { IStatus status = null; if (e instanceof CoreException) { status = ((CoreException)e).getStatus(); } else { String m = message; if (m == null) { e.getMessage(); } status = new Status(severity, PLUGIN_ID, 0, m==null? "<no message>" : m, e); //$NON-NLS-1$ } plugin.getLog().log(status); } /** * Log an exception. 
* @param e exception the log */ public static void log(Exception e) { log(e, IStatus.ERROR,null); } class LogHandler extends Handler { /** (non-Javadoc) * @see java.util.logging.Handler#close() */ public void close() throws SecurityException { } /** (non-Javadoc) * @see java.util.logging.Handler#flush() */ public void flush() { } /** (non-Javadoc) * @see java.util.logging.Handler#publish(java.util.logging.LogRecord) */ @SuppressWarnings("nls") public void publish (LogRecord record) { StringBuilder sb = new StringBuilder(); sb.append(record.getThreadID()); sb.append(":"); sb.append(record.getLoggerName()); sb.append(":"); sb.append(record.getMessage()); Throwable t = record.getThrown(); int severity = IStatus.INFO; if (record.getLevel() == Level.SEVERE) { severity = IStatus.ERROR; } else if (record.getLevel() == Level.WARNING) { severity = IStatus.WARNING; } IStatus status = new Status(severity, PLUGIN_ID, 0,sb.toString() , t); //$NON-NLS-1$ plugin.getLog().log(status); } } }
package com.jivesoftware.os.upena.deployable.region; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Joiner; import com.google.common.collect.Maps; import com.jivesoftware.os.mlogger.core.MetricLogger; import com.jivesoftware.os.mlogger.core.MetricLoggerFactory; import com.jivesoftware.os.upena.deployable.region.ServicesPluginRegion.ServicesPluginRegionInput; import com.jivesoftware.os.upena.deployable.soy.SoyRenderer; import com.jivesoftware.os.upena.service.UpenaStore; import com.jivesoftware.os.upena.shared.Instance; import com.jivesoftware.os.upena.shared.InstanceFilter; import com.jivesoftware.os.upena.shared.InstanceKey; import com.jivesoftware.os.upena.shared.Service; import com.jivesoftware.os.upena.shared.ServiceFilter; import com.jivesoftware.os.upena.shared.ServiceKey; import com.jivesoftware.os.upena.shared.TimestampedValue; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.shiro.SecurityUtils; import org.apache.shiro.authz.AuthorizationException; /** * */ // soy.page.servicesPluginRegion public class ServicesPluginRegion implements PageRegion<ServicesPluginRegionInput> { private static final MetricLogger LOG = MetricLoggerFactory.getLogger(); private final ObjectMapper mapper; private final String template; private final SoyRenderer renderer; private final UpenaStore upenaStore; public ServicesPluginRegion(ObjectMapper mapper, String template, SoyRenderer renderer, UpenaStore upenaStore) { this.mapper = mapper; this.template = template; this.renderer = renderer; this.upenaStore = upenaStore; } @Override public String getRootPath() { return "/ui/services"; } public static class ServicesPluginRegionInput implements PluginInput { final String key; final String name; final String description; final String action; public ServicesPluginRegionInput(String key, String name, String description, String action) { this.key = key; 
this.name = name; this.description = description; this.action = action; } @Override public String name() { return "Services"; } } @Override public String render(String user, ServicesPluginRegionInput input) { Map<String, Object> data = Maps.newHashMap(); if (SecurityUtils.getSubject().isPermitted("write")) { data.put("readWrite", true); } try { Map<ServiceKey, String> serviceColor = ServiceColorUtil.serviceKeysColor(upenaStore); Map<String, String> filters = new HashMap<>(); filters.put("name", input.name); filters.put("description", input.description); data.put("filters", filters); ServiceFilter filter = new ServiceFilter(null, null, 0, 100_000); if (input.action != null) { if (input.action.equals("filter")) { SecurityUtils.getSubject().checkPermissions("read"); filter = handleFilter(input, data); } else if (input.action.equals("add")) { SecurityUtils.getSubject().checkPermissions("write"); handleAdd(user, filters, input, data); } else if (input.action.equals("update")) { SecurityUtils.getSubject().checkPermissions("write"); handleUpdate(user, filters, input, data); } else if (input.action.equals("remove")) { SecurityUtils.getSubject().checkPermissions("write"); handleRemove(user, input, data); } } List<Map<String, String>> rows = new ArrayList<>(); Map<ServiceKey, TimestampedValue<Service>> found = upenaStore.services.find(false, filter); for (Map.Entry<ServiceKey, TimestampedValue<Service>> entrySet : found.entrySet()) { ServiceKey key = entrySet.getKey(); TimestampedValue<Service> timestampedValue = entrySet.getValue(); Service value = timestampedValue.getValue(); InstanceFilter instanceFilter = new InstanceFilter( null, null, key, null, null, 0, 100_000); Map<InstanceKey, TimestampedValue<Instance>> instances = upenaStore.instances.find(false, instanceFilter); Map<String, String> row = new HashMap<>(); row.put("instanceCount", String.valueOf(instances.size())); row.put("color", serviceColor.get(key)); row.put("key", key.getKey()); row.put("name", value.name); 
row.put("description", value.description); rows.add(row); } Collections.sort(rows, (Map<String, String> o1, Map<String, String> o2) -> { String serviceName1 = o1.get("name"); String serviceName2 = o2.get("name"); int c = serviceName1.compareTo(serviceName2); if (c != 0) { return c; } return c; }); data.put("services", rows); } catch (AuthorizationException a) { throw a; } catch (Exception e) { LOG.error("Unable to retrieve data", e); } return renderer.render(template, data); } private ServiceFilter handleFilter(ServicesPluginRegionInput input, Map<String, Object> data) { ServiceFilter filter; filter = new ServiceFilter( input.name.isEmpty() ? null : input.name, input.description.isEmpty() ? null : input.description, 0, 100_000); data.put("message", "Filtering: name.contains '" + input.name + "' description.contains '" + input.description + "'"); return filter; } private void handleAdd(String user, Map<String, String> filters, ServicesPluginRegionInput input, Map<String, Object> data) { filters.clear(); try { Service newService = new Service(input.name, input.description); upenaStore.services.update(null, newService); data.put("message", "Created Service:" + input.name); upenaStore.recordChange(user, "added", System.currentTimeMillis(), "", "service-ui", newService.toString()); } catch (Exception x) { String trace = x.getMessage() + "\n" + Joiner.on("\n").join(x.getStackTrace()); data.put("message", "Error while trying to add Service:" + input.name + "\n" + trace); } } private void handleUpdate(String user, Map<String, String> filters, ServicesPluginRegionInput input, Map<String, Object> data) { filters.clear(); try { Service service = upenaStore.services.get(new ServiceKey(input.key)); if (service == null) { data.put("message", "Update failed. No existing service. 
Someone may have removed it since your last refresh."); } else { Service update = new Service(input.name, input.description); upenaStore.services.update(new ServiceKey(input.key), update); upenaStore.recordChange(user, "updated", System.currentTimeMillis(), "", "service-ui", update.toString()); data.put("message", "Service Cluster:" + input.name); } } catch (Exception x) { String trace = x.getMessage() + "\n" + Joiner.on("\n").join(x.getStackTrace()); data.put("message", "Error while trying to add Service:" + input.name + "\n" + trace); } } private void handleRemove(String user, ServicesPluginRegionInput input, Map<String, Object> data) { if (input.key.isEmpty()) { data.put("message", "Failed to remove Service:" + input.name); } else { try { ServiceKey serviceKey = new ServiceKey(input.key); Service removing = upenaStore.services.get(serviceKey); if (removing != null) { upenaStore.services.remove(serviceKey); upenaStore.recordChange(user, "updated", System.currentTimeMillis(), "", "service-ui", removing.toString()); } } catch (Exception x) { String trace = x.getMessage() + "\n" + Joiner.on("\n").join(x.getStackTrace()); data.put("message", "Error while trying to remove Service:" + input.name + "\n" + trace); } } } @Override public String getTitle() { return "Upena Services"; } public String doExport(ServicesPluginRegionInput input, String user) { try { ServiceFilter filter; filter = new ServiceFilter( input.name.isEmpty() ? null : input.name, input.description.isEmpty() ? 
null : input.description, 0, 100_000); ListOfService values = new ListOfService(); Map<ServiceKey, TimestampedValue<Service>> found = upenaStore.services.find(false, filter); for (Map.Entry<ServiceKey, TimestampedValue<Service>> entrySet : found.entrySet()) { TimestampedValue<Service> timestampedValue = entrySet.getValue(); Service value = timestampedValue.getValue(); values.add(value); } return mapper.writeValueAsString(values); } catch (Exception e) { LOG.error("Unable to retrieve data", e); return e.toString(); } } public String doImport(String in, String user) { Map<String, Object> data = Maps.newHashMap(); try { ListOfService values = mapper.readValue(in, ListOfService.class); for (Service value : values) { upenaStore.services.update(null, value); data.put("message", "Import:" + value.name); upenaStore.recordChange(user, "imported", System.currentTimeMillis(), "", "release-ui", value.toString()); } return "Imported " + values.size(); } catch (Exception x) { LOG.error("Unable to retrieve data", x); String trace = x.getMessage() + "\n" + Joiner.on("\n").join(x.getStackTrace()); return "Error while trying to import releaseGroups \n" + trace; } } static class ListOfService extends ArrayList<Service> { } }
/** */ package guizmo.structure.impl; import guizmo.structure.*; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EDataType; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.ecore.impl.EFactoryImpl; import org.eclipse.emf.ecore.plugin.EcorePlugin; /** * <!-- begin-user-doc --> * An implementation of the model <b>Factory</b>. * <!-- end-user-doc --> * @generated */ public class StructureFactoryImpl extends EFactoryImpl implements StructureFactory { /** * Creates the default factory implementation. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public static StructureFactory init() { try { StructureFactory theStructureFactory = (StructureFactory)EPackage.Registry.INSTANCE.getEFactory("http://www.modelum.es/guizmo/structure"); if (theStructureFactory != null) { return theStructureFactory; } } catch (Exception exception) { EcorePlugin.INSTANCE.log(exception); } return new StructureFactoryImpl(); } /** * Creates an instance of the factory. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public StructureFactoryImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public EObject create(EClass eClass) { switch (eClass.getClassifierID()) { case StructurePackage.STRUCTURE_ROOT: return createStructureRoot(); case StructurePackage.RESOURCE_REPOSITORY: return createResourceRepository(); case StructurePackage.IMAGE_RESOURCE: return createImageResource(); case StructurePackage.I18N_RESOURCE: return createI18NResource(); case StructurePackage.TEXT_TRANSLATION: return createTextTranslation(); case StructurePackage.GRAPHICAL_VIEW: return createGraphicalView(); case StructurePackage.GUI_REGION: return createGUIRegion(); case StructurePackage.TEXT: return createText(); case StructurePackage.IMAGE: return createImage(); case StructurePackage.OUTPUT_TEXT: return createOutputText(); case StructurePackage.TEXT_BOX: return createTextBox(); case StructurePackage.TEXT_AREA: return createTextArea(); case StructurePackage.ITEM: return createItem(); case StructurePackage.COMBO_BOX: return createComboBox(); case StructurePackage.CHECK_BOX: return createCheckBox(); case StructurePackage.RADIO_GROUP: return createRadioGroup(); case StructurePackage.LIST_BOX: return createListBox(); case StructurePackage.IMAGE_CONTAINER: return createImageContainer(); case StructurePackage.VIDEO_CONTAINER: return createVideoContainer(); case StructurePackage.BUTTON: return createButton(); case StructurePackage.OBJECT_WIDGET: return createObjectWidget(); case StructurePackage.TABLE: return createTable(); case StructurePackage.TABLE_ROW: return createTableRow(); default: throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier"); } } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object createFromString(EDataType eDataType, String initialValue) { switch (eDataType.getClassifierID()) { case 
StructurePackage.LANGUAGE_TYPE: return createLanguageTypeFromString(eDataType, initialValue); default: throw new IllegalArgumentException("The datatype '" + eDataType.getName() + "' is not a valid classifier"); } } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String convertToString(EDataType eDataType, Object instanceValue) { switch (eDataType.getClassifierID()) { case StructurePackage.LANGUAGE_TYPE: return convertLanguageTypeToString(eDataType, instanceValue); default: throw new IllegalArgumentException("The datatype '" + eDataType.getName() + "' is not a valid classifier"); } } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public StructureRoot createStructureRoot() { StructureRootImpl structureRoot = new StructureRootImpl(); return structureRoot; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ResourceRepository createResourceRepository() { ResourceRepositoryImpl resourceRepository = new ResourceRepositoryImpl(); return resourceRepository; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ImageResource createImageResource() { ImageResourceImpl imageResource = new ImageResourceImpl(); return imageResource; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public I18NResource createI18NResource() { I18NResourceImpl i18NResource = new I18NResourceImpl(); return i18NResource; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public TextTranslation createTextTranslation() { TextTranslationImpl textTranslation = new TextTranslationImpl(); return textTranslation; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public GraphicalView createGraphicalView() { GraphicalViewImpl graphicalView = new GraphicalViewImpl(); return graphicalView; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public GUIRegion createGUIRegion() { GUIRegionImpl guiRegion = new 
GUIRegionImpl(); return guiRegion; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Text createText() { TextImpl text = new TextImpl(); return text; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Image createImage() { ImageImpl image = new ImageImpl(); return image; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public OutputText createOutputText() { OutputTextImpl outputText = new OutputTextImpl(); return outputText; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public TextBox createTextBox() { TextBoxImpl textBox = new TextBoxImpl(); return textBox; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public TextArea createTextArea() { TextAreaImpl textArea = new TextAreaImpl(); return textArea; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Item createItem() { ItemImpl item = new ItemImpl(); return item; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ComboBox createComboBox() { ComboBoxImpl comboBox = new ComboBoxImpl(); return comboBox; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public CheckBox createCheckBox() { CheckBoxImpl checkBox = new CheckBoxImpl(); return checkBox; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public RadioGroup createRadioGroup() { RadioGroupImpl radioGroup = new RadioGroupImpl(); return radioGroup; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ListBox createListBox() { ListBoxImpl listBox = new ListBoxImpl(); return listBox; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ImageContainer createImageContainer() { ImageContainerImpl imageContainer = new ImageContainerImpl(); return imageContainer; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public VideoContainer createVideoContainer() { 
VideoContainerImpl videoContainer = new VideoContainerImpl(); return videoContainer; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Button createButton() { ButtonImpl button = new ButtonImpl(); return button; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ObjectWidget createObjectWidget() { ObjectWidgetImpl objectWidget = new ObjectWidgetImpl(); return objectWidget; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Table createTable() { TableImpl table = new TableImpl(); return table; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public TableRow createTableRow() { TableRowImpl tableRow = new TableRowImpl(); return tableRow; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public LanguageType createLanguageTypeFromString(EDataType eDataType, String initialValue) { LanguageType result = LanguageType.get(initialValue); if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'"); return result; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String convertLanguageTypeToString(EDataType eDataType, Object instanceValue) { return instanceValue == null ? null : instanceValue.toString(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public StructurePackage getStructurePackage() { return (StructurePackage)getEPackage(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @deprecated * @generated */ @Deprecated public static StructurePackage getPackage() { return StructurePackage.eINSTANCE; } } //StructureFactoryImpl
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package tech.gusavila92.apache.http.config; import tech.gusavila92.apache.http.annotation.ThreadingBehavior; import tech.gusavila92.apache.http.annotation.Contract; import tech.gusavila92.apache.http.util.Args; /** * Socket configuration. 
 *
 * @since 4.3
 */
@Contract(threading = ThreadingBehavior.IMMUTABLE)
public class SocketConfig implements Cloneable {

    /**
     * Instance carrying the {@link Builder} defaults:
     * {@code soLinger = -1}, {@code tcpNoDelay = true}, all other values {@code 0}/{@code false}.
     */
    public static final SocketConfig DEFAULT = new Builder().build();

    private final int soTimeout;
    private final boolean soReuseAddress;
    private final int soLinger;
    private final boolean soKeepAlive;
    private final boolean tcpNoDelay;
    private final int sndBufSize;
    private final int rcvBufSize;
    private final int backlogSize;

    // Package-private: instances are created via the Builder (or the DEFAULT constant).
    SocketConfig(
            final int soTimeout,
            final boolean soReuseAddress,
            final int soLinger,
            final boolean soKeepAlive,
            final boolean tcpNoDelay,
            final int sndBufSize,
            final int rcvBufSize,
            final int backlogSize) {
        super();
        this.soTimeout = soTimeout;
        this.soReuseAddress = soReuseAddress;
        this.soLinger = soLinger;
        this.soKeepAlive = soKeepAlive;
        this.tcpNoDelay = tcpNoDelay;
        this.sndBufSize = sndBufSize;
        this.rcvBufSize = rcvBufSize;
        this.backlogSize = backlogSize;
    }

    /**
     * Determines the default socket timeout value for non-blocking I/O operations.
     * <p>
     * Default: {@code 0} (no timeout)
     * </p>
     *
     * @return the default socket timeout value for non-blocking I/O operations.
     * @see java.net.SocketOptions#SO_TIMEOUT
     */
    public int getSoTimeout() {
        return soTimeout;
    }

    /**
     * Determines the default value of the {@link java.net.SocketOptions#SO_REUSEADDR} parameter
     * for newly created sockets.
     * <p>
     * Default: {@code false}
     * </p>
     *
     * @return the default value of the {@link java.net.SocketOptions#SO_REUSEADDR} parameter.
     * @see java.net.SocketOptions#SO_REUSEADDR
     */
    public boolean isSoReuseAddress() {
        return soReuseAddress;
    }

    /**
     * Determines the default value of the {@link java.net.SocketOptions#SO_LINGER} parameter
     * for newly created sockets.
     * <p>
     * Default: {@code -1}
     * </p>
     *
     * @return the default value of the {@link java.net.SocketOptions#SO_LINGER} parameter.
     * @see java.net.SocketOptions#SO_LINGER
     */
    public int getSoLinger() {
        return soLinger;
    }

    /**
     * Determines the default value of the {@link java.net.SocketOptions#SO_KEEPALIVE} parameter
     * for newly created sockets.
     * <p>
     * Default: {@code false}
     * </p>
     *
     * @return the default value of the {@link java.net.SocketOptions#SO_KEEPALIVE} parameter.
     * @see java.net.SocketOptions#SO_KEEPALIVE
     */
    public boolean isSoKeepAlive() {
        return soKeepAlive;
    }

    /**
     * Determines the default value of the {@link java.net.SocketOptions#TCP_NODELAY} parameter
     * for newly created sockets.
     * <p>
     * Default: {@code true} (set by the {@link Builder} constructor)
     * </p>
     *
     * @return the default value of the {@link java.net.SocketOptions#TCP_NODELAY} parameter.
     * @see java.net.SocketOptions#TCP_NODELAY
     */
    public boolean isTcpNoDelay() {
        return tcpNoDelay;
    }

    /**
     * Determines the default value of the {@link java.net.SocketOptions#SO_SNDBUF} parameter
     * for newly created sockets.
     * <p>
     * Default: {@code 0} (system default)
     * </p>
     *
     * @return the default value of the {@link java.net.SocketOptions#SO_SNDBUF} parameter.
     * @see java.net.SocketOptions#SO_SNDBUF
     * @since 4.4
     */
    public int getSndBufSize() {
        return sndBufSize;
    }

    /**
     * Determines the default value of the {@link java.net.SocketOptions#SO_RCVBUF} parameter
     * for newly created sockets.
     * <p>
     * Default: {@code 0} (system default)
     * </p>
     *
     * @return the default value of the {@link java.net.SocketOptions#SO_RCVBUF} parameter.
     * @see java.net.SocketOptions#SO_RCVBUF
     * @since 4.4
     */
    public int getRcvBufSize() {
        return rcvBufSize;
    }

    /**
     * Determines the maximum queue length for incoming connection indications
     * (a request to connect) also known as server socket backlog.
     * <p>
     * Default: {@code 0} (system default)
     * </p>
     *
     * @return the maximum queue length for incoming connection indications
     * @since 4.4
     */
    public int getBacklogSize() {
        return backlogSize;
    }

    @Override
    protected SocketConfig clone() throws CloneNotSupportedException {
        // Safe: all fields are primitives, so the shallow Object.clone() copy is complete.
        return (SocketConfig) super.clone();
    }

    @Override
    public String toString() {
        final StringBuilder builder = new StringBuilder();
        builder.append("[soTimeout=").append(this.soTimeout)
                .append(", soReuseAddress=").append(this.soReuseAddress)
                .append(", soLinger=").append(this.soLinger)
                .append(", soKeepAlive=").append(this.soKeepAlive)
                .append(", tcpNoDelay=").append(this.tcpNoDelay)
                .append(", sndBufSize=").append(this.sndBufSize)
                .append(", rcvBufSize=").append(this.rcvBufSize)
                .append(", backlogSize=").append(this.backlogSize)
                .append("]");
        return builder.toString();
    }

    /**
     * Creates a builder initialized with the default values.
     */
    public static SocketConfig.Builder custom() {
        return new Builder();
    }

    /**
     * Creates a builder pre-populated from an existing configuration.
     *
     * @param config the configuration to copy; must not be {@code null}
     */
    public static SocketConfig.Builder copy(final SocketConfig config) {
        Args.notNull(config, "Socket config");
        return new Builder()
            .setSoTimeout(config.getSoTimeout())
            .setSoReuseAddress(config.isSoReuseAddress())
            .setSoLinger(config.getSoLinger())
            .setSoKeepAlive(config.isSoKeepAlive())
            .setTcpNoDelay(config.isTcpNoDelay())
            .setSndBufSize(config.getSndBufSize())
            .setRcvBufSize(config.getRcvBufSize())
            .setBacklogSize(config.getBacklogSize());
    }

    /**
     * Mutable builder for {@link SocketConfig}. Not thread-safe.
     */
    public static class Builder {

        private int soTimeout;
        private boolean soReuseAddress;
        private int soLinger;
        private boolean soKeepAlive;
        private boolean tcpNoDelay;
        private int sndBufSize;
        private int rcvBufSize;
        private int backlogSize;

        Builder() {
            // Non-zero defaults: disable linger, enable TCP_NODELAY.
            this.soLinger = -1;
            this.tcpNoDelay = true;
        }

        public Builder setSoTimeout(final int soTimeout) {
            this.soTimeout = soTimeout;
            return this;
        }

        public Builder setSoReuseAddress(final boolean soReuseAddress) {
            this.soReuseAddress = soReuseAddress;
            return this;
        }

        public Builder setSoLinger(final int soLinger) {
            this.soLinger = soLinger;
            return this;
        }

        public Builder setSoKeepAlive(final boolean soKeepAlive) {
            this.soKeepAlive = soKeepAlive;
            return this;
        }

        public Builder setTcpNoDelay(final boolean tcpNoDelay) {
            this.tcpNoDelay = tcpNoDelay;
            return this;
        }

        /**
         * @since 4.4
         */
        public Builder setSndBufSize(final int sndBufSize) {
            this.sndBufSize = sndBufSize;
            return this;
        }

        /**
         * @since 4.4
         */
        public Builder setRcvBufSize(final int rcvBufSize) {
            this.rcvBufSize = rcvBufSize;
            return this;
        }

        /**
         * @since 4.4
         */
        public Builder setBacklogSize(final int backlogSize) {
            this.backlogSize = backlogSize;
            return this;
        }

        public SocketConfig build() {
            return new SocketConfig(soTimeout, soReuseAddress, soLinger, soKeepAlive,
                    tcpNoDelay, sndBufSize, rcvBufSize, backlogSize);
        }

    }

}
/*
 * Copyright 2015 AT&T Foundry
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.aaa;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

/**
 * Unit tests for the 802.1X authentication {@code StateMachine}: verifies every
 * transition out of each state (IDLE, STARTED, PENDING, AUTHORIZED, UNAUTHORIZED)
 * and the allocation/recycling of the 1-255 identifier space.
 * <p>
 * NOTE(review): throughout this class {@code Assert.assertEquals} is called as
 * {@code (actual, expected)} — JUnit's signature is {@code (expected, actual)}.
 * Pass/fail behavior is unaffected, but failure messages are transposed.
 */
public class StateMachineTest {
    StateMachine stateMachine = null;

    @Before
    public void setUp() {
        // Reset the StateMachine's static identifier pool and session maps so
        // each test starts from a clean slate.
        System.out.println("Set Up.");
        StateMachine.bitSet.clear();
        StateMachine.initializeMaps();
        stateMachine = new StateMachine("session0", null);
    }

    @After
    public void tearDown() {
        // Tear down the shared static state created in setUp().
        System.out.println("Tear Down.");
        StateMachine.bitSet.clear();
        StateMachine.destroyMaps();
        stateMachine = null;
    }

    @Test
    /**
     * Test all the basic inputs from state to state:
     * IDLE -> STARTED -> PENDING -> AUTHORIZED -> IDLE
     */
    public void basic() throws StateMachineException {
        System.out.println("======= BASIC =======.");
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_IDLE);

        stateMachine.start();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_STARTED);

        stateMachine.requestAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_PENDING);

        stateMachine.authorizeAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_AUTHORIZED);

        stateMachine.logoff();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_IDLE);
    }

    @Test
    /**
     * Test all inputs from an IDLE state
     * (starting with the ones that are not impacting the current state):
     * only start() should move the machine out of IDLE.
     */
    public void testIdleState() throws StateMachineException {
        System.out.println("======= IDLE STATE TEST =======.");
        stateMachine.requestAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_IDLE);

        stateMachine.authorizeAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_IDLE);

        stateMachine.denyAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_IDLE);

        stateMachine.logoff();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_IDLE);

        stateMachine.start();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_STARTED);
    }

    @Test
    /**
     * Test all inputs from an STARTED state
     * (starting with the ones that are not impacting the current state):
     * only requestAccess() should move the machine out of STARTED.
     */
    public void testStartedState() throws StateMachineException {
        System.out.println("======= STARTED STATE TEST =======.");
        stateMachine.start();

        stateMachine.authorizeAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_STARTED);

        stateMachine.denyAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_STARTED);

        stateMachine.logoff();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_STARTED);

        stateMachine.start();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_STARTED);

        stateMachine.requestAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_PENDING);
    }

    @Test
    /**
     * Test all inputs from a PENDING state
     * (starting with the ones that are not impacting the current state).
     * The next valid state for this test is AUTHORIZED.
     */
    public void testPendingStateToAuthorized() throws StateMachineException {
        System.out.println("======= PENDING STATE TEST (AUTHORIZED) =======.");
        stateMachine.start();
        stateMachine.requestAccess();

        stateMachine.logoff();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_PENDING);

        stateMachine.start();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_PENDING);

        stateMachine.requestAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_PENDING);

        stateMachine.authorizeAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_AUTHORIZED);

        stateMachine.denyAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_AUTHORIZED);
    }

    @Test
    /**
     * Test all inputs from an PENDING state
     * (starting with the ones that are not impacting the current state).
     * The next valid state for this test is UNAUTHORIZED.
     */
    public void testPendingStateToUnauthorized() throws StateMachineException {
        System.out.println("======= PENDING STATE TEST (DENIED) =======.");
        stateMachine.start();
        stateMachine.requestAccess();

        stateMachine.logoff();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_PENDING);

        stateMachine.start();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_PENDING);

        stateMachine.requestAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_PENDING);

        stateMachine.denyAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_UNAUTHORIZED);

        stateMachine.authorizeAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_UNAUTHORIZED);
    }

    @Test
    /**
     * Test all inputs from an AUTHORIZED state
     * (starting with the ones that are not impacting the current state):
     * only logoff() should move the machine back to IDLE.
     */
    public void testAuthorizedState() throws StateMachineException {
        System.out.println("======= AUTHORIZED STATE TEST =======.");
        stateMachine.start();
        stateMachine.requestAccess();
        stateMachine.authorizeAccess();

        stateMachine.start();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_AUTHORIZED);

        stateMachine.requestAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_AUTHORIZED);

        stateMachine.authorizeAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_AUTHORIZED);

        stateMachine.denyAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_AUTHORIZED);

        stateMachine.logoff();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_IDLE);
    }

    @Test
    /**
     * Test all inputs from an UNAUTHORIZED state
     * (starting with the ones that are not impacting the current state):
     * only logoff() should move the machine back to IDLE.
     */
    public void testUnauthorizedState() throws StateMachineException {
        System.out.println("======= UNAUTHORIZED STATE TEST =======.");
        stateMachine.start();
        stateMachine.requestAccess();
        stateMachine.denyAccess();

        stateMachine.start();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_UNAUTHORIZED);

        stateMachine.requestAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_UNAUTHORIZED);

        stateMachine.authorizeAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_UNAUTHORIZED);

        stateMachine.denyAccess();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_UNAUTHORIZED);

        stateMachine.logoff();
        Assert.assertEquals(stateMachine.state(), StateMachine.STATE_IDLE);
    }

    @Test
    public void testIdentifierAvailability() throws StateMachineException {
        System.out.println("======= IDENTIFIER TEST =======.");
        // Before start() no identifier is assigned (sentinel -1).
        byte identifier = stateMachine.identifier();
        System.out.println("State: " + stateMachine.state());
        System.out.println("Identifier: " + Byte.toUnsignedInt(identifier));
        Assert.assertEquals(-1, identifier);
        stateMachine.start();

        StateMachine sm247 = null;
        StateMachine sm3 = null;

        // Create 255 other state machines, exhausting identifiers 1..255
        // (identifier 0 was taken by stateMachine.start() above... NOTE(review):
        // the assertion below implies ids are handed out starting at 1 — verify
        // against StateMachine's allocation logic).
        for (int i = 1; i <= 255; i++) {
            StateMachine sm = new StateMachine("session" + i, null);
            sm.start();
            byte id = sm.identifier();
            Assert.assertEquals(i, Byte.toUnsignedInt(id));
            if (i == 3) {
                sm3 = sm;
                System.out.println("SM3: " + sm3.toString());
            }
            if (i == 247) {
                sm247 = sm;
                System.out.println("SM247: " + sm247.toString());
            }
        }

        // Simulate the state machine for a specific session and logoff so we can
        // free up a spot for an identifier; let's choose identifier 247, then we free up 3.
        Assert.assertNotNull(sm247);
        sm247.requestAccess();
        sm247.authorizeAccess();
        sm247.logoff();

        Assert.assertNotNull(sm3);
        sm3.requestAccess();
        sm3.authorizeAccess();
        sm3.logoff();

        // Freed identifiers are re-issued lowest-first: 3 before 247.
        StateMachine otherSM3 = new StateMachine("session3b", null);
        otherSM3.start();
        otherSM3.requestAccess();
        byte id3 = otherSM3.identifier();
        Assert.assertEquals(3, Byte.toUnsignedInt(id3));

        StateMachine otherSM247 = new StateMachine("session247b", null);
        otherSM247.start();
        otherSM247.requestAccess();
        byte id247 = otherSM247.identifier();
        Assert.assertEquals(247, Byte.toUnsignedInt(id247));
    }

    @Test
    public void testSessionIdLookups() {
        // Lookups miss until a StateMachine is constructed for the session id.
        String sessionId1 = "session1";
        String sessionId2 = "session2";
        String sessionId3 = "session3";

        StateMachine machine1ShouldBeNull =
                StateMachine.lookupStateMachineBySessionId(sessionId1);
        assertNull(machine1ShouldBeNull);
        StateMachine machine2ShouldBeNull =
                StateMachine.lookupStateMachineBySessionId(sessionId2);
        assertNull(machine2ShouldBeNull);

        StateMachine stateMachine1 = new StateMachine(sessionId1, null);
        StateMachine stateMachine2 = new StateMachine(sessionId2, null);

        assertEquals(stateMachine1,
                     StateMachine.lookupStateMachineBySessionId(sessionId1));
        assertEquals(stateMachine2,
                     StateMachine.lookupStateMachineBySessionId(sessionId2));
        assertNull(StateMachine.lookupStateMachineBySessionId(sessionId3));
    }

    @Test
    public void testIdentifierLookups() throws StateMachineException {
        // Lookups by identifier miss until a machine is started (identifier assigned).
        String sessionId1 = "session1";
        String sessionId2 = "session2";

        StateMachine machine1ShouldBeNull =
                StateMachine.lookupStateMachineById((byte) 1);
        assertNull(machine1ShouldBeNull);
        StateMachine machine2ShouldBeNull =
                StateMachine.lookupStateMachineById((byte) 2);
        assertNull(machine2ShouldBeNull);

        StateMachine stateMachine1 = new StateMachine(sessionId1, null);
        stateMachine1.start();
        StateMachine stateMachine2 = new StateMachine(sessionId2, null);
        stateMachine2.start();

        assertEquals(stateMachine1,
                     StateMachine.lookupStateMachineById(stateMachine1.identifier()));
        assertEquals(stateMachine2,
                     StateMachine.lookupStateMachineById(stateMachine2.identifier()));
    }
}
package com.smeanox.games.sg002.util;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.assets.AssetManager;
import com.badlogic.gdx.assets.loaders.TextureLoader;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.freetype.FreeTypeFontGenerator;
import com.badlogic.gdx.graphics.g2d.freetype.FreeTypeFontGenerator.FreeTypeFontParameter;
import com.smeanox.games.sg002.world.GameObjectType;
import com.smeanox.games.sg002.world.MapObjectType;

/**
 * Manage all Assets: static holder around a libGDX {@link AssetManager} plus
 * runtime-generated FreeType fonts. All state is static; callers drive the
 * lifecycle via loadAssetsSplashScreen / prepareLoadAssets / loadAssets / dispose.
 *
 * @author Benjamin Schmid
 */
public class Assets {
	private static AssetManager manager;

	/**
	 * smeanox logo
	 */
	public static Texture smeanox;
	/**
	 * button
	 */
	public static Texture button;
	/**
	 * background
	 */
	public static Texture background;
	/**
	 * selection
	 */
	public static Texture selection;
	/**
	 * healthbar tex
	 */
	public static Texture healthbar;
	/**
	 * grid
	 */
	public static Texture grid;
	/**
	 * possible field for movement
	 */
	public static Texture possibleFieldMove;
	/**
	 * possible field for fight
	 */
	public static Texture possibleFieldFight;
	/**
	 * possible field for production
	 */
	public static Texture possibleFieldProduce;
	/** possible field for goldmine */
	public static Texture gold;

	/**
	 * liberation font (wrappers so font instances can be swapped on resize)
	 */
	public static BitmapFontRapper liberationMicroShadow;
	public static BitmapFontRapper liberationMicro;
	public static BitmapFontRapper liberationSmall;
	public static BitmapFontRapper liberationMedium;
	public static BitmapFontRapper liberationLarge;

	// True once finishedLoading() has assigned all textures and fonts.
	private static boolean finishedCompletly;

	// Static utility class; never instantiated.
	private Assets() {
	}

	/**
	 * load all assets necessary to display the splash screen
	 * (blocks until the logo texture is available).
	 */
	public static void loadAssetsSplashScreen() {
		if (manager == null) {
			manager = new AssetManager();
		}

		TextureLoader.TextureParameter param = new TextureLoader.TextureParameter();
		param.minFilter = Consts.textureFilter;
		// Mipmaps are only generated when the configured filter actually uses them.
		param.genMipMaps = param.minFilter == Texture.TextureFilter.MipMapLinearLinear;

		manager.load("smeanox.png", Texture.class, param);

		manager.finishLoading();

		smeanox = manager.get("smeanox.png", Texture.class);
	}

	/**
	 * prepare the AssetManager to load the assets
	 * (queues everything; actual loading happens in {@link #loadAssets()})
	 */
	public static void prepareLoadAssets() {
		if (manager == null) {
			manager = new AssetManager();
		}

		liberationSmall = new BitmapFontRapper();
		liberationMedium = new BitmapFontRapper();
		liberationLarge = new BitmapFontRapper();

		TextureLoader.TextureParameter param = new TextureLoader.TextureParameter();
		param.minFilter = Consts.textureFilter;
		param.genMipMaps = param.minFilter == Texture.TextureFilter.MipMapLinearLinear;

		// NOTE(review): button.png is the only texture queued WITHOUT the filter
		// parameter used by every other load below — confirm whether intentional.
		manager.load("images/button.png", Texture.class);
		manager.load("images/background.png", Texture.class, param);
		manager.load("images/selection.png", Texture.class, param);
		manager.load("images/grid.png", Texture.class, param);
		manager.load("images/possibleFieldMove.png", Texture.class, param);
		manager.load("images/possibleFieldFight.png", Texture.class, param);
		manager.load("images/possibleFieldProduce.png", Texture.class, param);
		manager.load("images/gold.png", Texture.class, param);
		manager.load("images/healthbar.png", Texture.class, param);

		finishedCompletly = false;
	}

	/**
	 * Add the asset to be loaded
	 *
	 * @param filename filename of the asset to be loaded
	 * @param type     class of the asset (raw type; callers pass e.g. Texture.class)
	 */
	public static void addToLoadQueue(String filename, Class type){
		TextureLoader.TextureParameter param = new TextureLoader.TextureParameter();
		param.minFilter = Consts.textureFilter;
		param.genMipMaps = param.minFilter == Texture.TextureFilter.MipMapLinearLinear;

		manager.load(filename, type, param);
	}

	/**
	 * load the assets; call once per frame while loading.
	 *
	 * @return true if the assets are loaded (the call AFTER the one that ran
	 *         finishedLoading() is the first to return true)
	 */
	public static boolean loadAssets() {
		if (manager.update()) {
			if (!finishedCompletly) {
				finishedLoading();
				// Deliberately report "not done" on the frame that finalizes
				// assignments; the next call returns true.
				return false;
			}
			return true;
		}
		return false;
	}

	/**
	 * return the loaded asset
	 *
	 * @param name filename of the loaded asset
	 * @param <T>  Type of the asset
	 * @return the asset (unchecked cast; caller's target type must match what was queued)
	 */
	public static <T> T getAsset(String name) {
		return (T) manager.get(name);
	}

	/**
	 * assign the loaded assets to variables
	 */
	private static void finishedLoading() {
		button = manager.get("images/button.png", Texture.class);
		background = manager.get("images/background.png", Texture.class);
		selection = manager.get("images/selection.png", Texture.class);
		grid = manager.get("images/grid.png", Texture.class);
		possibleFieldMove = manager.get("images/possibleFieldMove.png", Texture.class);
		possibleFieldFight = manager.get("images/possibleFieldFight.png", Texture.class);
		possibleFieldProduce = manager.get("images/possibleFieldProduce.png", Texture.class);
		gold = manager.get("images/gold.png", Texture.class);
		healthbar = manager.get("images/healthbar.png", Texture.class);

		setGameObjectTypeTextures();
		setMapObjectTypeTextures();

		createFonts();

		finishedCompletly = true;
	}

	/**
	 * assign the loaded assets to gameObjectTypes
	 */
	private static void setGameObjectTypeTextures(){
		for(GameObjectType gameObjectType : GameObjectType.getAllGameObjectTypes()){
			gameObjectType.setTexture(manager.get(gameObjectType.getTextureName(), Texture.class));
		}
	}

	/**
	 * assign the loaded assets to mapObjectTypes
	 */
	private static void setMapObjectTypeTextures(){
		for(MapObjectType mapObjectType : MapObjectType.getMapObjectTypes()){
			mapObjectType.setTexture(manager.get(mapObjectType.getTextureName(), Texture.class));
		}
	}

	/**
	 * Create fonts on the fly for the active screen size.
	 * Safe to call again after a resize: regenerates all font instances.
	 */
	public static void createFonts() {
		if (liberationMicroShadow == null) {
			liberationMicroShadow = new BitmapFontRapper();
		}
		if (liberationMicro == null) {
			liberationMicro = new BitmapFontRapper();
		}
		if (liberationSmall == null) {
			liberationSmall = new BitmapFontRapper();
		}
		if (liberationMedium == null) {
			liberationMedium = new BitmapFontRapper();
		}
		if (liberationLarge == null) {
			liberationLarge = new BitmapFontRapper();
		}

		// NOTE(review): small/medium/large fonts are disposed before regeneration,
		// but liberationMicro and liberationMicroShadow are overwritten without
		// dispose() — looks like a leak on repeated calls; confirm.
		if (liberationSmall.bitmapFont != null) {
			liberationSmall.bitmapFont.dispose();
		}
		if (liberationMedium.bitmapFont != null) {
			liberationMedium.bitmapFont.dispose();
		}
		if (liberationLarge.bitmapFont != null) {
			liberationLarge.bitmapFont.dispose();
		}

		FreeTypeFontGenerator generator = new FreeTypeFontGenerator(Gdx.files.internal("fonts/LiberationSans-Regular.ttf"));
		FreeTypeFontParameter parameter = new FreeTypeFontParameter();
		// add Euro sign (U+20AC) to the default character set
		parameter.characters = FreeTypeFontGenerator.DEFAULT_CHARS + "\u20AC";
		parameter.size = (int) Math.ceil(Consts.fontSizeMicro * Consts.devScaleY);
		liberationMicro.bitmapFont = generator.generateFont(parameter);
		parameter.size = (int) Math.ceil(Consts.fontSizeSmall * Consts.devScaleY);
		liberationSmall.bitmapFont = generator.generateFont(parameter);
		parameter.size = (int) Math.ceil(Consts.fontSizeMedium * Consts.devScaleY);
		liberationMedium.bitmapFont = generator.generateFont(parameter);
		parameter.size = (int) Math.ceil(Consts.fontSizeLarge * Consts.devScaleY);
		liberationLarge.bitmapFont = generator.generateFont(parameter);
		// Shadowed micro variant reuses the micro size with a small black offset.
		parameter.size = (int) Math.ceil(Consts.fontSizeMicro * Consts.devScaleY);
		parameter.shadowColor = Color.BLACK;
		parameter.shadowOffsetX = (int) Math.ceil(0.5 * Consts.devScaleX);
		parameter.shadowOffsetY = (int) Math.ceil(0.5 * Consts.devScaleY);
		liberationMicroShadow.bitmapFont = generator.generateFont(parameter);
		generator.dispose();
	}

	/**
	 * Dispose all assets
	 */
	public static void dispose() {
		if(manager != null) {
			manager.dispose();
			manager = null;
		}
	}
}
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.api.ads.adwords.lib.selectorfields.v201506.cm;

import com.google.api.ads.adwords.lib.selectorfields.EntityField;
import com.google.api.ads.adwords.lib.selectorfields.Filterable;

/**
 * A {@code Enum} to facilitate the selection of fields for {@code AdGroupAdService}.
 * Generated code: each constant's boolean argument records whether the field may
 * be used in selector predicates (mirrored by the {@code @Filterable} annotation).
 */
public enum AdGroupAdField implements EntityField {

  // Fields constants definitions

  /**
   * List of disapproval reasons.
   */
  @Filterable
  AdGroupAdDisapprovalReasons(true),

  /**
   * True if and only if this ad is not serving because it does not meet trademark policy.
   * This field is only useful when {@link #approvalStatus approvalStatus} is an approved status.
   */
  AdGroupAdTrademarkDisapproved(false),

  /**
   * Approval status.
   */
  @Filterable
  AdGroupCreativeApprovalStatus(true),

  /**
   * The id of the adgroup containing this ad.
   */
  @Filterable
  AdGroupId(true),

  /**
   * The Advertising Digital Identification code for this media, as defined by the American Association of Advertising Agencies, used mainly for television commercials.
   */
  AdvertisingId(false),

  /**
   * Business name of the ad.
   */
  @Filterable
  CallOnlyAdBusinessName(true),

  /**
   * If set to true, enable call tracking for the creative.
   * Enabling call tracking also enables call conversions.
   */
  CallOnlyAdCallTracked(false),

  /**
   * Conversion type to attribute a call conversion to.
   * If not set, then a default conversion type id is used.
   * Only in effect if callTracked is also set to true otherwise this field is ignored.
   */
  CallOnlyAdConversionTypeId(false),

  /**
   * Two letter country code for the ad.
   * Examples: 'US', 'GB'.
   */
  CallOnlyAdCountryCode(false),

  /**
   * First line of ad text.
   */
  @Filterable
  CallOnlyAdDescription1(true),

  /**
   * Second line of ad text.
   */
  @Filterable
  CallOnlyAdDescription2(true),

  /**
   * By default, call conversions are enabled when callTracked is on.
   * To disable call conversions, set this field to true.
   * Only in effect if callTracked is also set to true.
   * If callTracked is set to false, this field is ignored.
   */
  CallOnlyAdDisableCallConversion(false),

  /**
   * Phone number string for the ad.
   * Examples: '(800) 356-9377', "16502531234", "+442001234567"
   */
  CallOnlyAdPhoneNumber(false),

  /**
   * Url to be used for phone number verification.
   */
  CallOnlyAdPhoneNumberVerificationUrl(false),

  /**
   * Media creation date in the format YYYY-MM-DD HH:MM:SS+TZ.
   * This is not updatable and not specifiable.
   */
  CreationTime(false),

  /**
   * A list of final app URLs that will be used on mobile if the user has the specific app installed.
   * <p>This field is used for upgraded urls only, as described at: https://developers.google.com/adwords/api/docs/guides/upgraded-urls
   */
  @Filterable
  CreativeFinalAppUrls(true),

  /**
   * A list of possible final mobile URLs after all cross domain redirects.
   * <p>This field is used for upgraded urls only, as described at: https://developers.google.com/adwords/api/docs/guides/upgraded-urls
   */
  @Filterable
  CreativeFinalMobileUrls(true),

  /**
   * A list of possible final URLs after all cross domain redirects.
   * <p>This field is used for upgraded urls only, as described at: https://developers.google.com/adwords/api/docs/guides/upgraded-urls
   */
  @Filterable
  CreativeFinalUrls(true),

  /**
   * URL template for constructing a tracking URL.
   * <p>This field is used for upgraded urls only, as described at: https://developers.google.com/adwords/api/docs/guides/upgraded-urls
   */
  @Filterable
  CreativeTrackingUrlTemplate(true),

  /**
   * A list of mappings to be used for substituting URL custom parameter tags in the trackingUrlTemplate, finalUrls, and/or finalMobileUrls.
   * <p>This field is used for upgraded urls only, as described at: https://developers.google.com/adwords/api/docs/guides/upgraded-urls
   */
  @Filterable
  CreativeUrlCustomParameters(true),

  /**
   * The first description line
   */
  @Filterable
  Description1(true),

  /**
   * The second description line
   */
  @Filterable
  Description2(true),

  /**
   * The device preference for the ad.
   */
  @Filterable
  DevicePreference(true),

  /**
   * Various dimension sizes for the media.
   * Only applies to image media (and video media for video thumbnails).
   */
  Dimensions(false),

  /**
   * Visible URL.
   */
  @Filterable
  DisplayUrl(true),

  /**
   * The duration of the associated audio, in milliseconds.
   */
  @Filterable
  DurationMillis(true),

  /**
   * Allowed expanding directions.
   * These directions are used to match publishers' ad slots.
   * For example, if a slot allows expansion toward the right, only ads with EXPANDING_RIGHT specified will show up there.
   */
  ExpandingDirections(false),

  /**
   * Status of the experiment row
   */
  ExperimentDataStatus(false),

  /**
   * Status of this AdGroupAd in the experiment.
   * It must be specified while adding experiment data, which could come as a SET as well
   */
  ExperimentDeltaStatus(false),

  /**
   */
  @Filterable
  ExperimentId(true),

  /**
   * The size of the media file in bytes.
   */
  FileSize(false),

  /**
   * The headline of the ad
   */
  @Filterable
  Headline(true),

  /**
   * Height of the dimension
   */
  Height(false),

  /**
   * ID of this ad.
   * This field is ignored when creating ads using {@code AdGroupAdService}.
   */
  @Filterable
  Id(true),

  /**
   * The name label for this ad.
   */
  @Filterable
  ImageCreativeName(true),

  /**
   * The Industry Standard Commercial Identifier code for this media, used mainly for television commercials.
   */
  IndustryStandardCommercialIdentifier(false),

  /**
   * Defines whether or not the ad is cookie targeted.
   * (i.e.
   * user list targeting, or the network's equivalent).
   */
  IsCookieTargeted(false),

  /**
   * Defines whether or not the ad contains a tracking pixel of any kind.
   */
  IsTagged(false),

  /**
   * Defines whether or not the ad is targeting user interest.
   */
  IsUserInterestTargeted(false),

  /**
   * Labels that are attached to the AdGroupAd.
   * To associate an existing {@link Label} to an existing {@link AdGroupAd}, use {@link AdGroupAdService#mutateLabel} with ADD operator.
   * To remove an associated {@link Label} from the {@link AdGroupAd}, use {@link AdGroupAdService#mutateLabel} with REMOVE operator.
   * To filter on {@link Label}s, use one of {@link Predicate.Operator#CONTAINS_ALL}, {@link Predicate.Operator#CONTAINS_ANY}, {@link Predicate.Operator#CONTAINS_NONE} operators with a list of {@link Label} ids.
   */
  @Filterable
  Labels(true),

  /**
   * ID of this media object.
   */
  MediaId(false),

  /**
   * The mime type of the media.
   */
  MimeType(false),

  /**
   * Name of the ad.
   */
  Name(false),

  /**
   * Promotional line for this ad.
   * This text will be displayed in addition to the products.
   */
  @Filterable
  PromotionLine(true),

  /**
   * Indicates whether the audio is ready to play on the web.
   */
  ReadyToPlayOnTheWeb(false),

  /**
   * Media reference ID key.
   */
  ReferenceId(false),

  /**
   * <a href="/adwords/api/docs/appendix/richmediacodes"> Certified Vendor Format ID</a>.
   */
  RichMediaAdCertifiedVendorFormatId(false),

  /**
   * Duration for the ad (in milliseconds).
   * Default is 0.
   */
  RichMediaAdDuration(false),

  /**
   * Impression beacon URL for the ad.
   */
  RichMediaAdImpressionBeaconUrl(false),

  /**
   * Name of the rich media ad.
   */
  RichMediaAdName(false),

  /**
   * Snippet for this ad.
   * Required for standard third-party ads.
   */
  RichMediaAdSnippet(false),

  /**
   * SourceUrl pointing to the third party snippet.
   * For third party in-stream video ads, this stores the VAST URL.
   * For DFA ads, it stores the InRed URL.
   */
  RichMediaAdSourceUrl(false),

  /**
   * Type of this rich media ad, the default is Standard.
   */
  RichMediaAdType(false),

  /**
   * The URL of where the original media was downloaded from (or a file name).
   */
  SourceUrl(false),

  /**
   * The status of the ad.
   * This field is required and should not be {@code null} when it is contained within {@link Operator}s : SET.
   */
  @Filterable
  Status(true),

  /**
   * The streaming URL of the audio.
   */
  StreamingUrl(false),

  /**
   * Duration of this ad (if it contains playable media).
   */
  TemplateAdDuration(false),

  /**
   * Name of this ad.
   */
  TemplateAdName(false),

  /**
   * Group ID of all template ads, which should be created together.
   * Template ads in the same union reference the same data but have different dimensions.
   * Single ads do not have a union ID.
   * If a template ad specifies an ad union with only one ad, no union will be created.
   */
  TemplateAdUnionId(false),

  /**
   * The name of this field.
   */
  TemplateElementFieldName(false),

  /**
   * Text value for text field types.
   * Null if not text field.
   * The field is a text field if type is ADDRESS, ENUM, TEXT, URL, or VISIBLE_URL.
   */
  TemplateElementFieldText(false),

  /**
   * The type of this field.
   */
  TemplateElementFieldType(false),

  /**
   * ID of the template to use.
   */
  @Filterable
  TemplateId(true),

  /**
   * For copies, the ad id of the ad this was or should be copied from.
   */
  TemplateOriginAdId(false),

  /**
   * A list of strings that represents the specific trademarked terms that were found in this ad.
   * The list returned is empty if the ad has no trademarked terms.
   */
  Trademarks(false),

  /**
   * Type of the creative.
   */
  Type(false),

  /**
   * Unique name for this element.
   */
  UniqueName(false),

  /**
   * Destination URL.
   * <p>Do not set this field if you are using upgraded URLs, as described at: https://developers.google.com/adwords/api/docs/guides/upgraded-urls
   */
  @Filterable
  Url(true),

  /**
   * URLs pointing to the resized media for the given sizes.
   * Only applies to image media.
   */
  Urls(false),

  /**
   * Video Types of the ad.
   * (RealMedia, Quick Time etc.)
   */
  VideoTypes(false),

  /**
   * Width of the dimension
   */
  Width(false),

  /**
   * For YouTube-hosted videos, the YouTube video ID (as seen in YouTube URLs) may also be filled in.
   */
  YouTubeVideoIdString(false),

  ;

  // Whether this field may appear in selector predicates.
  private final boolean isFilterable;

  private AdGroupAdField(boolean isFilterable) {
    this.isFilterable = isFilterable;
  }

  public boolean isFilterable() {
    return this.isFilterable;
  }
}
/* * This file is part of the PSL software. * Copyright 2011-2015 University of Maryland * Copyright 2013-2018 The Regents of the University of California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.linqs.psl.experimental.optimizer.conic.program; import static org.junit.Assert.assertTrue; import org.junit.Before; import org.junit.Test; import org.linqs.psl.experimental.optimizer.conic.program.Cone; import org.linqs.psl.experimental.optimizer.conic.program.ConicProgram; import org.linqs.psl.experimental.optimizer.conic.program.LinearConstraint; import org.linqs.psl.experimental.optimizer.conic.program.SecondOrderCone; import org.linqs.psl.experimental.optimizer.conic.program.Variable; import cern.colt.matrix.tdouble.DoubleMatrix1D; /** * Tests {@link ConicProgram}. 
*/ public class ConicProgramTest { private ConicProgram program; private Variable x1, x2; @Before public final void setUp() throws Exception { program = new ConicProgram(); } private void defineSOCP() { LinearConstraint phi1 = (LinearConstraint) program.createConstraint(); LinearConstraint phi2 = (LinearConstraint) program.createConstraint(); LinearConstraint phi3 = (LinearConstraint) program.createConstraint(); LinearConstraint c1 = (LinearConstraint) program.createConstraint(); LinearConstraint c2 = (LinearConstraint) program.createConstraint(); x1 = program.createNonNegativeOrthantCone().getVariable(); x2 = program.createNonNegativeOrthantCone().getVariable(); Variable x3 = program.createNonNegativeOrthantCone().getVariable(); Variable x4 = program.createNonNegativeOrthantCone().getVariable(); Variable x5 = program.createNonNegativeOrthantCone().getVariable(); Variable x6 = program.createNonNegativeOrthantCone().getVariable(); Variable x7 = program.createNonNegativeOrthantCone().getVariable(); Variable x8 = program.createNonNegativeOrthantCone().getVariable(); Variable x9 = program.createNonNegativeOrthantCone().getVariable(); Variable x10 = program.createNonNegativeOrthantCone().getVariable(); phi1.setVariable(x1, 1.0); phi1.setVariable(x3, 1.0); phi1.setVariable(x4, -1.0); phi2.setVariable(x1, -1.0); phi2.setVariable(x2, 1.0); phi2.setVariable(x5, 1.0); phi2.setVariable(x6, -1.0); phi3.setVariable(x2, -1.0); phi3.setVariable(x7, 1.0); phi3.setVariable(x8, -1.0); c1.setVariable(x1, 1.0); c1.setVariable(x9, 1.0); c2.setVariable(x2, 1.0); c2.setVariable(x10, 1.0); phi1.setConstrainedValue(0.7); phi2.setConstrainedValue(0.0); phi3.setConstrainedValue(-0.2); c1.setConstrainedValue(1.0); c2.setConstrainedValue(1.0); /* Squares the variable x3 in the phi1 constraint */ Variable x3Sq = program.createNonNegativeOrthantCone().getVariable(); SecondOrderCone soc = program.createSecondOrderCone(3); Variable phi1OuterSquaredVar = soc.getNthVariable(); Variable 
phi1InnerFeatureVar = null, phi1InnerSquaredVar = null; for (Variable v : soc.getVariables()) { if (!v.equals(phi1OuterSquaredVar)) if (phi1InnerFeatureVar == null) phi1InnerFeatureVar = v; else phi1InnerSquaredVar = v; } LinearConstraint phi1InnerFeatureCon = program.createConstraint(); phi1InnerFeatureCon.setVariable(x3, 1.0); phi1InnerFeatureCon.setVariable(phi1InnerFeatureVar, -1.0); phi1InnerFeatureCon.setConstrainedValue(0.0); LinearConstraint phi1InnerSquaredCon = program.createConstraint(); phi1InnerSquaredCon.setVariable(phi1InnerSquaredVar, 1.0); phi1InnerSquaredCon.setVariable(x3Sq, 0.5); phi1InnerSquaredCon.setConstrainedValue(0.5); LinearConstraint phi1OuterSquaredCon = program.createConstraint(); phi1OuterSquaredCon.setVariable(phi1OuterSquaredVar, 1.0); phi1OuterSquaredCon.setVariable(x3Sq, -0.5); phi1OuterSquaredCon.setConstrainedValue(0.5); /* Squares the variable x5 in the phi2 constraint */ Variable x5Sq = program.createNonNegativeOrthantCone().getVariable(); soc = program.createSecondOrderCone(3); Variable phi2OuterSquaredVar = soc.getNthVariable(); Variable phi2InnerFeatureVar = null, phi2InnerSquaredVar = null; for (Variable v : soc.getVariables()) { if (!v.equals(phi2OuterSquaredVar)) if (phi2InnerFeatureVar == null) phi2InnerFeatureVar = v; else phi2InnerSquaredVar = v; } LinearConstraint phi2InnerFeatureCon = program.createConstraint(); phi2InnerFeatureCon.setVariable(x5, 1.0); phi2InnerFeatureCon.setVariable(phi2InnerFeatureVar, -1.0); phi2InnerFeatureCon.setConstrainedValue(0.0); LinearConstraint phi2InnerSquaredCon = program.createConstraint(); phi2InnerSquaredCon.setVariable(phi2InnerSquaredVar, 1.0); phi2InnerSquaredCon.setVariable(x5Sq, 0.5); phi2InnerSquaredCon.setConstrainedValue(0.5); LinearConstraint phi2OuterSquaredCon = program.createConstraint(); phi2OuterSquaredCon.setVariable(phi2OuterSquaredVar, 1.0); phi2OuterSquaredCon.setVariable(x5Sq, -0.5); phi2OuterSquaredCon.setConstrainedValue(0.5); /* Squares the variable x7 in the 
phi3 constraint */ Variable x7Sq = program.createNonNegativeOrthantCone().getVariable(); soc = program.createSecondOrderCone(3); Variable phi3OuterSquaredVar = soc.getNthVariable(); Variable phi3InnerFeatureVar = null, phi3InnerSquaredVar = null; for (Variable v : soc.getVariables()) { if (!v.equals(phi3OuterSquaredVar)) if (phi3InnerFeatureVar == null) phi3InnerFeatureVar = v; else phi3InnerSquaredVar = v; } LinearConstraint phi3InnerFeatureCon = program.createConstraint(); phi3InnerFeatureCon.setVariable(x7, 1.0); phi3InnerFeatureCon.setVariable(phi3InnerFeatureVar, -1.0); phi3InnerFeatureCon.setConstrainedValue(0.0); LinearConstraint phi3InnerSquaredCon = program.createConstraint(); phi3InnerSquaredCon.setVariable(phi3InnerSquaredVar, 1.0); phi3InnerSquaredCon.setVariable(x7Sq, 0.5); phi3InnerSquaredCon.setConstrainedValue(0.5); LinearConstraint phi3OuterSquaredCon = program.createConstraint(); phi3OuterSquaredCon.setVariable(phi3OuterSquaredVar, 1.0); phi3OuterSquaredCon.setVariable(x7Sq, -0.5); phi3OuterSquaredCon.setConstrainedValue(0.5); x1.setObjectiveCoefficient(0.0); x2.setObjectiveCoefficient(0.0); x3.setObjectiveCoefficient(0.0); x4.setObjectiveCoefficient(0.0); x5.setObjectiveCoefficient(0.0); x6.setObjectiveCoefficient(0.0); x7.setObjectiveCoefficient(0.0); x8.setObjectiveCoefficient(0.0); x9.setObjectiveCoefficient(0.0); x10.setObjectiveCoefficient(0.0); x3Sq.setObjectiveCoefficient(1.0); x5Sq.setObjectiveCoefficient(2.0); x7Sq.setObjectiveCoefficient(3.0); } /** Tests the creation of a second-order cone program. 
*/ @Test public void testCreateSOCP() { defineSOCP(); assertTrue(program.getNumNNOC() == 13); assertTrue(program.gtNumSOC() == 3); assertTrue(program.getNumRSOC() == 0); assertTrue(program.getNonNegativeOrthantCones().size() == 13); assertTrue(program.getSecondOrderCones().size() == 3); assertTrue(program.getCones().size() == 16); assertTrue(program.getConstraints().size() == 14); } /** Tests checking out matrices for a second-order cone program. */ @Test public void testCheckOutSOCP() { defineSOCP(); program.checkOutMatrices(); assertTrue(program.getA().rows() == 14); assertTrue(program.getA().columns() == 22); assertTrue(program.getX().size() == 22); assertTrue(program.getB().size() == 14); assertTrue(program.getW().size() == 14); assertTrue(program.getS().size() == 22); assertTrue(program.getC().size() == 22); assertTrue(program.getC().cardinality() == 3); } /** Tests checking in matrices for a second-order cone program. */ @Test public void testCheckInSOCP() { defineSOCP(); double newPrimalValue1 = x1.getValue() + 1.0; double newPrimalValue2 = x2.getValue() + 2.0; double newDualValue1 = x1.getDualValue() + 1.0; double newDualValue2 = x2.getDualValue() + 2.0; program.checkOutMatrices(); int index1 = program.getIndex(x1); int index2 = program.getIndex(x2); DoubleMatrix1D x = program.getX(); x.set(index1, newPrimalValue1); x.set(index2, newPrimalValue2); DoubleMatrix1D s = program.getS(); s.set(index1, newDualValue1); s.set(index2, newDualValue2); program.checkInMatrices(); assertTrue(x1.getValue() == newPrimalValue1); assertTrue(x2.getValue() == newPrimalValue2); assertTrue(x1.getDualValue() == newDualValue1); assertTrue(x2.getDualValue() == newDualValue2); newPrimalValue1 = x1.getValue() + 1.0; newPrimalValue2 = x2.getValue() + 2.0; newDualValue1 = x1.getDualValue() + 1.0; newDualValue2 = x2.getDualValue() + 2.0; x1.setValue(newPrimalValue1); x2.setValue(newPrimalValue2); x1.setDualValue(newDualValue1); x2.setDualValue(newDualValue2); program.checkOutMatrices(); 
program.checkInMatrices(); assertTrue(x1.getValue() == newPrimalValue1); assertTrue(x2.getValue() == newPrimalValue2); assertTrue(x1.getDualValue() == newDualValue1); assertTrue(x2.getDualValue() == newDualValue2); } /** Tests deleting the components of a second-order cone program. */ @Test public void testDeleteSOCP() { defineSOCP(); for (Cone cone : program.getCones()) cone.delete(); for (LinearConstraint lc : program.getConstraints()) lc.delete(); assertTrue(program.getNumNNOC() == 0); assertTrue(program.gtNumSOC() == 0); assertTrue(program.getNumRSOC() == 0); assertTrue(program.getNonNegativeOrthantCones().size() == 0); assertTrue(program.getSecondOrderCones().size() == 0); assertTrue(program.getCones().size() == 0); assertTrue(program.getConstraints().size() == 0); } /** Tests creating, deleting, and then recreating a second-order cone program. */ @Test public void testRecreateSOCP() { defineSOCP(); for (Cone cone : program.getCones()) cone.delete(); for (LinearConstraint lc : program.getConstraints()) lc.delete(); defineSOCP(); assertTrue(program.getNumNNOC() == 13); assertTrue(program.gtNumSOC() == 3); assertTrue(program.getNumRSOC() == 0); assertTrue(program.getNonNegativeOrthantCones().size() == 13); assertTrue(program.getSecondOrderCones().size() == 3); assertTrue(program.getCones().size() == 16); assertTrue(program.getConstraints().size() == 14); program.checkOutMatrices(); assertTrue(program.getA().rows() == 14); assertTrue(program.getA().columns() == 22); assertTrue(program.getX().size() == 22); assertTrue(program.getB().size() == 14); assertTrue(program.getW().size() == 14); assertTrue(program.getS().size() == 22); assertTrue(program.getC().size() == 22); assertTrue(program.getC().cardinality() == 3); } /** Tests creating more non-negative orthant cones after checking matrices in. 
*/ @Test public void testCreateNNOCAfterCheckIn() { defineSOCP(); program.checkOutMatrices(); program.checkInMatrices(); assertTrue(program.getNumNNOC() == 13); assertTrue(program.gtNumSOC() == 3); assertTrue(program.getNumRSOC() == 0); program.createNonNegativeOrthantCone(); program.createNonNegativeOrthantCone(); assertTrue(program.getNonNegativeOrthantCones().size() == 15); assertTrue(program.getSecondOrderCones().size() == 3); assertTrue(program.getCones().size() == 18); assertTrue(program.getConstraints().size() == 14); } /** Tests creating more constraints after checking matrices in. */ @Test public void testCreateConstraintAfterCheckIn() { defineSOCP(); program.checkOutMatrices(); program.checkInMatrices(); assertTrue(program.getNumNNOC() == 13); assertTrue(program.gtNumSOC() == 3); assertTrue(program.getNumRSOC() == 0); program.createConstraint().setVariable(x1, 1.0); program.createConstraint(); assertTrue(program.getNonNegativeOrthantCones().size() == 13); assertTrue(program.getSecondOrderCones().size() == 3); assertTrue(program.getCones().size() == 16); assertTrue(program.getConstraints().size() == 16); } /** Tests checking out matrices after checking them in and modifying the program. */ @Test public void testCheckOutModifiedSOCP() { defineSOCP(); program.checkOutMatrices(); program.checkInMatrices(); program.createNonNegativeOrthantCone(); program.createNonNegativeOrthantCone(); program.createConstraint().setVariable(x1, 1.0); program.createConstraint().setVariable(x2, 1.0); program.checkOutMatrices(); assertTrue(program.getA().rows() == 16); assertTrue(program.getA().columns() == 24); assertTrue(program.getX().size() == 24); assertTrue(program.getB().size() == 16); assertTrue(program.getW().size() == 16); assertTrue(program.getS().size() == 24); assertTrue(program.getC().size() == 24); assertTrue(program.getC().cardinality() == 3); } /** Tests adding the same variable twice to a linear constraint. 
*/ @Test public void testAddDuplicateVariableToConstraint() { Variable x = program.createNonNegativeOrthantCone().getVariable(); LinearConstraint lc = program.createConstraint(); lc.setVariable(x, 1.0); lc.setVariable(x, -1.0); assertTrue(lc.getVariables().size() == 1); assertTrue(lc.getVariables().get(x) == -1.0); } }
/* * Copyright 2014 - 2017 Yannick Watier * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ca.watier.facturomax.javafx; import ca.watier.facturomax.enums.UiEventType; import ca.watier.facturomax.utils.Environment; import javafx.application.Application; import javafx.application.Platform; import javafx.concurrent.Worker; import javafx.geometry.Insets; import javafx.geometry.Pos; import javafx.scene.Scene; import javafx.scene.image.Image; import javafx.scene.image.ImageView; import javafx.scene.layout.*; import javafx.scene.paint.Color; import javafx.scene.text.Font; import javafx.scene.text.FontWeight; import javafx.scene.text.Text; import javafx.scene.web.WebEngine; import javafx.scene.web.WebView; import javafx.stage.FileChooser; import javafx.stage.Stage; import javafx.stage.StageStyle; import org.apache.commons.io.FileUtils; import org.slf4j.LoggerFactory; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.web.client.RestTemplate; import org.springframework.web.socket.TextMessage; import org.springframework.web.socket.WebSocketSession; import org.springframework.web.socket.handler.AbstractWebSocketHandler; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.events.EventTarget; import org.w3c.dom.html.HTMLAnchorElement; import javax.websocket.*; import java.io.File; import java.io.IOException; import 
java.io.InputStream; import java.net.CookieHandler; import java.net.CookieManager; import java.net.URI; import java.net.URISyntaxException; import java.util.List; public class UiApp extends Application { private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(UiApp.class); private final static FileChooser FILE_CHOOSER = new FileChooser(); private static final String FACTUROMAX = "Facturomax"; private static final Stage SPLASH_SCREEN_STAGE = new Stage(); private static final RestTemplate REST_TEMPLATE = new RestTemplate(); private static Image APP_ICON = null; private static Stage stage; private static WebEngine webEngine; private static Scene scene; private static boolean isWebSocketConnected = false; static { CookieHandler.setDefault(new CookieManager()); InputStream resourceAsStream = UiApp.class.getClassLoader().getResourceAsStream("/img/facturomax.png"); if (resourceAsStream != null) { APP_ICON = new Image(resourceAsStream); FILE_CHOOSER.getExtensionFilters().add(new FileChooser.ExtensionFilter("PDF documents (*.pdf)", "*.pdf")); SPLASH_SCREEN_STAGE.getIcons().add(APP_ICON); SPLASH_SCREEN_STAGE.setTitle(FACTUROMAX); SPLASH_SCREEN_STAGE.initStyle(StageStyle.UNDECORATED); } } public static void hideLoadingScreenAndShowMainApp() { SPLASH_SCREEN_STAGE.close(); WebView browser = new WebView(); webEngine = browser.getEngine(); scene = new Scene(browser, 850, 600); webEngine.getLoadWorker().stateProperty().addListener((observableValue, oldLoc, newLoc) -> { if (Worker.State.SUCCEEDED.equals(observableValue.getValue())) { { webEngine.executeScript("window.environment.isJavafx = true"); if (!isWebSocketConnected) { Environment.createNewUiWebSocketSession(URI.create("ws://127.0.0.1:8080/websocket/ui")); isWebSocketConnected = true; } } } }); webEngine.load("https://127.0.0.1:8443/app/index.html"); stage.setScene(scene); stage.show(); } @Override public void start(Stage mainStage) throws URISyntaxException { stage = mainStage; stage.getIcons().add(APP_ICON); 
Text text = new Text(FACTUROMAX); text.setFont(Font.font("Verdana", FontWeight.EXTRA_BOLD, 45)); text.setFill(Color.rgb(61, 171, 255)); Image image = new Image(getClass().getResource("/img/spinner.gif").toURI().toString()); ImageView imageView = new ImageView(image); BorderPane root = new BorderPane(); BorderPane.setAlignment(text, Pos.BOTTOM_CENTER); BorderPane.setMargin(text, new Insets(25, 0, 0, 0)); root.setTop(text); BorderPane.setAlignment(imageView, Pos.CENTER); root.setCenter(imageView); root.setBorder(new Border(new BorderStroke(Color.grayRgb(75), BorderStrokeStyle.SOLID, CornerRadii.EMPTY, new BorderWidths(10, 10, 10, 10)))); SPLASH_SCREEN_STAGE.setResizable(false); SPLASH_SCREEN_STAGE.setScene(new Scene(root, 320, 270, Color.grayRgb(93))); SPLASH_SCREEN_STAGE.show(); } @Override public void stop() throws Exception { Environment.shutdown(); super.stop(); } public static class WebSocketHandler extends AbstractWebSocketHandler { @Override protected void handleTextMessage(WebSocketSession session, TextMessage message) { final UiEventType eventType = UiEventType.valueOf(message.getPayload()); Platform.runLater(() -> { switch (eventType) { case REFRESH_INVOICE: Document document = webEngine.getDocument(); NodeList nodeList = document.getElementsByTagName("a"); for (int i = 0; i < nodeList.getLength(); i++) { Node node = nodeList.item(i); EventTarget eventTarget = (EventTarget) node; HTMLAnchorElement currentAnchorElement = (HTMLAnchorElement) eventTarget; String currentHref = currentAnchorElement.getHref(); if (currentHref == null) { continue; } if (currentHref.matches("https://127\\.0\\.0\\.1:8443/api/private/invoice/(en|fr)/\\w*/\\d/\\d/1")) { eventTarget.addEventListener("click", evt -> { EventTarget target = evt.getCurrentTarget(); HTMLAnchorElement anchorElement = (HTMLAnchorElement) target; String href = anchorElement.getHref(); //Download the file HttpEntity<byte[]> response = REST_TEMPLATE.exchange(href, HttpMethod.GET, null, byte[].class); HttpHeaders 
httpHeaders = response.getHeaders(); List<String> contentList = httpHeaders.get(HttpHeaders.CONTENT_DISPOSITION); if (response.hasBody() && contentList != null && contentList.size() == 1) { String contentDisposition = contentList.get(0); String filename = contentDisposition.substring(18, contentDisposition.length() - 1); FILE_CHOOSER.setInitialFileName(filename); try { File saveFile = FILE_CHOOSER.showSaveDialog(scene.getWindow()); if (saveFile != null) { FileUtils.writeByteArrayToFile(saveFile, response.getBody()); } } catch (IOException e) { LOGGER.error(e.getMessage(), e); } } evt.preventDefault(); // cancel the event, to prevent the page to load the selected invoice in the browser }, false); } } break; } }); } } @ClientEndpoint public static class UiWSClient { @OnOpen public void onOpen(Session session, EndpointConfig endpointConfig) { LOGGER.info(String.format("New UI web socket opened (%s)", session.getId())); } @OnClose public void onClose(Session session, CloseReason closeReason) { LOGGER.info(String.format("Closed UI (%s)", session.getId())); } } }
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.events.zeromq; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.common.collect.EvictingQueue; import net.jodah.failsafe.Failsafe; import net.jodah.failsafe.RetryPolicy; import org.openqa.selenium.events.Event; import org.openqa.selenium.events.EventBus; import org.openqa.selenium.events.EventListener; import org.openqa.selenium.events.EventName; import org.openqa.selenium.grid.security.Secret; import org.openqa.selenium.internal.Require; import org.openqa.selenium.json.Json; import org.openqa.selenium.json.JsonException; import org.openqa.selenium.json.JsonOutput; import org.zeromq.SocketType; import org.zeromq.ZContext; import org.zeromq.ZMQ; import java.net.Inet6Address; import java.net.InetAddress; import java.net.URI; import java.net.URISyntaxException; import java.net.UnknownHostException; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Queue; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; 
import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.logging.Level; import java.util.logging.Logger; class UnboundZmqEventBus implements EventBus { static final EventName REJECTED_EVENT = new EventName("selenium-rejected-event"); private static final Logger LOG = Logger.getLogger(EventBus.class.getName()); private static final Json JSON = new Json(); private final AtomicBoolean pollingStarted = new AtomicBoolean(false); private final ExecutorService socketPollingExecutor; private final ExecutorService socketPublishingExecutor; private final ExecutorService listenerNotificationExecutor; private final Map<EventName, List<Consumer<Event>>> listeners = new ConcurrentHashMap<>(); private final Queue<UUID> recentMessages = EvictingQueue.create(128); private final String encodedSecret; private ZMQ.Poller poller; private ZMQ.Socket pub; private ZMQ.Socket sub; UnboundZmqEventBus(ZContext context, String publishConnection, String subscribeConnection, Secret secret) { Require.nonNull("Secret", secret); StringBuilder builder = new StringBuilder(); try (JsonOutput out = JSON.newOutput(builder)) { out.setPrettyPrint(false).writeClassName(false).write(secret); } this.encodedSecret = builder.toString(); this.socketPollingExecutor = Executors.newSingleThreadExecutor(r -> { Thread thread = new Thread(r); thread.setName("Event Bus Poller"); thread.setDaemon(true); return thread; }); this.socketPublishingExecutor = Executors.newSingleThreadExecutor(r -> { Thread thread = new Thread(r); thread.setName("Event Bus Publisher"); thread.setDaemon(true); return thread; }); this.listenerNotificationExecutor = Executors.newFixedThreadPool( Math.max(Runtime.getRuntime().availableProcessors() / 2, 2), // At least two threads r -> { Thread thread = new Thread(r); thread.setName("Event Bus Listener Notifier"); thread.setDaemon(true); return thread; }); String connectionMessage = String.format("Connecting to %s and %s", publishConnection, 
subscribeConnection); LOG.info(connectionMessage); RetryPolicy<Object> retryPolicy = new RetryPolicy<>() .withMaxAttempts(5) .withDelay(5, 10, ChronoUnit.SECONDS) .onFailedAttempt(e -> LOG.log(Level.WARNING, String.format("%s failed", connectionMessage))) .onRetry(e -> LOG.log(Level.WARNING, String.format("Failure #%s. Retrying.", e.getAttemptCount()))) .onRetriesExceeded(e -> LOG.log(Level.WARNING, "Connection aborted.")); // Access to the zmq socket is safe here: no threads. Failsafe.with(retryPolicy).run( () -> { sub = context.createSocket(SocketType.SUB); sub.setIPv6(isSubAddressIPv6(publishConnection)); sub.connect(publishConnection); sub.subscribe(new byte[0]); pub = context.createSocket(SocketType.PUB); pub.setIPv6(isSubAddressIPv6(subscribeConnection)); pub.connect(subscribeConnection); } ); // Connections are already established this.poller = context.createPoller(1); this.poller.register(Objects.requireNonNull(sub), ZMQ.Poller.POLLIN); LOG.info("Sockets created"); socketPollingExecutor.submit(new PollingRunnable(secret)); // Give ourselves up to a second to connect, using The World's Worst heuristic. If we don't // manage to connect, it's not the end of the world, as the socket we're connecting to may not // be up yet. 
while (!pollingStarted.get()) { try { Thread.sleep(1000); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } LOG.info("Event bus ready"); } @Override public boolean isReady() { return !socketPollingExecutor.isShutdown(); } private boolean isSubAddressIPv6(String connection) { try { URI uri = new URI(connection); if ("inproc".equals(uri.getScheme())) { return false; } return InetAddress.getByName(uri.getHost()) instanceof Inet6Address; } catch (UnknownHostException | URISyntaxException e) { LOG.log(Level.WARNING, String.format("Could not determine if the address %s is IPv6 or IPv4", connection), e); } return false; } @Override public void addListener(EventListener<?> listener) { Require.nonNull("Listener", listener); List<Consumer<Event>> typeListeners = listeners.computeIfAbsent(listener.getEventName(), t -> new LinkedList<>()); typeListeners.add(listener); } @Override public void fire(Event event) { Require.nonNull("Event to send", event); socketPublishingExecutor.execute(() -> { pub.sendMore(event.getType().getName().getBytes(UTF_8)); pub.sendMore(encodedSecret.getBytes(UTF_8)); pub.sendMore(event.getId().toString().getBytes(UTF_8)); pub.send(event.getRawData().getBytes(UTF_8)); }); } @Override public void close() { socketPollingExecutor.shutdownNow(); socketPublishingExecutor.shutdownNow(); listenerNotificationExecutor.shutdownNow(); poller.close(); if (sub != null) { sub.close(); } if (pub != null) { pub.close(); } } private class PollingRunnable implements Runnable { private Secret secret; public PollingRunnable(Secret secret) { this.secret = secret; } @Override public void run() { while (!Thread.currentThread().isInterrupted()) { try { int count = poller.poll(150); pollingStarted.lazySet(true); for (int i = 0; i < count; i++) { if (poller.pollin(i)) { ZMQ.Socket socket = poller.getSocket(i); EventName eventName = new EventName(new String(socket.recv(), UTF_8)); Secret eventSecret; String 
receivedEventSecret = new String(socket.recv(), UTF_8); try { eventSecret = JSON.toType(receivedEventSecret, Secret.class); } catch (JsonException e) { rejectEvent(eventName, receivedEventSecret); return; } UUID id = UUID.fromString(new String(socket.recv(), UTF_8)); String data = new String(socket.recv(), UTF_8); // Don't bother doing more work if we've seen this message. if (recentMessages.contains(id)) { return; } Object converted = JSON.toType(data, Object.class); Event event = new Event(id, eventName, converted); recentMessages.add(id); if (!Secret.matches(secret, eventSecret)) { rejectEvent(eventName, data); return; } notifyListeners(eventName, event); } } } catch (Exception e) { if (e.getCause() instanceof AssertionError) { // Do nothing. } else { LOG.log(Level.WARNING, e, () -> "Caught exception while polling for event bus messages: " + e.getMessage()); } } } } private void rejectEvent(EventName eventName, String data) { Event rejectedEvent = new Event(REJECTED_EVENT, new ZeroMqEventBus.RejectedEvent(eventName, data)); LOG.log(Level.SEVERE, "Received message without a valid secret. Rejecting. {0} -> {1}", new Object[]{rejectedEvent, data}); // String formatting only applied if needed notifyListeners(REJECTED_EVENT, rejectedEvent); } private void notifyListeners(EventName eventName, Event event) { List<Consumer<Event>> eventListeners = listeners.getOrDefault(eventName, new ArrayList<>()); eventListeners .forEach(listener -> listenerNotificationExecutor.submit(() -> { try { listener.accept(event); } catch (Exception e) { LOG.log(Level.WARNING, e, () -> "Caught exception from listener: " + listener); } })); } } }
package org.apereo.cas.oidc.token;

import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.oidc.OidcConstants;
import org.apereo.cas.services.OidcRegisteredService;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.support.oauth.OAuth20Constants;
import org.apereo.cas.support.oauth.OAuth20ResponseTypes;
import org.apereo.cas.support.oauth.services.OAuthRegisteredService;
import org.apereo.cas.ticket.BaseIdTokenGeneratorService;
import org.apereo.cas.ticket.OidcTokenSigningAndEncryptionService;
import org.apereo.cas.ticket.TicketGrantingTicket;
import org.apereo.cas.ticket.accesstoken.AccessToken;
import org.apereo.cas.ticket.registry.TicketRegistry;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.DigestUtils;
import org.apereo.cas.util.EncodingUtils;
import org.apereo.cas.util.Pac4jUtils;

import com.google.common.base.Preconditions;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.codec.digest.MessageDigestAlgorithms;
import org.apache.commons.lang3.ArrayUtils;
import org.jose4j.jws.AlgorithmIdentifiers;
import org.jose4j.jwt.JwtClaims;
import org.jose4j.jwt.NumericDate;
import org.pac4j.core.context.J2EContext;
import org.pac4j.core.profile.UserProfile;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Map.Entry;
import java.util.stream.Stream;

/**
 * This is {@link OidcIdTokenGeneratorService}.
 *
 * <p>Builds the OIDC id token for a given access token: it assembles the JWT
 * claim set ({@code iss}, {@code aud}, {@code exp}, {@code iat}, {@code nbf},
 * {@code sub}, {@code acr}, {@code amr}, {@code at_hash}, configured principal
 * attributes) and then delegates signing/encryption and finalization to
 * {@link BaseIdTokenGeneratorService#encodeAndFinalizeToken}.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@Slf4j
@Getter
public class OidcIdTokenGeneratorService extends BaseIdTokenGeneratorService {

    public OidcIdTokenGeneratorService(final CasConfigurationProperties casProperties,
                                       final OidcTokenSigningAndEncryptionService signingService,
                                       final ServicesManager servicesManager,
                                       final TicketRegistry ticketRegistry) {
        super(casProperties, signingService, servicesManager, ticketRegistry);
    }

    @Override
    public String generate(final HttpServletRequest request,
                           final HttpServletResponse response,
                           final AccessToken accessToken,
                           final long timeoutInSeconds,
                           final OAuth20ResponseTypes responseType,
                           final OAuthRegisteredService registeredService) {

        // Id tokens only make sense for OIDC-registered services; fail fast otherwise.
        if (!(registeredService instanceof OidcRegisteredService)) {
            throw new IllegalArgumentException("Registered service instance is not an OIDC service");
        }
        val oidcRegisteredService = (OidcRegisteredService) registeredService;
        val context = Pac4jUtils.getPac4jJ2EContext(request, response);
        LOGGER.trace("Attempting to produce claims for the id token [{}]", accessToken);
        val authenticatedProfile = getAuthenticatedProfile(request, response);
        // Build the claim set first, then sign/encrypt and finalize it into the serialized token.
        val claims = buildJwtClaims(request, accessToken, timeoutInSeconds,
            oidcRegisteredService, authenticatedProfile, context, responseType);

        return encodeAndFinalizeToken(claims, oidcRegisteredService, accessToken);
    }

    /**
     * Produce claims as jwt.
     *
     * @param request          the request
     * @param accessTokenId    the access token id
     * @param timeoutInSeconds the timeoutInSeconds
     * @param service          the service
     * @param profile          the user profile
     * @param context          the context
     * @param responseType     the response type
     * @return the jwt claims
     */
    protected JwtClaims buildJwtClaims(final HttpServletRequest request,
                                       final AccessToken accessTokenId,
                                       final long timeoutInSeconds,
                                       final OidcRegisteredService service,
                                       final UserProfile profile,
                                       final J2EContext context,
                                       final OAuth20ResponseTypes responseType) {
        val authentication = accessTokenId.getAuthentication();
        val principal = authentication.getPrincipal();
        val oidc = casProperties.getAuthn().getOidc();

        val claims = new JwtClaims();
        // jti: the id of the OAuth callback service ticket tied to this TGT (see getOAuthServiceTicket).
        claims.setJwtId(getOAuthServiceTicket(accessTokenId.getTicketGrantingTicket()).getKey());
        claims.setIssuer(oidc.getIssuer());
        // aud: the relying party's client id.
        claims.setAudience(service.getClientId());

        // exp = now + requested timeout; nbf is back-dated by the configured clock skew (minutes).
        val expirationDate = NumericDate.now();
        expirationDate.addSeconds(timeoutInSeconds);
        claims.setExpirationTime(expirationDate);
        claims.setIssuedAtToNow();
        claims.setNotBeforeMinutesInThePast(oidc.getSkew());
        claims.setSubject(principal.getId());

        val mfa = casProperties.getAuthn().getMfa();
        val attributes = authentication.getAttributes();

        // acr: taken from the MFA authentication-context attribute, if present.
        // NOTE: the local variable is literally named "val" (shadowing lombok.val inside this block).
        if (attributes.containsKey(mfa.getAuthenticationContextAttribute())) {
            val val = CollectionUtils.toCollection(attributes.get(mfa.getAuthenticationContextAttribute()));
            claims.setStringClaim(OidcConstants.ACR, val.iterator().next().toString());
        }
        // amr: the list of authentication handlers that successfully executed.
        if (attributes.containsKey(AuthenticationHandler.SUCCESSFUL_AUTHENTICATION_HANDLERS)) {
            val val = CollectionUtils.toCollection(attributes.get(AuthenticationHandler.SUCCESSFUL_AUTHENTICATION_HANDLERS));
            claims.setStringListClaim(OidcConstants.AMR, val.toArray(ArrayUtils.EMPTY_STRING_ARRAY));
        }

        claims.setStringClaim(OAuth20Constants.CLIENT_ID, service.getClientId());
        // state/nonce are echoed back from the original authorization request attributes (may be null).
        claims.setClaim(OAuth20Constants.STATE, attributes.get(OAuth20Constants.STATE));
        claims.setClaim(OAuth20Constants.NONCE, attributes.get(OAuth20Constants.NONCE));
        claims.setClaim(OidcConstants.CLAIM_AT_HASH, generateAccessTokenHash(accessTokenId, service));

        // Copy only the principal attributes explicitly allowed by the OIDC claims configuration.
        principal.getAttributes().entrySet().stream()
            .filter(entry -> oidc.getClaims().contains(entry.getKey()))
            .forEach(entry -> claims.setClaim(entry.getKey(), entry.getValue()));

        // Fall back to the authenticated profile id for preferred_username if no attribute supplied it.
        if (!claims.hasClaim(OidcConstants.CLAIM_PREFERRED_USERNAME)) {
            claims.setClaim(OidcConstants.CLAIM_PREFERRED_USERNAME, profile.getId());
        }
        return claims;
    }

    /**
     * Gets oauth service ticket.
     *
     * <p>Scans the services and proxy-granting tickets attached to the TGT and returns the
     * entry whose registered service id matches this server's OAuth callback-authorize URL.
     *
     * @param tgt the tgt
     * @return the o auth service ticket
     * @throws IllegalStateException if no matching service ticket exists in the context
     */
    protected Entry<String, Service> getOAuthServiceTicket(final TicketGrantingTicket tgt) {
        val oAuthCallbackUrl = casProperties.getServer().getPrefix()
            + OAuth20Constants.BASE_OAUTH20_URL + '/'
            + OAuth20Constants.CALLBACK_AUTHORIZE_URL_DEFINITION;
        val oAuthServiceTicket = Stream.concat(
            tgt.getServices().entrySet().stream(),
            tgt.getProxyGrantingTickets().entrySet().stream())
            .filter(e -> {
                val service = servicesManager.findServiceBy(e.getValue());
                return service != null && service.getServiceId().equals(oAuthCallbackUrl);
            })
            .findFirst();
        Preconditions.checkState(oAuthServiceTicket.isPresent(),
            "Cannot find service ticket issued to " + oAuthCallbackUrl + " as part of the authentication context");
        return oAuthServiceTicket.get();
    }

    /**
     * Generate access token hash string.
     *
     * <p>Digests the access token id with the hash matching the service's signing algorithm,
     * keeps the left-most half of the digest and base64url-encodes it — the construction used
     * for the OIDC {@code at_hash} claim.
     *
     * @param accessTokenId the access token id
     * @param service       the service
     * @return the string
     */
    protected String generateAccessTokenHash(final AccessToken accessTokenId,
                                             final OidcRegisteredService service) {
        val tokenBytes = accessTokenId.getId().getBytes(StandardCharsets.UTF_8);
        val hashAlg = getSigningHashAlgorithm(service);

        LOGGER.debug("Digesting access token hash via algorithm [{}]", hashAlg);
        val digested = DigestUtils.rawDigest(hashAlg, tokenBytes);
        // Left-most half of the digest, per the at_hash definition.
        val hashBytesLeftHalf = Arrays.copyOf(digested, digested.length / 2);
        return EncodingUtils.encodeUrlSafeBase64(hashBytesLeftHalf);
    }

    /**
     * Gets signing hash algorithm.
     *
     * <p>Maps the service's JSON web key signing algorithm (RS256/RS384/RS512) to the
     * corresponding SHA digest. Non-RSA algorithms are currently unsupported and rejected.
     *
     * @param service the service
     * @return the signing hash algorithm
     * @throws IllegalArgumentException if the algorithm cannot be mapped
     */
    protected String getSigningHashAlgorithm(final OidcRegisteredService service) {
        val alg = signingService.getJsonWebKeySigningAlgorithm(service);
        LOGGER.debug("Signing algorithm specified by service [{}] is [{}]", service.getServiceId(), alg);
        if (AlgorithmIdentifiers.RSA_USING_SHA512.equalsIgnoreCase(alg)) {
            return MessageDigestAlgorithms.SHA_512;
        }
        if (AlgorithmIdentifiers.RSA_USING_SHA384.equalsIgnoreCase(alg)) {
            return MessageDigestAlgorithms.SHA_384;
        }
        if (AlgorithmIdentifiers.RSA_USING_SHA256.equalsIgnoreCase(alg)) {
            return MessageDigestAlgorithms.SHA_256;
        }
        throw new IllegalArgumentException("Could not determine the hash algorithm for the id token issued to service "
            + service.getServiceId());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.testutils.publishing;

import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Objects.isNull;
import static java.util.Objects.requireNonNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNoneBlank;

import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Map;
import org.apache.beam.sdk.testutils.NamedTestResult;
import org.apache.commons.compress.utils.Charsets;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.entity.GzipCompressingEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Publishes test metrics to an InfluxDB instance over its HTTP {@code /write} endpoint, using
 * the InfluxDB line protocol. Publishing is best-effort: failures are logged, never thrown.
 */
@SuppressWarnings({
  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
public final class InfluxDBPublisher {
  private static final Logger LOG = LoggerFactory.getLogger(InfluxDBPublisher.class);

  private InfluxDBPublisher() {}

  /**
   * Publishes Nexmark benchmark results.
   *
   * @param results one map per measurement; expected keys include "measurement", "runner",
   *     "runtimeMs", "numResults" and "timestamp"
   * @param settings connection settings; must not be null
   * @param tags optional extra tags appended to every data point (may be null or empty)
   */
  public static void publishNexmarkResults(
      final Collection<Map<String, Object>> results,
      final InfluxDBSettings settings,
      final Map<String, String> tags) {
    publishWithCheck(settings, () -> publishNexmark(results, settings, tags));
  }

  /**
   * Publishes generic {@link NamedTestResult}s under {@code settings.measurement}.
   *
   * @param results the test results to publish
   * @param settings connection settings; must not be null
   */
  public static void publishWithSettings(
      final Collection<NamedTestResult> results, final InfluxDBSettings settings) {
    publishWithCheck(settings, () -> publishCommon(results, settings));
  }

  /**
   * Runs {@code publishFunction} only when both measurement and database are configured.
   * Any failure is swallowed and logged so metric publication never fails a test run.
   */
  private static void publishWithCheck(
      final InfluxDBSettings settings, final PublishFunction publishFunction) {
    requireNonNull(settings, "InfluxDB settings must not be null");
    if (isNoneBlank(settings.measurement, settings.database)) {
      try {
        publishFunction.publish();
      } catch (Exception exception) {
        // Pass the throwable as the last argument so the full stack trace is logged
        // (previously only the message was kept, losing the cause).
        LOG.warn("Unable to publish metrics due to error: {}", exception.getMessage(), exception);
      }
    } else {
      LOG.warn("Missing property -- measurement/database. Metrics won't be published.");
    }
  }

  /** Builds the Nexmark line-protocol payload and POSTs it gzip-compressed. */
  private static void publishNexmark(
      final Collection<Map<String, Object>> results,
      final InfluxDBSettings settings,
      final Map<String, String> tags)
      throws Exception {

    final HttpClientBuilder builder = provideHttpBuilder(settings);
    final HttpPost postRequest = providePOSTRequest(settings);
    final StringBuilder metricBuilder = new StringBuilder();

    results.forEach(
        map -> {
          // Line protocol: <measurement>,<tag-set> <field-set> <timestamp>
          metricBuilder.append(map.get("measurement")).append(",").append(getKV(map, "runner"));
          if (tags != null && !tags.isEmpty()) {
            tags.forEach(
                (key, value) -> metricBuilder.append(",").append(key).append("=").append(value));
          }
          metricBuilder
              .append(" ")
              .append(getKV(map, "runtimeMs"))
              .append(",")
              .append(getKV(map, "numResults"))
              .append(" ")
              .append(map.get("timestamp"))
              .append('\n');
        });

    postRequest.setEntity(
        new GzipCompressingEntity(new ByteArrayEntity(metricBuilder.toString().getBytes(UTF_8))));
    executeWithVerification(postRequest, builder);
  }

  /** Formats a map entry as {@code key=value} for the line protocol. */
  private static String getKV(final Map<String, Object> map, final String key) {
    return key + "=" + map.get(key);
  }

  /** Builds the common line-protocol payload for {@link NamedTestResult}s and POSTs it. */
  private static void publishCommon(
      final Collection<NamedTestResult> results, final InfluxDBSettings settings)
      throws Exception {

    final HttpClientBuilder builder = provideHttpBuilder(settings);
    final HttpPost postRequest = providePOSTRequest(settings);
    final StringBuilder metricBuilder = new StringBuilder();

    results.stream()
        .map(NamedTestResult::toMap)
        .forEach(
            map ->
                metricBuilder
                    .append(settings.measurement)
                    .append(",")
                    .append(getKV(map, "test_id"))
                    .append(",")
                    .append(getKV(map, "metric"))
                    .append(" ")
                    .append(getKV(map, "value"))
                    .append('\n'));

    postRequest.setEntity(new ByteArrayEntity(metricBuilder.toString().getBytes(UTF_8)));
    executeWithVerification(postRequest, builder);
  }

  /** Creates an HTTP client builder, adding basic-auth credentials when both are configured. */
  private static HttpClientBuilder provideHttpBuilder(final InfluxDBSettings settings) {
    final HttpClientBuilder builder = HttpClientBuilder.create();

    if (isNoneBlank(settings.userName, settings.userPassword)) {
      final CredentialsProvider provider = new BasicCredentialsProvider();
      provider.setCredentials(
          AuthScope.ANY, new UsernamePasswordCredentials(settings.userName, settings.userPassword));
      builder.setDefaultCredentialsProvider(provider);
    }

    return builder;
  }

  /** Builds the {@code /write} POST request; retention policy is optional, precision is seconds. */
  private static HttpPost providePOSTRequest(final InfluxDBSettings settings) {
    final String retentionPolicy =
        "rp" + (isBlank(settings.retentionPolicy) ? "" : "=" + settings.retentionPolicy);
    return new HttpPost(
        settings.host + "/write?db=" + settings.database + "&" + retentionPolicy + "&precision=s");
  }

  /** Executes the request and verifies the response code; closes both client and response. */
  private static void executeWithVerification(
      final HttpPost postRequest, final HttpClientBuilder builder) throws IOException {
    // FIX: the CloseableHttpClient produced by builder.build() was previously never closed,
    // leaking its connection manager on every publish. Close it alongside the response.
    try (final CloseableHttpClient client = builder.build();
        final CloseableHttpResponse response = client.execute(postRequest)) {
      is2xx(response);
    }
  }

  /** Throws an {@link IOException} with the server's error message unless the status is 2xx. */
  private static void is2xx(final HttpResponse response) throws IOException {
    final int code = response.getStatusLine().getStatusCode();
    if (code < 200 || code >= 300) {
      throw new IOException(
          "Response code: " + code + ". Reason: " + getErrorMessage(response.getEntity()));
    }
  }

  /** Extracts the "error" field from InfluxDB's JSON error body, honoring the content encoding. */
  private static String getErrorMessage(final HttpEntity entity) throws IOException {
    final Header encodingHeader = entity.getContentEncoding();
    final Charset encoding =
        encodingHeader == null
            ? StandardCharsets.UTF_8
            : Charsets.toCharset(encodingHeader.getValue());
    final JsonElement errorElement =
        new Gson().fromJson(EntityUtils.toString(entity, encoding), JsonObject.class).get("error");
    return isNull(errorElement) ? "[Unable to get error message]" : errorElement.toString();
  }

  /** A publish action that may throw; used to funnel both payload types through one check. */
  @FunctionalInterface
  private interface PublishFunction {
    void publish() throws Exception;
  }
}
// Copyright 2018 The Svalbard Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///////////////////////////////////////////////////////////////////////////////

package com.google.security.svalbard.client;

import com.google.common.util.concurrent.MoreExecutors;
import com.google.protobuf.TextFormat;
import com.google.security.svalbard.proto.ShareLocation;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.util.concurrent.ExecutionException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * A command-line utility for testing ServerShareManager-class.
 * It requires at least 2 arguments:
 *   rootDir: a directory for the secondary communication channel
 *   operation: the operation to be performed by the client
 *   params: params for the client request (optional, depending on operation)
 */
public class ServerShareManagerCli {
  // Use the JDK constant rather than a fresh Charset.forName("UTF-8") lookup.
  public static final Charset UTF_8 = StandardCharsets.UTF_8;

  public static void main(String[] args) throws Exception {
    if (args.length < 2) {
      System.out.println(
          "Usage: ServerShareManagerCli root-dir operation ...");
      System.exit(1);
    }
    String rootDir = args[0];
    String operation = args[1];
    FileSecondaryChannel fileChannel = new FileSecondaryChannel(rootDir);
    ServerShareManager manager =
        new ServerShareManager(fileChannel, MoreExecutors.newDirectExecutorService());
    switch (operation) {
      case "get_deletion_token":  // fall through
      case "get_retrieval_token":  // fall through
      case "get_storage_token": {
        // Derive the token kind ("deletion"/"retrieval"/"storage") from the operation name.
        Matcher matcher = Pattern.compile("get_(.*)_token").matcher(operation);
        if (!matcher.matches()) {
          System.out.println("Internal regex matcher error: " + operation);
          System.exit(1);
        }
        String op = matcher.group(1);
        if (args.length != 5) {
          // FIX: usage messages previously referred to the wrong class name "SvalbardClientCli".
          System.out.println(
              "Usage: ServerShareManagerCli root-dir " + operation
              + " req-id-filename secret-name share-location");
          System.exit(1);
        }
        String reqIdFilename = args[2];
        String secretName = args[3];
        ShareLocation.Builder locationBuilder = ShareLocation.newBuilder();
        TextFormat.merge(args[4], locationBuilder);
        String requestId = ServerShareManager.getNewRequestId();
        try {
          String token = manager.getOperationToken(
              requestId, op, locationBuilder.build(), secretName).get();
          System.out.println("Got " + op + "-token: " + token);
          // Persist the request id so follow-up *_with_token calls can reference it.
          write(requestId.getBytes(UTF_8), reqIdFilename);
        } catch (GeneralSecurityException | ExecutionException e) {
          System.out.println("Failure: " + e);
          System.exit(1);
        }
      }
      break;
      case "store_share": {
        if (args.length != 5) {
          System.out.println(
              "Usage: ServerShareManagerCli root-dir " + operation
              + " secret-name share-value share-location");
          System.exit(1);
        }
        String secretName = args[2];
        String shareValue = args[3];
        String locationText = args[4];
        ShareLocation.Builder locationBuilder = ShareLocation.newBuilder();
        TextFormat.merge(locationText, locationBuilder);
        try {
          manager.storeShare(secretName, shareValue.getBytes(UTF_8), locationBuilder.build()).get();
          System.out.println("--- Stored.");
        } catch (GeneralSecurityException | ExecutionException e) {
          // FIX: also catch ExecutionException — failures of the future's get() previously
          // escaped main() as a raw stack trace instead of the "Failure:" message.
          System.out.println("Failure: " + e);
          System.exit(1);
        }
      }
      break;
      case "retrieve_share": {
        if (args.length != 5) {
          System.out.println(
              "Usage: ServerShareManagerCli root-dir " + operation
              + " secret-name share-location share-file");
          System.exit(1);
        }
        String secretName = args[2];
        String locationText = args[3];
        String shareFilename = args[4];
        ShareLocation.Builder locationBuilder = ShareLocation.newBuilder();
        TextFormat.merge(locationText, locationBuilder);
        try {
          byte[] share = manager.retrieveShare(secretName, locationBuilder.build()).get();
          System.out.println("Retrieved.");
          write(share, shareFilename);
        } catch (GeneralSecurityException | ExecutionException e) {
          System.out.println("Failure: " + e);
          System.exit(1);
        }
      }
      break;
      case "delete_share": {
        if (args.length != 4) {
          System.out.println(
              "Usage: ServerShareManagerCli root-dir " + operation
              + " secret-name share-location");
          System.exit(1);
        }
        String secretName = args[2];
        String locationText = args[3];
        ShareLocation.Builder locationBuilder = ShareLocation.newBuilder();
        TextFormat.merge(locationText, locationBuilder);
        try {
          manager.deleteShare(secretName, locationBuilder.build()).get();
          System.out.println("Deleted.");
        } catch (GeneralSecurityException | ExecutionException e) {
          System.out.println("Failure: " + e);
          System.exit(1);
        }
      }
      break;
      case "store_share_with_token": {
        if (args.length != 6) {
          System.out.println(
              "Usage: ServerShareManagerCli root-dir " + operation
              + " secret-name share-value share-location token");
          System.exit(1);
        }
        String secretName = args[2];
        String shareValue = args[3];
        String locationText = args[4];
        String token = args[5];
        ShareLocation.Builder locationBuilder = ShareLocation.newBuilder();
        TextFormat.merge(locationText, locationBuilder);
        try {
          manager.storeShare(
              secretName, shareValue.getBytes(UTF_8), locationBuilder.build(), token);
          System.out.println("Stored.");
        } catch (GeneralSecurityException e) {
          System.out.println("Failure: " + e);
          System.exit(1);
        }
      }
      break;
      case "retrieve_share_with_token": {
        if (args.length != 6) {
          System.out.println(
              "Usage: ServerShareManagerCli root-dir " + operation
              + " secret-name share-location token share-file");
          System.exit(1);
        }
        String secretName = args[2];
        String locationText = args[3];
        String token = args[4];
        String shareFilename = args[5];
        ShareLocation.Builder locationBuilder = ShareLocation.newBuilder();
        TextFormat.merge(locationText, locationBuilder);
        try {
          byte[] share = manager.retrieveShare(secretName, locationBuilder.build(), token);
          System.out.println("Retrieved.");
          write(share, shareFilename);
        } catch (GeneralSecurityException e) {
          System.out.println("Failure: " + e);
          System.exit(1);
        }
      }
      break;
      case "delete_share_with_token": {
        if (args.length != 5) {
          System.out.println(
              "Usage: ServerShareManagerCli root-dir " + operation
              + " secret-name share-location token");
          System.exit(1);
        }
        String secretName = args[2];
        String locationText = args[3];
        String token = args[4];
        ShareLocation.Builder locationBuilder = ShareLocation.newBuilder();
        TextFormat.merge(locationText, locationBuilder);
        try {
          manager.deleteShare(secretName, locationBuilder.build(), token);
          System.out.println("Deleted.");
        } catch (GeneralSecurityException e) {
          System.out.println("Failure: " + e);
          System.exit(1);
        }
      }
      break;
      default:
        throw new IllegalArgumentException("Unsupported operation: " + operation);
    }
  }

  /** Writes {@code contents} to {@code filename}, overwriting any existing file. */
  private static void write(byte[] contents, String filename) throws IOException {
    try (OutputStream outputStream = new FileOutputStream(Paths.get(filename).toFile())) {
      outputStream.write(contents);
    }
  }

  /**
   * Reads the entire contents of {@code filename}.
   *
   * @return the file contents as a byte array
   * @throws IOException if the file cannot be read
   */
  public static byte[] read(String filename) throws GeneralSecurityException, IOException {
    // FIX: use try-with-resources — the original leaked the stream if read() threw.
    try (InputStream inputStream = new FileInputStream(Paths.get(filename).toFile())) {
      ByteArrayOutputStream result = new ByteArrayOutputStream();
      byte[] buffer = new byte[1024];
      int length;
      while ((length = inputStream.read(buffer)) != -1) {
        result.write(buffer, 0, length);
      }
      return result.toByteArray();
    }
  }
}
package com.taobao.tddl.dbsync.binlog;

import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.SocketTimeoutException;
import java.sql.Connection;
import java.sql.SQLException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Fetches the MySQL binlog stream directly over an existing JDBC connection's
 * network streams by issuing a raw COM_BINLOG_DUMP command.
 *
 * <pre>
 * DirectLogFetcher fetcher = new DirectLogFetcher();
 * fetcher.open(conn, file, 0, 13);
 *
 * while (fetcher.fetch())
 * {
 *     LogEvent event;
 *     do
 *     {
 *         event = decoder.decode(fetcher, context);
 *
 *         // process log event.
 *     }
 *     while (event != null);
 * }
 * // connection closed.
 * </pre>
 *
 * <p>Buffer state ({@code buffer}, {@code position}, {@code limit}, {@code origin},
 * {@code ensureCapacity}, the {@code getUint*}/{@code getFixString} readers and
 * {@code BIN_LOG_HEADER_SIZE}) is inherited from {@link LogFetcher}.
 *
 * @author <a href="mailto:changyuan.lh@taobao.com">Changyuan.lh</a>
 * @version 1.0
 */
public final class DirectLogFetcher extends LogFetcher {

    protected static final Log logger             = LogFactory.getLog(DirectLogFetcher.class);

    /** Command to dump binlog */
    public static final byte  COM_BINLOG_DUMP     = 18;

    /** Packet header sizes */
    public static final int   NET_HEADER_SIZE     = 4;
    public static final int   SQLSTATE_LENGTH     = 5;

    /** Packet offsets */
    public static final int   PACKET_LEN_OFFSET   = 0;
    public static final int   PACKET_SEQ_OFFSET   = 3;

    /** Maximum packet length */
    public static final int   MAX_PACKET_LENGTH   = (256 * 256 * 256 - 1);

    /** BINLOG_DUMP options */
    public static final int   BINLOG_DUMP_NON_BLOCK = 1;

    private Connection        conn;
    private OutputStream      mysqlOutput;
    private InputStream       mysqlInput;

    public DirectLogFetcher() {
        super(DEFAULT_INITIAL_CAPACITY, DEFAULT_GROWTH_FACTOR);
    }

    public DirectLogFetcher(final int initialCapacity) {
        super(initialCapacity, DEFAULT_GROWTH_FACTOR);
    }

    public DirectLogFetcher(final int initialCapacity, final float growthFactor) {
        super(initialCapacity, growthFactor);
    }

    /**
     * Unwraps pool/proxy wrappers (Spring ConnectionProxy, DBCP
     * DelegatingConnection, java.sql.Wrapper) until an instance of
     * {@code connClazz} is reached, or returns null when no known wrapper
     * matches.
     */
    private static final Object unwrapConnection(Object conn, Class<?> connClazz) throws IOException {
        while (!connClazz.isInstance(conn)) {
            try {
                Class<?> connProxy = Class.forName("org.springframework.jdbc.datasource.ConnectionProxy");
                if (connProxy.isInstance(conn)) {
                    conn = invokeMethod(conn, connProxy, "getTargetConnection");
                    continue;
                }
            } catch (ClassNotFoundException e) {
                // org.springframework.jdbc.datasource.ConnectionProxy not found.
            }

            try {
                Class<?> connProxy = Class.forName("org.apache.commons.dbcp.DelegatingConnection");
                if (connProxy.isInstance(conn)) {
                    conn = getDeclaredField(conn, connProxy, "_conn");
                    continue;
                }
            } catch (ClassNotFoundException e) {
                // org.apache.commons.dbcp.DelegatingConnection not found.
            }

            try {
                if (conn instanceof java.sql.Wrapper) {
                    Class<?> connIface = Class.forName("com.mysql.jdbc.Connection");
                    conn = ((java.sql.Wrapper) conn).unwrap(connIface);
                    continue;
                }
            } catch (ClassNotFoundException e) {
                // com.mysql.jdbc.Connection not found.
            } catch (SQLException e) {
                logger.warn("Unwrap " + conn.getClass().getName() + " to " + connClazz.getName() + " failed: "
                            + e.getMessage(), e);
            }
            return null;
        }
        return conn;
    }

    /** Reflectively invokes a no-arg method, rethrowing failures as IllegalArgumentException. */
    private static final Object invokeMethod(Object obj, Class<?> objClazz, String name) {
        try {
            Method method = objClazz.getMethod(name, (Class<?>[]) null);
            return method.invoke(obj, (Object[]) null);
        } catch (NoSuchMethodException e) {
            throw new IllegalArgumentException("No such method: \'" + name + "\' @ " + objClazz.getName(), e);
        } catch (IllegalAccessException e) {
            throw new IllegalArgumentException("Cannot invoke method: \'" + name + "\' @ " + objClazz.getName(), e);
        } catch (InvocationTargetException e) {
            throw new IllegalArgumentException("Invoke method failed: \'" + name + "\' @ " + objClazz.getName(),
                                               e.getTargetException());
        }
    }

    /** Reflectively reads a (possibly private) declared field. */
    private static final Object getDeclaredField(Object obj, Class<?> objClazz, String name) {
        try {
            Field field = objClazz.getDeclaredField(name);
            field.setAccessible(true);
            return field.get(obj);
        } catch (NoSuchFieldException e) {
            throw new IllegalArgumentException("No such field: \'" + name + "\' @ " + objClazz.getName(), e);
        } catch (IllegalAccessException e) {
            throw new IllegalArgumentException("Cannot get field: \'" + name + "\' @ " + objClazz.getName(), e);
        }
    }

    /**
     * Connect MySQL master to fetch binlog.
     */
    public void open(Connection conn, String fileName, final int serverId) throws IOException {
        open(conn, fileName, BIN_LOG_HEADER_SIZE, serverId, false);
    }

    /**
     * Connect MySQL master to fetch binlog.
     */
    public void open(Connection conn, String fileName, final int serverId, boolean nonBlocking) throws IOException {
        open(conn, fileName, BIN_LOG_HEADER_SIZE, serverId, nonBlocking);
    }

    /**
     * Connect MySQL master to fetch binlog.
     */
    public void open(Connection conn, String fileName, final long filePosition, final int serverId) throws IOException {
        open(conn, fileName, filePosition, serverId, false);
    }

    /**
     * Connect MySQL master to fetch binlog.
     *
     * <p>Unwraps the JDBC connection down to com.mysql.jdbc.ConnectionImpl,
     * grabs its raw network streams via reflection, and sends COM_BINLOG_DUMP.
     */
    public void open(Connection conn, String fileName, long filePosition, final int serverId, boolean nonBlocking)
                                                                                                                  throws IOException {
        try {
            this.conn = conn;
            Class<?> connClazz = Class.forName("com.mysql.jdbc.ConnectionImpl");
            Object unwrapConn = unwrapConnection(conn, connClazz);
            if (unwrapConn == null) {
                throw new IOException("Unable to unwrap " + conn.getClass().getName()
                                      + " to com.mysql.jdbc.ConnectionImpl");
            }

            // Get underlying IO streams for network communications.
            Object connIo = getDeclaredField(unwrapConn, connClazz, "io");
            if (connIo == null) {
                throw new IOException("Get null field:" + conn.getClass().getName() + "#io");
            }
            mysqlOutput = (OutputStream) getDeclaredField(connIo, connIo.getClass(), "mysqlOutput");
            mysqlInput = (InputStream) getDeclaredField(connIo, connIo.getClass(), "mysqlInput");

            // Position 0 is inside the 4-byte binlog magic; start after it.
            if (filePosition == 0) filePosition = BIN_LOG_HEADER_SIZE;
            sendBinlogDump(fileName, filePosition, serverId, nonBlocking);
            position = 0;
        } catch (IOException e) {
            close(); /* Do cleanup */
            logger.error("Error on COM_BINLOG_DUMP: file = " + fileName + ", position = " + filePosition);
            throw e;
        } catch (ClassNotFoundException e) {
            close(); /* Do cleanup */
            throw new IOException("Unable to load com.mysql.jdbc.ConnectionImpl", e);
        }
    }

    /**
     * Put a byte in the buffer.
     *
     * @param b the byte to put in the buffer
     */
    protected final void putByte(byte b) {
        ensureCapacity(position + 1);

        buffer[position++] = b;
    }

    /**
     * Put 16-bit integer in the buffer, little-endian.
     *
     * @param i16 the integer to put in the buffer
     */
    protected final void putInt16(int i16) {
        ensureCapacity(position + 2);

        byte[] buf = buffer;
        buf[position++] = (byte) (i16 & 0xff);
        buf[position++] = (byte) (i16 >>> 8);
    }

    /**
     * Put 32-bit integer in the buffer, little-endian.
     *
     * @param i32 the integer to put in the buffer
     */
    protected final void putInt32(long i32) {
        ensureCapacity(position + 4);

        byte[] buf = buffer;
        buf[position++] = (byte) (i32 & 0xff);
        buf[position++] = (byte) (i32 >>> 8);
        buf[position++] = (byte) (i32 >>> 16);
        buf[position++] = (byte) (i32 >>> 24);
    }

    /**
     * Put a null-terminated string in the buffer.
     *
     * <p>NOTE(review): encodes with the platform default charset, as the
     * original did; binlog file names are normally ASCII, so this is assumed
     * safe — confirm if non-ASCII names are possible.
     *
     * @param s the value to put in the buffer
     */
    protected final void putString(String s) {
        // FIX: the original copied s.length() bytes out of s.getBytes() and advanced
        // position by the char count — wrong whenever the encoded byte length differs
        // from the char count (any multi-byte character). Size and copy by byte length.
        byte[] bytes = s.getBytes();
        ensureCapacity(position + bytes.length + 1);

        System.arraycopy(bytes, 0, buffer, position, bytes.length);
        position += bytes.length;
        buffer[position++] = 0;
    }

    /**
     * Builds and sends the COM_BINLOG_DUMP packet: 1-byte command, 4-byte start
     * position, 2-byte flags, 4-byte server id, then the binlog file name.
     * The 3-byte little-endian payload length is patched into the net header.
     */
    protected final void sendBinlogDump(String fileName, final long filePosition, final int serverId,
                                        boolean nonBlocking) throws IOException {
        position = NET_HEADER_SIZE;

        putByte(COM_BINLOG_DUMP);
        putInt32(filePosition);
        putInt16(nonBlocking ? BINLOG_DUMP_NON_BLOCK : 0); // binlog_flags
        putInt32(serverId); // slave's server-id
        putString(fileName);

        final byte[] buf = buffer;
        final int len = position - NET_HEADER_SIZE;
        buf[0] = (byte) (len & 0xff);
        buf[1] = (byte) (len >>> 8);
        buf[2] = (byte) (len >>> 16);
        // buf[3] (sequence number) stays 0 for the first packet of a command.

        mysqlOutput.write(buffer, 0, position);
        mysqlOutput.flush();
    }

    /**
     * {@inheritDoc}
     *
     * <p>Reads the next server packet (reassembling multi-packets of
     * MAX_PACKET_LENGTH), handles error (0xFF) and EOF (0xFE) markers, and
     * positions the inherited buffer cursors past the net header and status
     * byte for event decoding.
     *
     * @see com.taobao.tddl.dbsync.binlog.LogFetcher#fetch()
     */
    public boolean fetch() throws IOException {
        try {
            // Fetching packet header from input.
            if (!fetch0(0, NET_HEADER_SIZE)) {
                logger.warn("Reached end of input stream while fetching header");
                return false;
            }

            // Fetching the first packet(may a multi-packet).
            int netlen = getUint24(PACKET_LEN_OFFSET);
            int netnum = getUint8(PACKET_SEQ_OFFSET);
            if (!fetch0(NET_HEADER_SIZE, netlen)) {
                logger.warn("Reached end of input stream: packet #" + netnum + ", len = " + netlen);
                return false;
            }

            // Detecting error code.
            final int mark = getUint8(NET_HEADER_SIZE);
            if (mark != 0) {
                if (mark == 255) // error from master
                {
                    // Indicates an error, for example trying to fetch from wrong
                    // binlog position.
                    position = NET_HEADER_SIZE + 1;
                    final int errno = getInt16();
                    String sqlstate = forward(1).getFixString(SQLSTATE_LENGTH);
                    String errmsg = getFixString(limit - position);
                    throw new IOException("Received error packet:" + " errno = " + errno + ", sqlstate = " + sqlstate
                                          + " errmsg = " + errmsg);
                } else if (mark == 254) {
                    // Indicates end of stream. It's not clear when this would
                    // be sent.
                    logger.warn("Received EOF packet from server, apparent" + " master disconnected.");
                    return false;
                } else {
                    // Should not happen.
                    throw new IOException("Unexpected response " + mark + " while fetching binlog: packet #" + netnum
                                          + ", len = " + netlen);
                }
            }

            // The first packet is a multi-packet, concatenate the packets.
            while (netlen == MAX_PACKET_LENGTH) {
                if (!fetch0(0, NET_HEADER_SIZE)) {
                    logger.warn("Reached end of input stream while fetching header");
                    return false;
                }

                netlen = getUint24(PACKET_LEN_OFFSET);
                netnum = getUint8(PACKET_SEQ_OFFSET);
                if (!fetch0(limit, netlen)) {
                    logger.warn("Reached end of input stream: packet #" + netnum + ", len = " + netlen);
                    return false;
                }
            }

            // Preparing buffer variables to decoding.
            origin = NET_HEADER_SIZE + 1;
            position = origin;
            limit -= origin;
            return true;
        } catch (SocketTimeoutException e) {
            close(); /* Do cleanup */
            logger.error("Socket timeout expired, closing connection", e);
            throw e;
        } catch (InterruptedIOException e) {
            close(); /* Do cleanup */
            logger.warn("I/O interrupted while reading from client socket", e);
            throw e;
        } catch (IOException e) {
            close(); /* Do cleanup */
            logger.error("I/O error while reading from client socket", e);
            throw e;
        }
    }

    /**
     * Reads exactly {@code len} bytes into the buffer at {@code off},
     * growing capacity and advancing {@code limit} as needed.
     *
     * @return false if end-of-stream is reached before {@code len} bytes arrive
     */
    private final boolean fetch0(final int off, final int len) throws IOException {
        ensureCapacity(off + len);

        for (int count, n = 0; n < len; n += count) {
            if (0 > (count = mysqlInput.read(buffer, off + n, len - n))) {
                // Reached end of input stream
                return false;
            }
        }

        if (limit < off + len) limit = off + len;
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * @see com.taobao.tddl.dbsync.binlog.LogFetcher#close()
     */
    public void close() throws IOException {
        try {
            if (conn != null) conn.close();
        } catch (SQLException e) {
            logger.warn("Unable to close connection", e);
        } finally {
            // FIX: always drop the references — the original skipped the field
            // resets when conn.close() threw SQLException.
            conn = null;
            mysqlInput = null;
            mysqlOutput = null;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.aws.sqs;

import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;

import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.AmazonSQSClientBuilder;
import com.amazonaws.services.sqs.model.Message;
import com.amazonaws.services.sqs.model.SendMessageRequest;
import com.google.auto.value.AutoValue;
import javax.annotation.Nullable;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.io.aws.options.AwsOptions;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
import org.joda.time.Duration;

/**
 * An unbounded source for Amazon Simple Queue Service (SQS).
 *
 * <h3>Reading from an SQS queue</h3>
 *
 * <p>The {@link SqsIO} {@link Read} returns an unbounded {@link PCollection} of {@link
 * com.amazonaws.services.sqs.model.Message} containing the received messages. Note: This source
 * does not currently advance the watermark when no new messages are received.
 *
 * <p>To configure an SQS source, you have to provide the queueUrl to connect to. The following
 * example illustrates how to configure the source:
 *
 * <pre>{@code
 * pipeline.apply(SqsIO.read().withQueueUrl(queueUrl))
 * }</pre>
 *
 * <h3>Writing to an SQS queue</h3>
 *
 * <p>The following example illustrates how to use the sink:
 *
 * <pre>{@code
 * pipeline
 *   .apply(...) // returns PCollection<SendMessageRequest>
 *   .apply(SqsIO.write())
 * }</pre>
 *
 * <h3>Additional Configuration</h3>
 *
 * <p>Additional configuration can be provided via {@link AwsOptions} from command line args or in
 * code. For example, if you wanted to provide a secret access key via code:
 *
 * <pre>{@code
 * PipelineOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).withValidation().create();
 * AwsOptions awsOptions = pipelineOptions.as(AwsOptions.class);
 * BasicAWSCredentials awsCreds = new BasicAWSCredentials("accesskey", "secretkey");
 * awsOptions.setAwsCredentialsProvider(new AWSStaticCredentialsProvider(awsCreds));
 * Pipeline pipeline = Pipeline.create(options);
 * }</pre>
 *
 * <p>For more information on the available options see {@link AwsOptions}.
 */
@Experimental(Experimental.Kind.SOURCE_SINK)
public class SqsIO {

  /**
   * Creates a {@link Read} transform with no record limit (unbounded unless
   * later restricted via {@link Read#withMaxNumRecords} or {@link Read#withMaxReadTime}).
   */
  public static Read read() {
    return new AutoValue_SqsIO_Read.Builder().setMaxNumRecords(Long.MAX_VALUE).build();
  }

  /** Creates a {@link Write} transform for sending {@link SendMessageRequest}s to SQS. */
  public static Write write() {
    return new AutoValue_SqsIO_Write.Builder().build();
  }

  // Non-instantiable: this class only hosts the Read/Write transforms.
  private SqsIO() {}

  /**
   * A {@link PTransform} to read/receive messages from SQS. See {@link SqsIO} for more information
   * on usage and configuration.
   */
  @AutoValue
  public abstract static class Read extends PTransform<PBegin, PCollection<Message>> {

    // AutoValue property accessors; names are consumed by the generated builder.
    @Nullable
    abstract String queueUrl();

    abstract long maxNumRecords();

    @Nullable
    abstract Duration maxReadTime();

    abstract Builder toBuilder();

    @AutoValue.Builder
    abstract static class Builder {
      abstract Builder setQueueUrl(String queueUrl);

      abstract Builder setMaxNumRecords(long maxNumRecords);

      abstract Builder setMaxReadTime(Duration maxReadTime);

      abstract Read build();
    }

    /**
     * Define the max number of records received by the {@link Read}. When the max number of records
     * is lower than {@code Long.MAX_VALUE}, the {@link Read} will provide a bounded {@link
     * PCollection}.
     */
    public Read withMaxNumRecords(long maxNumRecords) {
      return toBuilder().setMaxNumRecords(maxNumRecords).build();
    }

    /**
     * Define the max read time (duration) while the {@link Read} will receive messages. When this
     * max read time is not null, the {@link Read} will provide a bounded {@link PCollection}.
     */
    public Read withMaxReadTime(Duration maxReadTime) {
      return toBuilder().setMaxReadTime(maxReadTime).build();
    }

    /** Define the queueUrl used by the {@link Read} to receive messages from SQS. */
    public Read withQueueUrl(String queueUrl) {
      checkArgument(queueUrl != null, "queueUrl can not be null");
      checkArgument(!queueUrl.isEmpty(), "queueUrl can not be empty");
      return toBuilder().setQueueUrl(queueUrl).build();
    }

    @Override
    public PCollection<Message> expand(PBegin input) {
      // Build the unbounded source from this transform's configuration plus
      // AWS credentials/region resolved from the pipeline's AwsOptions.
      org.apache.beam.sdk.io.Read.Unbounded<Message> unbounded =
          org.apache.beam.sdk.io.Read.from(
              new SqsUnboundedSource(
                  this,
                  new SqsConfiguration(input.getPipeline().getOptions().as(AwsOptions.class))));

      PTransform<PBegin, PCollection<Message>> transform = unbounded;

      // Either limit converts the unbounded read into a bounded one.
      if (maxNumRecords() < Long.MAX_VALUE || maxReadTime() != null) {
        transform = unbounded.withMaxReadTime(maxReadTime()).withMaxNumRecords(maxNumRecords());
      }

      return input.getPipeline().apply(transform);
    }
  }

  /**
   * A {@link PTransform} to send messages to SQS. See {@link SqsIO} for more information on usage
   * and configuration.
   */
  @AutoValue
  public abstract static class Write extends PTransform<PCollection<SendMessageRequest>, PDone> {

    abstract Builder toBuilder();

    @AutoValue.Builder
    abstract static class Builder {
      abstract Write build();
    }

    @Override
    public PDone expand(PCollection<SendMessageRequest> input) {
      input.apply(
          ParDo.of(
              new SqsWriteFn(
                  new SqsConfiguration(input.getPipeline().getOptions().as(AwsOptions.class)))));
      return PDone.in(input.getPipeline());
    }
  }

  /**
   * DoFn that sends each element to SQS. The client is created in {@code @Setup}
   * (and kept transient — AmazonSQS is not serializable) and shut down in
   * {@code @Teardown}.
   */
  private static class SqsWriteFn extends DoFn<SendMessageRequest, Void> {
    private final SqsConfiguration sqsConfiguration;
    private transient AmazonSQS sqs;

    SqsWriteFn(SqsConfiguration sqsConfiguration) {
      this.sqsConfiguration = sqsConfiguration;
    }

    @Setup
    public void setup() {
      sqs =
          AmazonSQSClientBuilder.standard()
              .withClientConfiguration(sqsConfiguration.getClientConfiguration())
              .withCredentials(sqsConfiguration.getAwsCredentialsProvider())
              .withRegion(sqsConfiguration.getAwsRegion())
              .build();
    }

    @ProcessElement
    public void processElement(ProcessContext processContext) throws Exception {
      sqs.sendMessage(processContext.element());
    }

    @Teardown
    public void teardown() throws Exception {
      if (sqs != null) {
        sqs.shutdown();
      }
    }
  }
}
package org.sana.android.activity;

import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;

import android.app.ProgressDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.pm.PackageInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.net.Uri;
import android.preference.PreferenceManager;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.text.TextUtils;
import android.widget.Toast;

import org.sana.BuildConfig;
import org.sana.R;
import org.sana.android.Constants;
import org.sana.android.app.Locales;
import org.sana.android.app.State.Keys;
import org.sana.android.content.DispatchResponseReceiver;
import org.sana.android.content.Intents;
import org.sana.android.content.Uris;
import org.sana.android.fragment.AuthenticationDialogFragment.AuthenticationDialogListener;
import org.sana.android.util.Logf;
import org.sana.android.util.UriUtil;
import org.sana.net.Response;

/**
 * Base class that contains basic functionalities and behaviors that all
 * activities should do.
 * @author Sana Dev Team
 */
public abstract class BaseActivity extends FragmentActivity implements
    AuthenticationDialogListener{

    public static final String TAG = BaseActivity.class.getSimpleName();

    static
    // Dialog for prompting the user that a long operation is being performed.
    // NOTE(review): a static ProgressDialog retains the Activity context it
    // was created with across Activity instances — leak risk and possible
    // BadTokenException after rotation; confirm whether static is intentional.
    ProgressDialog mWaitDialog = null;
    // Locale override read from resources in onCreate; null/empty = default.
    protected String mLocale = null;
    protected boolean mForceLocale = false;

    /**
     * Finishes the calling activity and launches the activity contained in
     * <code>intent</code>
     * NOTE(review): the body only calls startActivity(); finish() is never
     * invoked, so the javadoc overstates the behavior — confirm intent.
     * @param intent
     */
    void switchActivity(Intent intent) {
        startActivity(intent);
    }

    // Session related
    public static final String INSTANCE_KEY = "instanceKey";
    public static final String SESSION_KEY = "sessionKey";

    // True while the progress dialog should be visible (survives pause/resume).
    private final AtomicBoolean mWaiting = new AtomicBoolean(false);
    // Message shown in the progress dialog; null when none.
    protected String mDialogString = null;

    // instanceKey initialized to some random value for the instance;
    private String mInstanceKey = UUID.randomUUID().toString();
    // Authenticated session key default is null;
    private String mSessionKey = null;

    // Current app-state URIs; Uri.EMPTY is the "unset" sentinel throughout.
    protected Uri mSubject = Uri.EMPTY;
    protected Uri mEncounter = Uri.EMPTY;
    protected Uri mProcedure = Uri.EMPTY;
    protected Uri mObserver = Uri.EMPTY;
    protected Uri mTask = Uri.EMPTY;
    protected boolean mDebug = false;
    protected boolean mRoot = false;
    private boolean mUploadForeground = false;

    // State keys
    public static final String STATE_DIALOG = "__dialog";
    public static final String STATE_ROLE = "__role";

    // Delegates broadcasts to the overridable handleBroadcast(Intent) hook.
    protected BroadcastReceiver mReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            handleBroadcast(intent);
        }
    };

    /**
     * Returns the value of the instance key which is created when the object is
     * instantiated.
     * @return
     */
    protected String getInstanceKey(){
        return mInstanceKey;
    }

    /**
     * Returns the value of the session key. Warning: any key returned must be
     * authenticated with the session service.
     * @return
     */
    protected String getSessionKey(){
        return mSessionKey;
    }

    /**
     * Sets the value of the session key. Warning: this method does not make
     * any atempt to validate whether the session is authenticated.
     * @param sessionKey
     */
    protected void setSessionKey(String sessionKey){
        mSessionKey = sessionKey;
    }

    /**
     * Writes the state fields for this component to a bundle.
     * Currently this writes the following from the Bundle
     * <ul>
     * <li>instance key</li>
     * <li>session key</li>
     * <li>current encounter</li>
     * <li>current subject</li>
     * <li>current observer</li>
     * <li>current procedure</li>
     * </ul>
     * @param outState
     */
    protected void onSaveAppState(Bundle outState){
        outState.putString(Keys.INSTANCE_KEY, mInstanceKey);
        outState.putString(Keys.SESSION_KEY, mSessionKey);
        outState.putParcelable(Intents.EXTRA_ENCOUNTER, mEncounter);
        outState.putParcelable(Intents.EXTRA_SUBJECT, mSubject);
        outState.putParcelable(Intents.EXTRA_PROCEDURE, mProcedure);
        outState.putParcelable(Intents.EXTRA_OBSERVER, mObserver);
        outState.putParcelable(Intents.EXTRA_TASK, mTask);
        outState.putBoolean(STATE_ROLE, mRoot);
    }

    /**
     * Writes the state fields for this component to an Intent as Extras.
     * Currently this writes the following from the Intent.
     * <ul>
     * <li>instance key</li>
     * <li>session key</li>
     * <li>current encounter</li>
     * <li>current subject</li>
     * <li>current observer</li>
     * <li>current procedure</li>
     * </ul>
     * @param outState
     */
    protected void onSaveAppState(Intent outState){
        outState.putExtra(Keys.INSTANCE_KEY, mInstanceKey);
        outState.putExtra(Keys.SESSION_KEY, mSessionKey);
        outState.putExtra(Intents.EXTRA_ENCOUNTER, mEncounter);
        outState.putExtra(Intents.EXTRA_SUBJECT, mSubject);
        outState.putExtra(Intents.EXTRA_PROCEDURE, mProcedure);
        outState.putExtra(Intents.EXTRA_OBSERVER, mObserver);
        outState.putExtra(Intents.EXTRA_TASK, mTask);
        outState.putExtra(STATE_ROLE, mRoot);
    }

    /**
     * Sets the state fields for this component from an Intent.
     * Currently this attempts to read the following extras from the
     * Intent. Fields are only overwritten when the corresponding extra is
     * present (non-null), so partial intents leave existing state intact.
     * <ul>
     * <li>instance key</li>
     * <li>session key</li>
     * <li>current encounter</li>
     * <li>current subject</li>
     * <li>current observer</li>
     * <li>current procedure</li>
     * </ul>
     *
     * @param inState
     */
    protected void onUpdateAppState(Intent inState){
        String k = inState.getStringExtra(Keys.INSTANCE_KEY);
        // NOTE(review): new String(k) makes a redundant copy — Strings are
        // immutable; plain assignment would suffice.
        if(k != null)
            mInstanceKey = new String(k);
        k = inState.getStringExtra(Keys.SESSION_KEY);
        if(k!=null)
            mSessionKey = new String(k);
        Uri temp = inState.getParcelableExtra(Intents.EXTRA_ENCOUNTER);
        if(temp != null)
            mEncounter = UriUtil.copyInstance(temp);
        temp = inState.getParcelableExtra(Intents.EXTRA_SUBJECT);
        if(temp != null)
            mSubject = UriUtil.copyInstance(temp);
        temp = inState.getParcelableExtra(Intents.EXTRA_PROCEDURE);
        if(temp != null)
            mProcedure = UriUtil.copyInstance(temp);
        temp = inState.getParcelableExtra(Intents.EXTRA_OBSERVER);
        if(temp != null)
            mObserver = UriUtil.copyInstance(temp);
        temp = inState.getParcelableExtra(Intents.EXTRA_TASK);
        if(temp != null)
            mTask = UriUtil.copyInstance(temp);
        mRoot = inState.getBooleanExtra(STATE_ROLE, false);
    }

    /**
     * Sets the state fields for this component from a bundle.
     * Currently this attempts to read the following from the Bundle.
     * Fields are only overwritten when the corresponding value is present.
     * <ul>
     * <li>instance key</li>
     * <li>session key</li>
     * <li>current encounter</li>
     * <li>current subject</li>
     * <li>current observer</li>
     * <li>current procedure</li>
     * </ul>
     *
     * @param inState
     */
    protected void onUpdateAppState(Bundle inState){
        String k = inState.getString(Keys.INSTANCE_KEY);
        if(k!=null)
            mInstanceKey = new String(k);
        k = inState.getString(Keys.SESSION_KEY);
        if(k!=null)
            mSessionKey = new String(k);
        Uri temp = inState.getParcelable(Intents.EXTRA_ENCOUNTER);
        if(temp != null)
            mEncounter = UriUtil.copyInstance(temp);
        temp = inState.getParcelable(Intents.EXTRA_SUBJECT);
        if(temp != null)
            mSubject = UriUtil.copyInstance(temp);
        temp = inState.getParcelable(Intents.EXTRA_PROCEDURE);
        if(temp != null)
            mProcedure = UriUtil.copyInstance(temp);
        temp = inState.getParcelable(Intents.EXTRA_OBSERVER);
        if(temp != null)
            mObserver = UriUtil.copyInstance(temp);
        temp = inState.getParcelable(Intents.EXTRA_TASK);
        if(temp != null)
            mTask = UriUtil.copyInstance(temp);
        mRoot = inState.getBoolean(STATE_ROLE);
    }

    // Resets all session/app state to the defaults of a fresh instance,
    // including generating a new random instance key.
    protected void onClearAppState(){
        mWaiting.set(false);
        mDialogString = null;
        mInstanceKey = UUID.randomUUID().toString();
        mSessionKey = null;
        mSubject = Uri.EMPTY;
        mEncounter = Uri.EMPTY;
        mProcedure = Uri.EMPTY;
        mObserver = Uri.EMPTY;
        mTask = Uri.EMPTY;
        mRoot = false;
    }

    // Persists the given credentials into default shared preferences.
    // NOTE(review): commit() blocks the calling thread; apply() is the usual
    // choice here. Also, storing a plaintext password in SharedPreferences
    // is a security concern — confirm against project requirements.
    protected final void setCurrentCredentials(String username, String password){
        Editor editor = PreferenceManager
            .getDefaultSharedPreferences(this).edit();
        editor.putString(
            Constants.PREFERENCE_EMR_USERNAME, username);
        editor.putString(
            Constants.PREFERENCE_EMR_PASSWORD, password);
        editor.commit();
    }

    // Overwrites stored credentials with the literal sentinel "NULL".
    protected final void clearCredentials(){
        setCurrentCredentials("NULL","NULL");
    }

    /**
     * Writes the app state fields as extras to an Intent. Activities
     * will still need to call setResult(RESULT_OK, data) as well as
     * write any other data they wish to the data Intent.
     *
     * @param data
     */
    protected void setResultAppData(Intent data){
        onSaveAppState(data);
    }

    // Snapshot of the current app state as a Bundle.
    public Bundle getState(){
        Bundle state = new Bundle();
        onSaveAppState(state);
        return state;
    }

    /*
     * (non-Javadoc)
     * @see com.actionbarsherlock.app.SherlockActivity#onSaveInstanceState(android.os.Bundle)
     */
    @Override
    protected void onSaveInstanceState(Bundle savedInstanceState){
        super.onSaveInstanceState(savedInstanceState);
        onSaveAppState(savedInstanceState);
        //onSaveDialog(savedInstanceState);
    }

    /*
     * (non-Javadoc)
     * @see com.actionbarsherlock.app.SherlockActivity#onRestoreInstanceState(android.os.Bundle)
     */
    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState){
        super.onRestoreInstanceState(savedInstanceState);
        onUpdateAppState(savedInstanceState);
        //onRestoreDialog(savedInstanceState);
    }

    /*
     * (non-Javadoc)
     * @see android.app.Activity#onCreate(android.os.Bundle)
     */
    @Override
    protected void onCreate(Bundle savedInstanceState){
        super.onCreate(savedInstanceState);
        Intent intent = getIntent();
        // get the fields from the launch intent extras
        if(intent != null)
            onUpdateAppState(intent);
        // assume savedInstanceState is newer
        if(savedInstanceState != null)
            onUpdateAppState(savedInstanceState);
        mLocale = getString(R.string.force_locale);
        mForceLocale = !TextUtils.isEmpty(mLocale);
        Locales.updateLocale(this, mLocale);
        mUploadForeground = this.getResources().getBoolean(R.bool.cfg_upload_foreground);
    }

    /**
     * Displays a progress dialog fragment with the provided message.
     * Replaces any dialog already showing; no-op when the Activity is
     * finishing. Sets mWaiting so onResume() can re-show after rotation.
     * @param message
     */
    void showProgressDialogFragment(String message) {
        Log.i(TAG,"showProgressDialogFragment");
        if (mWaitDialog != null && mWaitDialog.isShowing()) {
            hideProgressDialogFragment();
        }

        // No need to create dialog if this is finishing
        if(isFinishing())
            return;
        mDialogString = message;
        mWaitDialog = new ProgressDialog(this);
        mWaitDialog.setMessage(mDialogString);
        mWaitDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
        mWaitDialog.show();
        mWaiting.set(true);
    }

    /**
     * Hides the progress dialog if it is shown.
     * NOTE(review): both branches of the isFinishing() check call dismiss();
     * the branch appears vestigial — confirm whether cancel semantics were
     * intended when finishing.
     */
    void hideProgressDialogFragment() {
        Log.i(TAG,"hideProgressDialogFragment");
        mWaiting.set(false);
        if (mWaitDialog == null) {
            return;
        }
        // dismiss if finishing
        try{
            if(isFinishing()){
                mWaitDialog.dismiss();
                //cancelProgressDialogFragment();
            } else {
                mWaitDialog.dismiss();
            }
        } catch (Exception e){
            // Swallow IllegalArgumentException from dismissing a dialog whose
            // window is already gone; logged via stack trace only.
            e.printStackTrace();
        }
    }

    // Dismisses the dialog and clears the pending message so onResume()
    // will not re-show it.
    final void cancelProgressDialogFragment(){
        Log.i(TAG,"cancelProgressDialogFragment");
        mWaiting.set(false);
        mDialogString = null;
        try{
            if(mWaitDialog != null)// && mWaitDialog.isShowing())
                mWaitDialog.dismiss();
        } catch (Exception e){
            e.printStackTrace();
        }
    }

    // Whether uploads should show foreground progress (from resources).
    protected final boolean showProgressForeground(){
        return mUploadForeground;
    }

    // Routes a URI to the matching state field based on its type descriptor.
    public void setData(Uri uri){
        int code = Uris.getTypeDescriptor(uri);
        setData(code, uri);
    }

    // Assigns uri to the state field selected by the Uris type code;
    // unrecognized codes are silently ignored.
    public void setData(int code, Uri uri){
        switch(code){
        case Uris.ENCOUNTER_DIR:
        case Uris.ENCOUNTER_ITEM:
        case Uris.ENCOUNTER_UUID:
            mEncounter = uri;
            break;
        case Uris.OBSERVER_DIR:
        case Uris.OBSERVER_ITEM:
        case Uris.OBSERVER_UUID:
            mObserver = uri;
            break;
        case Uris.PROCEDURE_DIR:
        case Uris.PROCEDURE_ITEM:
        case Uris.PROCEDURE_UUID:
            mProcedure = uri;
            break;
        case Uris.SUBJECT_DIR:
        case Uris.SUBJECT_ITEM:
        case Uris.SUBJECT_UUID:
            mSubject = uri;
            break;
        case Uris.ENCOUNTER_TASK_DIR:
        case Uris.ENCOUNTER_TASK_ITEM:
        case Uris.ENCOUNTER_TASK_UUID:
        case Uris.OBSERVATION_TASK_DIR:
        case Uris.OBSERVATION_TASK_ITEM:
        case Uris.OBSERVATION_TASK_UUID:
            mTask = uri;
            break;
        default:
            break;
        }
    }

    // NOTE(review): missing @Override annotation (cannot be added in a
    // comment-only change); lifecycle hook that tears down the dialog.
    protected void onPause(){
        super.onPause();
        if(isFinishing())
            cancelProgressDialogFragment();
        else
            hideProgressDialogFragment();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Re-show the progress dialog if it was up when we were paused.
        if(mWaiting.get() && mDialogString != null)
            showProgressDialogFragment(mDialogString);
    }

    /* (non-Javadoc)
     * @see org.sana.android.fragment.AuthenticationDialogFragment.AuthenticationDialogListener#onDialogPositiveClick(android.support.v4.app.DialogFragment)
     */
    @Override
    public void onDialogPositiveClick(DialogFragment dialog) {
        // Subclasses that show the auth dialog must override.
        throw new UnsupportedOperationException();
    }

    /* (non-Javadoc)
     * @see org.sana.android.fragment.AuthenticationDialogFragment.AuthenticationDialogListener#onDialogNegativeClick(android.support.v4.app.DialogFragment)
     */
    @Override
    public void onDialogNegativeClick(DialogFragment dialog) {
        // Subclasses that show the auth dialog must override.
        throw new UnsupportedOperationException();
    }

    // Debug dump of the current state URIs.
    protected void dump(){
        Logf.D(this.getComponentName().getShortClassName(),"dump()",
            String.format("{ 'encounter': '%s',"
                +" 'observer': '%s', 'subject': '%s', 'procedure': '%s', 'task': '%s' }",
                mEncounter, mObserver, mSubject, mProcedure, mTask));
    }

    // Debug dump tagged with the calling method name.
    protected void dump(String method){
        Logf.D(this.getComponentName().getShortClassName(),method+".dump()",
            String.format("{ 'encounter': '%s',"
                +" 'observer': '%s', 'subject': '%s', 'procedure': '%s', 'task': '%s' }",
                mEncounter, mObserver, mSubject, mProcedure, mTask));
    }

    // Toast helpers.
    protected final void makeText(String text){
        makeText(text, Toast.LENGTH_LONG);
    }

    protected final void makeText(String text, int duration){
        Toast.makeText(this, text, duration).show();
    }

    protected final void makeText(int resId){
        Locales.updateLocale(this, getString(R.string.force_locale));
        makeText(resId, Toast.LENGTH_SHORT);
    }

    protected final void makeText(int resId, int duration){
        makeText(getString(resId), duration);
    }

    // Returns versionCode, or 0 if the package lookup fails.
    // NOTE(review): NameNotFoundException is silently swallowed.
    public int getVersion() {
        int v = 0;
        try {
            v = getPackageManager().getPackageInfo(getPackageName(), 0).versionCode;
        } catch (NameNotFoundException e) {
        }
        return v;
    }

    // Builds "sana-v<versionName>.<versionCode>.<local_build>".
    // NOTE(review): on any failure this returns the raw, unformatted template
    // string — confirm that is the intended fallback.
    public String getBuildString() {
        String localVersion =
            "sana-v%s.%d.%s";
        try {
            PackageInfo pi = getPackageManager().getPackageInfo(
                getPackageName(), 0);
            ApplicationInfo ai = getPackageManager().getApplicationInfo(
                getPackageName(), PackageManager.GET_META_DATA);
            Bundle metadata = ai.metaData;
            String local = (TextUtils.isEmpty(metadata.getString("local_build")))
                ? "0"
                : metadata.getString("local_build");
            return String.format(localVersion,
                pi.versionName,
                pi.versionCode,
                local);
        } catch (Exception e) {
        }
        return localVersion;
    }

    // Hook for subclasses; invoked by mReceiver for matching broadcasts.
    protected void handleBroadcast(Intent intent){
        Log.i(TAG, "handleBroadcast(Intent)");
        // Extract data included in the Intent
    }

    // Filter matching dispatch responses on the org.sana.provider authority.
    static final IntentFilter filter = new IntentFilter();
    static{
        filter.addAction(DispatchResponseReceiver.BROADCAST_RESPONSE);
        filter.addDataScheme("content");
        filter.addDataAuthority("org.sana.provider", null);
    }

    // getString variant that re-applies the forced locale first.
    public String getStringLocalized(int resId){
        if(!TextUtils.isEmpty(mLocale))
            Locales.updateLocale(this, mLocale);
        return super.getString(resId);
    }

    // setContentView variant that re-applies the forced locale first.
    public void setContentViewLocalized(int resId){
        if(mForceLocale)
            Locales.updateLocale(this, mLocale);
        super.setContentView(resId);
    }

    // Saves the progress-dialog state into a nested "mDialog" bundle.
    protected void onSaveDialog(Bundle savedInstanceState){
        Bundle dialog = new Bundle();
        dialog.putBoolean("mWaiting",mWaiting.get());
        dialog.putString("mDialogString",mDialogString);
        savedInstanceState.putBundle("mDialog", dialog);
    }

    // Restores the progress-dialog state; resets to "no dialog" when absent.
    protected void onRestoreDialog(Bundle savedInstanceState){
        Bundle dialog = savedInstanceState.getBundle("mDialog");
        if(dialog != null){
            mWaiting.set(dialog.getBoolean("mWaiting",false));
            String msg = dialog.getString("mDialogString");
            mDialogString = (!TextUtils.isEmpty(msg))? msg: null;
        } else {
            mWaiting.set(false);
            mDialogString = null;
        }
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.appservice.v2016_03_01.implementation;

import org.joda.time.DateTime;
import java.util.UUID;
import com.microsoft.azure.management.appservice.v2016_03_01.ResourceScopeType;
import com.microsoft.azure.management.appservice.v2016_03_01.NotificationLevel;
import com.microsoft.azure.management.appservice.v2016_03_01.Channels;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.azure.management.appservice.v2016_03_01.ProxyOnlyResource;

/**
 * Represents a recommendation result generated by the recommendation engine.
 *
 * NOTE: AutoRest-generated model; fields are flattened from the "properties"
 * JSON object via {@link JsonFlatten}. Do not hand-edit structure — regenerate.
 */
@JsonFlatten
public class RecommendationInner extends ProxyOnlyResource {
    /**
     * Timestamp when this instance was created.
     */
    @JsonProperty(value = "properties.creationTime")
    private DateTime creationTime;

    /**
     * A GUID value that each recommendation object is associated with.
     */
    @JsonProperty(value = "properties.recommendationId")
    private UUID recommendationId;

    /**
     * Full ARM resource ID string that this recommendation object is
     * associated with.
     */
    @JsonProperty(value = "properties.resourceId")
    private String resourceId;

    /**
     * Name of a resource type this recommendation applies, e.g. Subscription,
     * ServerFarm, Site. Possible values include: 'ServerFarm', 'Subscription',
     * 'WebSite'.
     */
    @JsonProperty(value = "properties.resourceScope")
    private ResourceScopeType resourceScope;

    /**
     * Unique name of the rule.
     */
    @JsonProperty(value = "properties.ruleName")
    private String ruleName;

    /**
     * UI friendly name of the rule (may not be unique).
     */
    @JsonProperty(value = "properties.displayName")
    private String displayName;

    /**
     * Recommendation text.
     */
    @JsonProperty(value = "properties.message")
    private String message;

    /**
     * Level indicating how critical this recommendation can impact. Possible
     * values include: 'Critical', 'Warning', 'Information',
     * 'NonUrgentSuggestion'.
     */
    @JsonProperty(value = "properties.level")
    private NotificationLevel level;

    /**
     * List of channels that this recommendation can apply. Possible values
     * include: 'Notification', 'Api', 'Email', 'Webhook', 'All'.
     */
    @JsonProperty(value = "properties.channels")
    private Channels channels;

    /**
     * The list of category tags that this recommendation belongs to.
     */
    @JsonProperty(value = "properties.tags")
    private List<String> tags;

    /**
     * Name of action recommended by this object.
     */
    @JsonProperty(value = "properties.actionName")
    private String actionName;

    /**
     * The beginning time in UTC of a range that the recommendation refers to.
     */
    @JsonProperty(value = "properties.startTime")
    private DateTime startTime;

    /**
     * The end time in UTC of a range that the recommendation refers to.
     */
    @JsonProperty(value = "properties.endTime")
    private DateTime endTime;

    /**
     * When to notify this recommendation next in UTC. Null means that this
     * will never be notified anymore.
     */
    @JsonProperty(value = "properties.nextNotificationTime")
    private DateTime nextNotificationTime;

    /**
     * Date and time in UTC when this notification expires.
     */
    @JsonProperty(value = "properties.notificationExpirationTime")
    private DateTime notificationExpirationTime;

    /**
     * Last timestamp in UTC this instance was actually notified. Null means
     * that this recommendation hasn't been notified yet.
     */
    @JsonProperty(value = "properties.notifiedTime")
    private DateTime notifiedTime;

    /**
     * A metric value measured by the rule.
     */
    @JsonProperty(value = "properties.score")
    private Double score;

    /**
     * True if this is associated with a dynamically added rule.
     */
    @JsonProperty(value = "properties.isDynamic")
    private Boolean isDynamic;

    /**
     * Extension name of the portal if exists.
     */
    @JsonProperty(value = "properties.extensionName")
    private String extensionName;

    /**
     * Deep link to a blade on the portal.
     */
    @JsonProperty(value = "properties.bladeName")
    private String bladeName;

    /**
     * Forward link to an external document associated with the rule.
     */
    @JsonProperty(value = "properties.forwardLink")
    private String forwardLink;

    /**
     * Get timestamp when this instance was created.
     *
     * @return the creationTime value
     */
    public DateTime creationTime() {
        return this.creationTime;
    }

    /**
     * Set timestamp when this instance was created.
     *
     * @param creationTime the creationTime value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withCreationTime(DateTime creationTime) {
        this.creationTime = creationTime;
        return this;
    }

    /**
     * Get a GUID value that each recommendation object is associated with.
     *
     * @return the recommendationId value
     */
    public UUID recommendationId() {
        return this.recommendationId;
    }

    /**
     * Set a GUID value that each recommendation object is associated with.
     *
     * @param recommendationId the recommendationId value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withRecommendationId(UUID recommendationId) {
        this.recommendationId = recommendationId;
        return this;
    }

    /**
     * Get full ARM resource ID string that this recommendation object is associated with.
     *
     * @return the resourceId value
     */
    public String resourceId() {
        return this.resourceId;
    }

    /**
     * Set full ARM resource ID string that this recommendation object is associated with.
     *
     * @param resourceId the resourceId value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withResourceId(String resourceId) {
        this.resourceId = resourceId;
        return this;
    }

    /**
     * Get name of a resource type this recommendation applies, e.g. Subscription, ServerFarm, Site. Possible values include: 'ServerFarm', 'Subscription', 'WebSite'.
     *
     * @return the resourceScope value
     */
    public ResourceScopeType resourceScope() {
        return this.resourceScope;
    }

    /**
     * Set name of a resource type this recommendation applies, e.g. Subscription, ServerFarm, Site. Possible values include: 'ServerFarm', 'Subscription', 'WebSite'.
     *
     * @param resourceScope the resourceScope value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withResourceScope(ResourceScopeType resourceScope) {
        this.resourceScope = resourceScope;
        return this;
    }

    /**
     * Get unique name of the rule.
     *
     * @return the ruleName value
     */
    public String ruleName() {
        return this.ruleName;
    }

    /**
     * Set unique name of the rule.
     *
     * @param ruleName the ruleName value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withRuleName(String ruleName) {
        this.ruleName = ruleName;
        return this;
    }

    /**
     * Get UI-friendly name of the rule (may not be unique).
     *
     * @return the displayName value
     */
    public String displayName() {
        return this.displayName;
    }

    /**
     * Set UI-friendly name of the rule (may not be unique).
     *
     * @param displayName the displayName value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withDisplayName(String displayName) {
        this.displayName = displayName;
        return this;
    }

    /**
     * Get recommendation text.
     *
     * @return the message value
     */
    public String message() {
        return this.message;
    }

    /**
     * Set recommendation text.
     *
     * @param message the message value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withMessage(String message) {
        this.message = message;
        return this;
    }

    /**
     * Get level indicating how critical this recommendation can impact. Possible values include: 'Critical', 'Warning', 'Information', 'NonUrgentSuggestion'.
     *
     * @return the level value
     */
    public NotificationLevel level() {
        return this.level;
    }

    /**
     * Set level indicating how critical this recommendation can impact. Possible values include: 'Critical', 'Warning', 'Information', 'NonUrgentSuggestion'.
     *
     * @param level the level value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withLevel(NotificationLevel level) {
        this.level = level;
        return this;
    }

    /**
     * Get list of channels that this recommendation can apply. Possible values include: 'Notification', 'Api', 'Email', 'Webhook', 'All'.
     *
     * @return the channels value
     */
    public Channels channels() {
        return this.channels;
    }

    /**
     * Set list of channels that this recommendation can apply. Possible values include: 'Notification', 'Api', 'Email', 'Webhook', 'All'.
     *
     * @param channels the channels value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withChannels(Channels channels) {
        this.channels = channels;
        return this;
    }

    /**
     * Get the list of category tags that this recommendation belongs to.
     *
     * @return the tags value
     */
    public List<String> tags() {
        return this.tags;
    }

    /**
     * Set the list of category tags that this recommendation belongs to.
     *
     * @param tags the tags value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withTags(List<String> tags) {
        this.tags = tags;
        return this;
    }

    /**
     * Get name of action recommended by this object.
     *
     * @return the actionName value
     */
    public String actionName() {
        return this.actionName;
    }

    /**
     * Set name of action recommended by this object.
     *
     * @param actionName the actionName value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withActionName(String actionName) {
        this.actionName = actionName;
        return this;
    }

    /**
     * Get the beginning time in UTC of a range that the recommendation refers to.
     *
     * @return the startTime value
     */
    public DateTime startTime() {
        return this.startTime;
    }

    /**
     * Set the beginning time in UTC of a range that the recommendation refers to.
     *
     * @param startTime the startTime value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withStartTime(DateTime startTime) {
        this.startTime = startTime;
        return this;
    }

    /**
     * Get the end time in UTC of a range that the recommendation refers to.
     *
     * @return the endTime value
     */
    public DateTime endTime() {
        return this.endTime;
    }

    /**
     * Set the end time in UTC of a range that the recommendation refers to.
     *
     * @param endTime the endTime value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withEndTime(DateTime endTime) {
        this.endTime = endTime;
        return this;
    }

    /**
     * Get when to notify this recommendation next in UTC. Null means that this will never be notified anymore.
     *
     * @return the nextNotificationTime value
     */
    public DateTime nextNotificationTime() {
        return this.nextNotificationTime;
    }

    /**
     * Set when to notify this recommendation next in UTC. Null means that this will never be notified anymore.
     *
     * @param nextNotificationTime the nextNotificationTime value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withNextNotificationTime(DateTime nextNotificationTime) {
        this.nextNotificationTime = nextNotificationTime;
        return this;
    }

    /**
     * Get date and time in UTC when this notification expires.
     *
     * @return the notificationExpirationTime value
     */
    public DateTime notificationExpirationTime() {
        return this.notificationExpirationTime;
    }

    /**
     * Set date and time in UTC when this notification expires.
     *
     * @param notificationExpirationTime the notificationExpirationTime value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withNotificationExpirationTime(DateTime notificationExpirationTime) {
        this.notificationExpirationTime = notificationExpirationTime;
        return this;
    }

    /**
     * Get last timestamp in UTC this instance was actually notified. Null means that this recommendation hasn't been notified yet.
     *
     * @return the notifiedTime value
     */
    public DateTime notifiedTime() {
        return this.notifiedTime;
    }

    /**
     * Set last timestamp in UTC this instance was actually notified. Null means that this recommendation hasn't been notified yet.
     *
     * @param notifiedTime the notifiedTime value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withNotifiedTime(DateTime notifiedTime) {
        this.notifiedTime = notifiedTime;
        return this;
    }

    /**
     * Get a metric value measured by the rule.
     *
     * @return the score value
     */
    public Double score() {
        return this.score;
    }

    /**
     * Set a metric value measured by the rule.
     *
     * @param score the score value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withScore(Double score) {
        this.score = score;
        return this;
    }

    /**
     * Get true if this is associated with a dynamically added rule.
     *
     * @return the isDynamic value
     */
    public Boolean isDynamic() {
        return this.isDynamic;
    }

    /**
     * Set true if this is associated with a dynamically added rule.
     *
     * @param isDynamic the isDynamic value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withIsDynamic(Boolean isDynamic) {
        this.isDynamic = isDynamic;
        return this;
    }

    /**
     * Get extension name of the portal if exists.
     *
     * @return the extensionName value
     */
    public String extensionName() {
        return this.extensionName;
    }

    /**
     * Set extension name of the portal if exists.
     *
     * @param extensionName the extensionName value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withExtensionName(String extensionName) {
        this.extensionName = extensionName;
        return this;
    }

    /**
     * Get deep link to a blade on the portal.
     *
     * @return the bladeName value
     */
    public String bladeName() {
        return this.bladeName;
    }

    /**
     * Set deep link to a blade on the portal.
     *
     * @param bladeName the bladeName value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withBladeName(String bladeName) {
        this.bladeName = bladeName;
        return this;
    }

    /**
     * Get forward link to an external document associated with the rule.
     *
     * @return the forwardLink value
     */
    public String forwardLink() {
        return this.forwardLink;
    }

    /**
     * Set forward link to an external document associated with the rule.
     *
     * @param forwardLink the forwardLink value to set
     * @return the RecommendationInner object itself.
     */
    public RecommendationInner withForwardLink(String forwardLink) {
        this.forwardLink = forwardLink;
        return this;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.core.data;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.util.TransientFileFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class implements a LRU cache used by {@link CachingDataStore}. If cache
 * size exceeds limit, this cache goes in purge mode. In purge mode any
 * operation to cache is no-op. After purge cache size would be less than
 * cachePurgeResizeFactor * maximum size.
 */
public class LocalCache {

    /**
     * Logger instance.
     */
    static final Logger LOG = LoggerFactory.getLogger(LocalCache.class);

    /**
     * The file names of the files that need to be deleted.
     */
    final Set<String> toBeDeleted = new HashSet<String>();

    /**
     * The filename Vs file size LRU cache.
     */
    LRUCache cache;

    /**
     * The directory where the files are created.
     */
    private final File directory;

    /**
     * The directory where tmp files are created.
     */
    private final File tmp;

    /**
     * If true cache is in purgeMode and not available. All operation would be
     * no-op.
     */
    private volatile boolean purgeMode;

    private AsyncUploadCache asyncUploadCache;

    // counters used to emit a periodic "cache misses per interval" log line
    private AtomicLong cacheMissCounter = new AtomicLong();

    private AtomicLong cacheMissDuration = new AtomicLong();

    /**
     * Build LRU cache of files located at 'path'. It uses lastModified property
     * of file to build LRU cache. If cache size exceeds limit size, this cache
     * goes in purge mode. In purge mode any operation to cache is no-op.
     *
     * @param path file system path
     * @param tmpPath temporary directory used by cache.
     * @param maxSizeInBytes maximum size of cache.
     * @param cachePurgeTrigFactor factor which triggers cache to purge mode.
     * That is if current size exceed (cachePurgeTrigFactor * maxSizeInBytes), the
     * cache will go in auto-purge mode.
     * @param cachePurgeResizeFactor after cache purge size of cache will be
     * just less (cachePurgeResizeFactor * maxSizeInBytes).
     * @param asyncUploadCache {@link AsyncUploadCache}
     */
    public LocalCache(String path, String tmpPath, long maxSizeInBytes,
            double cachePurgeTrigFactor, double cachePurgeResizeFactor,
            AsyncUploadCache asyncUploadCache) {
        directory = new File(path);
        tmp = new File(tmpPath);
        LOG.info(
            "cachePurgeTrigFactor =[{}], cachePurgeResizeFactor =[{}], "
                + "cachePurgeTrigFactorSize =[{}], cachePurgeResizeFactorSize =[{}]",
            new Object[] { cachePurgeTrigFactor, cachePurgeResizeFactor,
                (cachePurgeTrigFactor * maxSizeInBytes),
                (cachePurgeResizeFactor * maxSizeInBytes) });
        cache = new LRUCache(maxSizeInBytes, cachePurgeTrigFactor,
            cachePurgeResizeFactor);
        this.asyncUploadCache = asyncUploadCache;
        // NOTE(review): 'this' escapes to the background thread before the
        // constructor returns (CacheBuildJob is a non-static inner class);
        // appears safe today because the job only reads fields assigned above,
        // but confirm before adding new mutable state.
        new Thread(new CacheBuildJob()).start();
    }

    /**
     * Store an item in the cache and return the input stream. If cache is in
     * purgeMode or file doesn't exists, inputstream from a
     * {@link TransientFileFactory#createTransientFile(String, String, File)} is
     * returned. Otherwise inputStream from cached file is returned. This method
     * doesn't close the incoming inputstream.
     *
     * @param fileName the key of cache.
     * @param in {@link InputStream}
     * @return the (new) input stream.
     */
    public InputStream store(String fileName, final InputStream in)
            throws IOException {
        fileName = fileName.replace("\\", "/");
        File f = getFile(fileName);
        long length = 0;
        if (!f.exists() || isInPurgeMode()) {
            OutputStream out = null;
            File transFile = null;
            try {
                TransientFileFactory tff = TransientFileFactory.getInstance();
                transFile = tff.createTransientFile("s3-", "tmp", tmp);
                out = new BufferedOutputStream(new FileOutputStream(transFile));
                length = IOUtils.copyLarge(in, out);
            } finally {
                IOUtils.closeQuietly(out);
            }
            // rename the file to local fs cache
            if (canAdmitFile(length)
                && (f.getParentFile().exists() || f.getParentFile().mkdirs())
                && transFile.renameTo(f) && f.exists()) {
                // FIX: the condition was inverted ("exists() && delete()"),
                // so the failure message was logged exactly when the delete
                // succeeded. Only warn when a leftover tmp file could NOT be
                // removed.
                if (transFile.exists() && !transFile.delete()) {
                    LOG.info("tmp file [{}] not deleted successfully",
                        transFile.getAbsolutePath());
                }
                transFile = null;
                LOG.debug(
                    "file [{}] doesn't exists. adding to local cache using inputstream.",
                    fileName);
                cache.put(fileName, f.length());
            } else {
                // couldn't admit/move into the cache: hand back the transient
                // file so the caller still gets the streamed content
                LOG.debug(
                    "file [{}] doesn't exists. returning transient file [{}].",
                    fileName, transFile.getAbsolutePath());
                f = transFile;
            }
        } else {
            // already cached: touch for LRU ordering and refresh the entry
            f.setLastModified(System.currentTimeMillis());
            LOG.debug(
                "file [{}] exists. adding to local cache using inputstream.",
                fileName);
            cache.put(fileName, f.length());
        }
        tryPurge();
        return new LazyFileInputStream(f);
    }

    /**
     * Store an item along with file in cache. Cache size is increased by
     * {@link File#length()} If file already exists in cache,
     * {@link File#setLastModified(long)} is updated with current time.
     *
     * @param fileName the key of cache.
     * @param src file to be added to cache.
     */
    public File store(String fileName, final File src) {
        try {
            return store(fileName, src, false).getFile();
        } catch (IOException ioe) {
            LOG.warn("Exception in addding file [" + fileName
                + "] to local cache.", ioe);
        }
        return null;
    }

    /**
     * This method add file to {@link LocalCache} and tries that file can be
     * added to {@link AsyncUploadCache}. If file is added to
     * {@link AsyncUploadCache} successfully, it sets
     * {@link AsyncUploadCacheResult#setAsyncUpload(boolean)} to true.
     *
     * @param fileName name of the file.
     * @param src source file.
     * @param tryForAsyncUpload If true it tries to add fileName to
     *            {@link AsyncUploadCache}
     * @return {@link AsyncUploadCacheResult}. This method sets
     *         {@link AsyncUploadCacheResult#setAsyncUpload(boolean)} to true, if
     *         fileName is added to {@link AsyncUploadCache} successfully else
     *         it sets {@link AsyncUploadCacheResult#setAsyncUpload(boolean)} to
     *         false. {@link AsyncUploadCacheResult#getFile()} contains cached
     *         file, if it is added to {@link LocalCache} or original file.
     * @throws IOException
     */
    public AsyncUploadCacheResult store(String fileName, File src,
            boolean tryForAsyncUpload) throws IOException {
        fileName = fileName.replace("\\", "/");
        File dest = getFile(fileName);
        File parent = dest.getParentFile();
        AsyncUploadCacheResult result = new AsyncUploadCacheResult();
        result.setFile(src);
        result.setAsyncUpload(false);
        boolean destExists = false;
        // either the destination already exists (touch it), or the source can
        // be admitted and renamed into the cache directory
        if ((destExists = dest.exists())
            || (src.exists() && !dest.exists() && !src.equals(dest)
                && canAdmitFile(src.length())
                && (parent.exists() || parent.mkdirs()) && (src.renameTo(dest)))) {
            if (destExists) {
                dest.setLastModified(System.currentTimeMillis());
            }
            LOG.debug("file [{}] moved to [{}] ", src.getAbsolutePath(),
                dest.getAbsolutePath());
            LOG.debug(
                "file [{}] exists= [{}] added to local cache, isLastModified [{}]",
                new Object[] { dest.getAbsolutePath(), dest.exists(),
                    destExists });
            cache.put(fileName, dest.length());
            result.setFile(dest);
            if (tryForAsyncUpload) {
                result.setAsyncUpload(asyncUploadCache.add(fileName).canAsyncUpload());
            }
        } else {
            LOG.info("file [{}] exists= [{}] not added to local cache.",
                fileName, destExists);
        }
        tryPurge();
        return result;
    }

    /**
     * Return the inputstream from from cache, or null if not in the cache.
     *
     * @param fileName name of file.
     * @return stream or null.
     */
    public InputStream getIfStored(String fileName) throws IOException {
        File file = getFileIfStored(fileName);
        return file == null ? null : new LazyFileInputStream(file);
    }

    public File getFileIfStored(String fileName) throws IOException {
        fileName = fileName.replace("\\", "/");
        File f = getFile(fileName);
        long diff = (System.currentTimeMillis() - cacheMissDuration.get()) / 1000;
        // logged at 5 minute interval minimum
        if (diff > 5 * 60) {
            LOG.info("local cache misses [{}] in [{}] sec", new Object[] {
                cacheMissCounter.getAndSet(0), diff });
            cacheMissDuration.set(System.currentTimeMillis());
        }

        // return file in purge mode = true and file present in asyncUploadCache
        // as asyncUploadCache's files will be not be deleted in cache purge.
        if (!f.exists()
            || (isInPurgeMode() && !asyncUploadCache.hasEntry(fileName, false))) {
            LOG.debug(
                "getFileIfStored returned: purgeMode=[{}], file=[{}] exists=[{}]",
                new Object[] { isInPurgeMode(), f.getAbsolutePath(), f.exists() });
            cacheMissCounter.incrementAndGet();
            return null;
        } else {
            // touch entry in LRU caches
            f.setLastModified(System.currentTimeMillis());
            cache.get(fileName);
            return f;
        }
    }

    /**
     * Delete file from cache. Size of cache is reduced by file length. The
     * method is no-op if file doesn't exist in cache.
     *
     * @param fileName file name that need to be removed from cache.
     */
    public void delete(String fileName) {
        if (isInPurgeMode()) {
            LOG.debug("purgeMode true :delete returned");
            return;
        }
        fileName = fileName.replace("\\", "/");
        cache.remove(fileName);
    }

    /**
     * Returns length of file if exists in cache else returns null.
     * @param fileName name of the file.
     */
    public Long getFileLength(String fileName) {
        Long length = null;
        try {
            length = cache.get(fileName);
            if (length == null) {
                File f = getFileIfStored(fileName);
                if (f != null) {
                    length = f.length();
                }
            }
        } catch (IOException ioe) {
            // FIX: was silently swallowed; this is still a best-effort lookup
            // (null is a valid "unknown" answer) but the failure is now visible
            LOG.debug("error getting length of file [" + fileName + "]", ioe);
        }
        return length;
    }

    /**
     * Close the cache. Cache maintain set of files which it was not able to
     * delete successfully. This method will make an attempt to delete all
     * unsuccessful delete files.
     */
    public void close() {
        LOG.debug("close");
        deleteOldFiles();
    }

    /**
     * Check if cache can admit file of given length.
     * @param length of the file.
     * @return true if yes else return false.
     */
    private boolean canAdmitFile(final long length) {
        // order is important here
        boolean value = !isInPurgeMode() && (cache.canAdmitFile(length));
        if (!value) {
            LOG.debug(
                "cannot admit file of length=[{}] and currentSizeInBytes=[{}] ",
                length, cache.currentSizeInBytes);
        }
        return value;
    }

    /**
     * Return true if cache is in purge mode else return false.
     */
    synchronized boolean isInPurgeMode() {
        return purgeMode;
    }

    /**
     * Set purge mode. If set to true all cache operation will be no-op. If set
     * to false, all operations to cache are available.
     *
     * @param purgeMode purge mode
     */
    synchronized void setPurgeMode(final boolean purgeMode) {
        this.purgeMode = purgeMode;
    }

    /** Resolve a cache key to its backing file under the cache directory. */
    File getFile(final String fileName) {
        return new File(directory, fileName);
    }

    /** Retry deletion of the files recorded in {@link #toBeDeleted}. */
    private void deleteOldFiles() {
        int initialSize = toBeDeleted.size();
        int count = 0;
        for (String fileName : new ArrayList<String>(toBeDeleted)) {
            fileName = fileName.replace("\\", "/");
            if (cache.remove(fileName) != null) {
                count++;
            }
        }
        LOG.info("deleted [{}]/[{}] files.", count, initialSize);
    }

    /**
     * This method tries to delete a file. If it is not able to delete file due
     * to any reason, it add it toBeDeleted list.
     *
     * @param fileName name of the file which will be deleted.
     * @return true if this method deletes file successfully else return false.
     */
    boolean tryDelete(final String fileName) {
        LOG.debug("try deleting file [{}]", fileName);
        File f = getFile(fileName);
        if (f.exists() && f.delete()) {
            LOG.info("File [{}] deleted successfully", f.getAbsolutePath());
            toBeDeleted.remove(fileName);
            while (true) {
                f = f.getParentFile();
                // FIX: File.list() returns null on I/O error or if the
                // directory vanished concurrently; guard against the NPE
                String[] entries = f.list();
                if (f.equals(directory) || entries == null
                    || entries.length > 0) {
                    break;
                }
                // delete empty parent folders (except the main directory)
                f.delete();
            }
            return true;
        } else if (f.exists()) {
            LOG.info("not able to delete file [{}]", f.getAbsolutePath());
            toBeDeleted.add(fileName);
            return false;
        }
        return true;
    }

    /**
     * Estimate an initial capacity for the LRU map from the byte budget,
     * clamped to [1024, 64K] entries.
     */
    static int maxSizeElements(final long bytes) {
        // assumes an average item size of roughly 64 KB (the divisor below);
        // NOTE(review): the historical comment claimed ~52 KB per item after a
        // CQ installation — the divisor is only a sizing heuristic either way
        int count = (int) (bytes / 65535);
        count = Math.max(1024, count);
        count = Math.min(64 * 1024, count);
        return count;
    }

    /**
     * This method tries purging of local cache. It checks if local cache
     * has exceeded the defined limit then it triggers purge cache job in a
     * separate thread.
     */
    synchronized void tryPurge() {
        if (!isInPurgeMode()
            && cache.currentSizeInBytes > cache.cachePurgeTrigSize) {
            setPurgeMode(true);
            LOG.info(
                "cache.entries = [{}], currentSizeInBytes=[{}] exceeds cachePurgeTrigSize=[{}]",
                new Object[] { cache.size(), cache.currentSizeInBytes,
                    cache.cachePurgeTrigSize });
            new Thread(new PurgeJob()).start();
        } else {
            LOG.debug(
                "currentSizeInBytes=[{}],cachePurgeTrigSize=[{}], isInPurgeMode =[{}]",
                new Object[] { cache.currentSizeInBytes,
                    cache.cachePurgeTrigSize, isInPurgeMode() });
        }
    }

    /**
     * A LRU based extension {@link LinkedHashMap}. The key is file name and
     * value is length of file.
     */
    private class LRUCache extends LinkedHashMap<String, Long> {
        private static final long serialVersionUID = 1L;

        volatile long currentSizeInBytes;

        final long maxSizeInBytes;

        final long cachePurgeResize;

        final long cachePurgeTrigSize;

        LRUCache(final long maxSizeInBytes,
                final double cachePurgeTrigFactor,
                final double cachePurgeResizeFactor) {
            // accessOrder=true makes iteration order least-recently-used first
            super(maxSizeElements(maxSizeInBytes), (float) 0.75, true);
            this.maxSizeInBytes = maxSizeInBytes;
            this.cachePurgeTrigSize = (long) (cachePurgeTrigFactor * maxSizeInBytes);
            this.cachePurgeResize = (long) (cachePurgeResizeFactor * maxSizeInBytes);
        }

        /**
         * Overridden {@link Map#remove(Object)} to delete corresponding file
         * from file system.
         */
        @Override
        public synchronized Long remove(final Object key) {
            String fileName = (String) key;
            fileName = fileName.replace("\\", "/");
            try {
                // not removing file from local cache, if there is in progress
                // async upload on it.
                if (asyncUploadCache.hasEntry(fileName, false)) {
                    LOG.info(
                        "AsyncUploadCache upload contains file [{}]. Not removing it from LocalCache.",
                        fileName);
                    return null;
                }
            } catch (IOException e) {
                LOG.debug("error: ", e);
                return null;
            }
            Long flength = null;
            if (tryDelete(fileName)) {
                flength = super.remove(key);
                if (flength != null) {
                    LOG.debug("cache entry [{}], with size [{}] removed.",
                        fileName, flength);
                    currentSizeInBytes -= flength.longValue();
                }
            } else if (!getFile(fileName).exists()) {
                // second attempt. remove from cache if file doesn't exists
                flength = super.remove(key);
                if (flength != null) {
                    LOG.debug(
                        "file not exists. cache entry [{}], with size [{}] removed.",
                        fileName, flength);
                    currentSizeInBytes -= flength.longValue();
                }
            } else {
                LOG.info("not able to remove cache entry [{}], size [{}]", key,
                    super.get(key));
            }
            return flength;
        }

        @Override
        public Long put(final String fileName, final Long value) {
            if (isInPurgeMode()) {
                LOG.debug("cache is purge mode: put is no-op");
                return null;
            }
            synchronized (this) {
                Long oldValue = cache.get(fileName);
                if (oldValue == null) {
                    long flength = value.longValue();
                    currentSizeInBytes += flength;
                    return super.put(fileName.replace("\\", "/"), value);
                }
                // entry already present: keep it live and cancel any pending
                // delete; NOTE(review): a changed file length is not re-accounted
                // here — confirm callers never shrink/grow an existing entry
                toBeDeleted.remove(fileName);
                return oldValue;
            }
        }

        @Override
        public Long get(Object key) {
            if (isInPurgeMode()) {
                LOG.debug("cache is purge mode: get is no-op");
                return null;
            }
            synchronized (this) {
                return super.get(key);
            }
        }

        /**
         * This method check if cache can admit file of given length.
         * @param length length of file.
         * @return true if cache size + length is less than maxSize.
         */
        synchronized boolean canAdmitFile(final long length) {
            return cache.currentSizeInBytes + length < cache.maxSizeInBytes;
        }
    }

    /**
     * This class performs purging of local cache. It implements
     * {@link Runnable} and should be invoked in a separate thread.
     */
    private class PurgeJob implements Runnable {

        public PurgeJob() {
        }

        /**
         * This method purges local cache till its size is less than
         * cacheResizefactor * maxSize
         */
        @Override
        public void run() {
            try {
                synchronized (cache) {
                    // first try to delete toBeDeleted files
                    int initialSize = cache.size();
                    LOG.info(" cache purge job started. initial cache entries = [{}]",
                        initialSize);
                    for (String fileName : new ArrayList<String>(toBeDeleted)) {
                        cache.remove(fileName);
                    }
                    // evict least-recently-used entries until the cache is
                    // below the resize watermark; the iterator is re-created
                    // after each removal to avoid ConcurrentModificationException
                    Iterator<Map.Entry<String, Long>> itr = cache.entrySet().iterator();
                    while (itr.hasNext()) {
                        Map.Entry<String, Long> entry = itr.next();
                        if (entry.getKey() != null) {
                            if (cache.currentSizeInBytes > cache.cachePurgeResize) {
                                cache.remove(entry.getKey());
                                itr = cache.entrySet().iterator();
                            } else {
                                break;
                            }
                        }
                    }
                    LOG.info(
                        " cache purge job completed: cleaned [{}] files and currentSizeInBytes = [{}]",
                        (initialSize - cache.size()), cache.currentSizeInBytes);
                }
            } catch (Exception e) {
                LOG.error("error in purge jobs:", e);
            } finally {
                setPurgeMode(false);
            }
        }
    }

    /**
     * This class implements {@link Runnable} interface to build LRU cache
     * asynchronously.
     */
    private class CacheBuildJob implements Runnable {

        public void run() {
            long startTime = System.currentTimeMillis();
            ArrayList<File> allFiles = new ArrayList<File>();
            Iterator<File> it = FileUtils.iterateFiles(directory, null, true);
            while (it.hasNext()) {
                File f = it.next();
                allFiles.add(f);
            }
            long t1 = System.currentTimeMillis();
            LOG.debug("Time taken to recursive [{}] took [{}] sec",
                allFiles.size(), ((t1 - startTime) / 1000));

            String dataStorePath = directory.getAbsolutePath();
            // convert to java path format
            dataStorePath = dataStorePath.replace("\\", "/");
            LOG.info("directoryPath = " + dataStorePath);

            String tmpPath = tmp.getAbsolutePath();
            tmpPath = tmpPath.replace("\\", "/");
            LOG.debug("tmp path [{}]", tmpPath);
            long time = System.currentTimeMillis();
            int count = 0;
            for (File f : allFiles) {
                if (f.exists()) {
                    count++;
                    String name = f.getPath();
                    String filePath = f.getAbsolutePath();
                    // convert to java path format
                    name = name.replace("\\", "/");
                    filePath = filePath.replace("\\", "/");
                    // skipped any temp file
                    if (filePath.startsWith(tmpPath)) {
                        LOG.info("tmp file [{}] skipped ", filePath);
                        continue;
                    }
                    // derive the cache key: path relative to the cache root,
                    // without a leading separator
                    if (filePath.startsWith(dataStorePath)) {
                        name = filePath.substring(dataStorePath.length());
                    }
                    if (name.startsWith("/") || name.startsWith("\\")) {
                        name = name.substring(1);
                    }
                    store(name, f);
                    long now = System.currentTimeMillis();
                    // progress heartbeat at most every 10 seconds
                    if (now > time + 10000) {
                        LOG.info("Processed {" + (count) + "}/{" + allFiles.size() + "}");
                        time = now;
                    }
                }
            }
            LOG.debug(
                "Processed [{}]/[{}], currentSizeInBytes = [{}], maxSizeInBytes = [{}], cache.filecount = [{}]",
                new Object[] { count, allFiles.size(),
                    cache.currentSizeInBytes, cache.maxSizeInBytes,
                    cache.size() });
            long t3 = System.currentTimeMillis();
            LOG.info("Time to build cache of [{}] files took [{}] sec",
                allFiles.size(), ((t3 - startTime) / 1000));
        }
    }
}
package view;

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import utils.ClickCallback;
import utils.EventQueue;
import utils.ValueCallback;

import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;

/**
 * A Swing panel that displays an OpenCV {@link Mat} and offers simple,
 * thread-safe-ish interaction helpers (click polling, sliders, buttons).
 * Control callbacks are queued and executed on the caller's thread via
 * {@link #processEvents()}, not on the EDT.
 */
@SuppressWarnings("serial")
public class ImgWindow extends JPanel {
    JFrame frame;
    Graphics2D graphics;
    volatile BufferedImage img = null;
    volatile boolean clicked;
    volatile public int mouseX, mouseY;
    volatile public boolean closed;
    volatile EventQueue eventQueue = new EventQueue();

    public ImgWindow(JFrame frame) {
        this.frame = frame;
        this.frame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosed(WindowEvent e) {
                closed = true;
            }
        });

        this.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent e) {
                // FIX: this previously synchronized on the anonymous
                // MouseAdapter instance, while isClicked() synchronizes on the
                // ImgWindow — two different monitors, so the clicked/mouseX/Y
                // hand-off was unprotected. Lock the window itself.
                synchronized (ImgWindow.this) {
                    clicked = true;
                    mouseX = e.getX();
                    mouseY = e.getY();
                }
            }
        });

        this.addMouseMotionListener(new MouseMotionListener() {
            @Override
            public void mouseMoved(MouseEvent e) {
                mouseX = e.getX();
                mouseY = e.getY();
            }

            @Override
            public void mouseDragged(MouseEvent e) {
                mouseX = e.getX();
                mouseY = e.getY();
            }
        });
    }

    /**
     * Runs all pending control callbacks (button clicks, slider changes) on
     * the calling thread. Call this regularly from your main loop.
     */
    public void processEvents() {
        for (Runnable event : eventQueue.poll()) {
            event.run();
        }
    }

    /**
     * Displays the given matrix (or clears the display if null) and resizes
     * the enclosing frame to fit when the image dimensions change.
     */
    public void setImage(Mat mat) {
        if (mat == null) {
            img = null;
        } else {
            this.img = matToBufferedImage(mat);
            if (getWidth() != img.getWidth() || getHeight() != img.getHeight()) {
                setPreferredSize(new Dimension(img.getWidth(), img.getHeight()));
                frame.pack();
            }
        }
        repaint();
    }

    /**
     * Polls and clears the click flag set by the mouse listener.
     *
     * @return true exactly once per registered click.
     */
    public boolean isClicked() {
        synchronized (this) {
            boolean res = clicked;
            clicked = false;
            return res;
        }
    }

    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        // copy the volatile reference once so a concurrent setImage(null)
        // cannot null it between the check and the draw
        BufferedImage tmp = img;
        if (tmp != null) {
            g.drawImage(tmp, 0, 0, tmp.getWidth(), tmp.getHeight(), this);
        }
    }

    /**
     * Starts direct drawing on the current image. Pair with {@link #end()}.
     *
     * @return a Graphics2D for the backing image, or null if no image is set.
     */
    public Graphics2D begin() {
        if (img != null) {
            graphics = img.createGraphics();
            return graphics;
        } else {
            return null;
        }
    }

    /** Finishes a {@link #begin()} drawing session and repaints. */
    public void end() {
        if (graphics != null) {
            graphics.dispose();
            graphics = null;
            repaint();
        }
    }

    public void setTitle(String title) {
        frame.setTitle(title);
    }

    /**
     * Moves the frame to the given display index, falling back to the first
     * display if the index is out of range.
     *
     * @throws RuntimeException if no screens are available.
     */
    public void moveToDisplay(int display) {
        GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
        GraphicsDevice[] gd = ge.getScreenDevices();
        if (display > -1 && display < gd.length) {
            frame.setLocation(gd[display].getDefaultConfiguration().getBounds().x, frame.getY());
        } else if (gd.length > 0) {
            frame.setLocation(gd[0].getDefaultConfiguration().getBounds().x, frame.getY());
        } else {
            throw new RuntimeException("No Screens Found");
        }
    }

    /** Creates an 8-bit 3-channel Mat matching the panel's current size. */
    public Mat createBuffer() {
        return new Mat(getHeight(), getWidth(), CvType.CV_8UC3);
    }

    public void maximize() {
        frame.setExtendedState(frame.getExtendedState() | JFrame.MAXIMIZED_BOTH);
    }

    /**
     * Adds a button; the callback fires from {@link #processEvents()}, not
     * from the EDT.
     */
    public void addButton(final String text, final ClickCallback callback) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JPanel panel = ImgWindow.this;
                JButton btn = new JButton(text);
                btn.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        eventQueue.add(new Runnable() {
                            @Override
                            public void run() {
                                if (callback != null) callback.clicked();
                            }
                        });
                    }
                });
                panel.add(btn);
                panel.validate();
            }
        });
    }

    /**
     * Adds a slider; the callback fires from {@link #processEvents()}, not
     * from the EDT.
     */
    public void addSlider(final int min, final int max, final int initial, final ValueCallback callback) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JPanel panel = ImgWindow.this;
                final JSlider slider = new JSlider(min, max, initial);
                slider.addChangeListener(new ChangeListener() {
                    @Override
                    public void stateChanged(ChangeEvent e) {
                        eventQueue.add(new Runnable() {
                            @Override
                            public void run() {
                                if (callback != null) callback.valueChanged(slider.getValue());
                            }
                        });
                    }
                });
                panel.add(slider);
                panel.validate();
            }
        });
    }

    /** Adds a colored text label to the control area. */
    public void addLabel(final String text, final Color color) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JPanel panel = ImgWindow.this;
                JLabel label = new JLabel(text);
                label.setForeground(color);
                panel.add(label);
                panel.validate();
            }
        });
    }

    /** Removes all controls added via addButton/addSlider/addLabel. */
    public void clearControlls() {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                ImgWindow.this.removeAll();
                ImgWindow.this.validate();
            }
        });
    }

    /**
     * Correctly-spelled alias for the historically misspelled
     * {@link #clearControlls()}; the old name is kept for compatibility.
     */
    public void clearControls() {
        clearControlls();
    }

    public static ImgWindow newUndecoratedWindow() {
        return newWindow(null, true);
    }

    public static ImgWindow newWindow() {
        return newWindow(null, false);
    }

    public static ImgWindow newWindow(Mat mat) {
        return newWindow(mat, false);
    }

    /**
     * Creates a visible 400x400 frame hosting a new ImgWindow panel and shows
     * the given matrix (may be null).
     */
    public static ImgWindow newWindow(Mat mat, boolean undecorated) {
        JFrame frame = new JFrame();
        frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        frame.setSize(400, 400);
        frame.setUndecorated(undecorated);
        ImgWindow panel = new ImgWindow(frame);
        panel.setLayout(new FlowLayout(FlowLayout.LEFT));
        frame.add(panel);
        frame.setVisible(true);
        panel.setImage(mat);
        return panel;
    }

    /**
     * Converts a 1-channel (grayscale) or 3-channel (BGR) matrix to a
     * BufferedImage; returns null for any other channel count.
     */
    public static BufferedImage matToBufferedImage(Mat matrix) {
        if (matrix.channels() == 1) {
            // FIX: the original switched on channels() inside this branch with
            // an unreachable 3-channel arm; channels() == 1 is always grayscale
            int cols = matrix.cols();
            int rows = matrix.rows();
            int elemSize = (int) matrix.elemSize();
            byte[] data = new byte[cols * rows * elemSize];
            matrix.get(0, 0, data);
            BufferedImage image = new BufferedImage(cols, rows, BufferedImage.TYPE_BYTE_GRAY);
            image.getRaster().setDataElements(0, 0, cols, rows, data);
            return image;
        }
        if (matrix.channels() == 3) {
            int width = matrix.width(), height = matrix.height(), channels = matrix.channels();
            byte[] sourcePixels = new byte[width * height * channels];
            matrix.get(0, 0, sourcePixels);
            // create new image and copy pixels straight into its backing array;
            // TYPE_3BYTE_BGR matches OpenCV's BGR layout, so no swizzle needed
            BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
            final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
            System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);
            return image;
        }
        return null;
    }
}
package org.jsoup.nodes; import org.jsoup.helper.StringUtil; import org.jsoup.helper.Validate; import org.jsoup.parser.Parser; import org.jsoup.select.NodeTraversor; import org.jsoup.select.NodeVisitor; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; import java.util.List; /** The base, abstract Node model. Elements, Documents, Comments etc are all Node instances. @author Jonathan Hedley, jonathan@hedley.net */ public abstract class Node implements Cloneable { Node parentNode; List<Node> childNodes; Attributes attributes; String baseUri; int siblingIndex; /** Create a new Node. @param baseUri base URI @param attributes attributes (not null, but may be empty) */ protected Node(String baseUri, Attributes attributes) { Validate.notNull(baseUri); Validate.notNull(attributes); childNodes = new ArrayList<Node>(4); this.baseUri = baseUri.trim(); this.attributes = attributes; } protected Node(String baseUri) { this(baseUri, new Attributes()); } /** * Default constructor. Doesn't setup base uri, children, or attributes; use with caution. */ protected Node() { childNodes = Collections.emptyList(); attributes = null; } /** Get the node name of this node. Use for debugging purposes and not logic switching (for that, use instanceof). @return node name */ public abstract String nodeName(); /** * Get an attribute's value by its key. * <p/> * To get an absolute URL from an attribute that may be a relative URL, prefix the key with <code><b>abs</b></code>, * which is a shortcut to the {@link #absUrl} method. * E.g.: <blockquote><code>String url = a.attr("abs:href");</code></blockquote> * @param attributeKey The attribute key. * @return The attribute, or empty string if not present (to avoid nulls). 
* @see #attributes() * @see #hasAttr(String) * @see #absUrl(String) */ public String attr(String attributeKey) { Validate.notNull(attributeKey); if (attributes.hasKey(attributeKey)) return attributes.get(attributeKey); else if (attributeKey.toLowerCase().startsWith("abs:")) return absUrl(attributeKey.substring("abs:".length())); else return ""; } /** * Get all of the element's attributes. * @return attributes (which implements iterable, in same order as presented in original HTML). */ public Attributes attributes() { return attributes; } /** * Set an attribute (key=value). If the attribute already exists, it is replaced. * @param attributeKey The attribute key. * @param attributeValue The attribute value. * @return this (for chaining) */ public Node attr(String attributeKey, String attributeValue) { attributes.put(attributeKey, attributeValue); return this; } /** * Test if this element has an attribute. * @param attributeKey The attribute key to check. * @return true if the attribute exists, false if not. */ public boolean hasAttr(String attributeKey) { Validate.notNull(attributeKey); if (attributeKey.startsWith("abs:")) { String key = attributeKey.substring("abs:".length()); if (attributes.hasKey(key) && !absUrl(key).equals("")) return true; } return attributes.hasKey(attributeKey); } /** * Remove an attribute from this element. * @param attributeKey The attribute to remove. * @return this (for chaining) */ public Node removeAttr(String attributeKey) { Validate.notNull(attributeKey); attributes.remove(attributeKey); return this; } /** Get the base URI of this node. @return base URI */ public String baseUri() { return baseUri; } /** Update the base URI of this node and all of its descendants. 
@param baseUri base URI to set */ public void setBaseUri(final String baseUri) { Validate.notNull(baseUri); traverse(new NodeVisitor() { public void head(Node node, int depth) { node.baseUri = baseUri; } public void tail(Node node, int depth) { } }); } /** * Get an absolute URL from a URL attribute that may be relative (i.e. an <code>&lt;a href></code> or * <code>&lt;img src></code>). * <p/> * E.g.: <code>String absUrl = linkEl.absUrl("href");</code> * <p/> * If the attribute value is already absolute (i.e. it starts with a protocol, like * <code>http://</code> or <code>https://</code> etc), and it successfully parses as a URL, the attribute is * returned directly. Otherwise, it is treated as a URL relative to the element's {@link #baseUri}, and made * absolute using that. * <p/> * As an alternate, you can use the {@link #attr} method with the <code>abs:</code> prefix, e.g.: * <code>String absUrl = linkEl.attr("abs:href");</code> * * @param attributeKey The attribute key * @return An absolute URL if one could be made, or an empty string (not null) if the attribute was missing or * could not be made successfully into a URL. * @see #attr * @see java.net.URL#URL(java.net.URL, String) */ public String absUrl(String attributeKey) { Validate.notEmpty(attributeKey); String relUrl = attr(attributeKey); if (!hasAttr(attributeKey)) { return ""; // nothing to make absolute with } else { URL base; try { try { base = new URL(baseUri); } catch (MalformedURLException e) { // the base is unsuitable, but the attribute may be abs on its own, so try that URL abs = new URL(relUrl); return abs.toExternalForm(); } // workaround: java resolves '//path/file + ?foo' to '//path/?foo', not '//path/file?foo' as desired if (relUrl.startsWith("?")) relUrl = base.getPath() + relUrl; URL abs = new URL(base, relUrl); return abs.toExternalForm(); } catch (MalformedURLException e) { return ""; } } } /** Get a child node by its 0-based index. 
 @param index index of child node
 @return the child node at this index. Throws a {@code IndexOutOfBoundsException} if the index is out of bounds.
 */
public Node childNode(int index) {
    return childNodes.get(index);
}

/**
 Get this node's children. Presented as an unmodifiable list: new children can not be added, but the child nodes
 themselves can be manipulated.
 @return list of children. If no children, returns an empty list.
 */
public List<Node> childNodes() {
    // Unmodifiable VIEW over the live list: callers see later mutations but cannot add/remove.
    return Collections.unmodifiableList(childNodes);
}

/**
 * Returns a deep copy of this node's children. Changes made to these nodes will not be reflected in the original
 * nodes
 * @return a deep copy of this node's children
 */
public List<Node> childNodesCopy() {
    List<Node> children = new ArrayList<Node>(childNodes.size());
    for (Node node : childNodes) {
        children.add(node.clone());
    }
    return children;
}

/**
 * Get the number of child nodes that this node holds.
 * @return the number of child nodes that this node holds.
 */
public final int childNodeSize() {
    return childNodes.size();
}

// Snapshot of the children as an array; used by the tree-mutation helpers so they can
// iterate safely while the underlying list is being modified.
protected Node[] childNodesAsArray() {
    return childNodes.toArray(new Node[childNodeSize()]);
}

/**
 Gets this node's parent node.
 @return parent node; or null if no parent.
 */
public Node parent() {
    return parentNode;
}

/**
 Gets this node's parent node. Node overridable by extending classes, so useful if you really just need the Node type.
 @return parent node; or null if no parent.
 */
public final Node parentNode() {
    return parentNode;
}

/**
 * Gets the Document associated with this Node.
 * @return the Document associated with this Node, or null if there is no such Document.
 */
public Document ownerDocument() {
    // Walk up the parent chain until a Document (or a parentless root) is reached.
    if (this instanceof Document)
        return (Document) this;
    else if (parentNode == null)
        return null;
    else
        return parentNode.ownerDocument();
}

/**
 * Remove (delete) this node from the DOM tree. If this node has children, they are also removed.
*/ public void remove() { Validate.notNull(parentNode); parentNode.removeChild(this); } /** * Insert the specified HTML into the DOM before this node (i.e. as a preceding sibling). * @param html HTML to add before this node * @return this node, for chaining * @see #after(String) */ public Node before(String html) { addSiblingHtml(siblingIndex(), html); return this; } /** * Insert the specified node into the DOM before this node (i.e. as a preceding sibling). * @param node to add before this node * @return this node, for chaining * @see #after(Node) */ public Node before(Node node) { Validate.notNull(node); Validate.notNull(parentNode); parentNode.addChildren(siblingIndex(), node); return this; } /** * Insert the specified HTML into the DOM after this node (i.e. as a following sibling). * @param html HTML to add after this node * @return this node, for chaining * @see #before(String) */ public Node after(String html) { addSiblingHtml(siblingIndex()+1, html); return this; } /** * Insert the specified node into the DOM after this node (i.e. as a following sibling). * @param node to add after this node * @return this node, for chaining * @see #before(Node) */ public Node after(Node node) { Validate.notNull(node); Validate.notNull(parentNode); parentNode.addChildren(siblingIndex()+1, node); return this; } private void addSiblingHtml(int index, String html) { Validate.notNull(html); Validate.notNull(parentNode); Element context = parent() instanceof Element ? (Element) parent() : null; List<Node> nodes = Parser.parseFragment(html, context, baseUri()); parentNode.addChildren(index, nodes.toArray(new Node[nodes.size()])); } /** Wrap the supplied HTML around this node. @param html HTML to wrap around this element, e.g. {@code <div class="head"></div>}. Can be arbitrarily deep. @return this node, for chaining. */ public Node wrap(String html) { Validate.notEmpty(html); Element context = parent() instanceof Element ? 
(Element) parent() : null; List<Node> wrapChildren = Parser.parseFragment(html, context, baseUri()); Node wrapNode = wrapChildren.get(0); if (wrapNode == null || !(wrapNode instanceof Element)) // nothing to wrap with; noop return null; Element wrap = (Element) wrapNode; Element deepest = getDeepChild(wrap); parentNode.replaceChild(this, wrap); deepest.addChildren(this); // remainder (unbalanced wrap, like <div></div><p></p> -- The <p> is remainder if (wrapChildren.size() > 0) { for (int i = 0; i < wrapChildren.size(); i++) { Node remainder = wrapChildren.get(i); remainder.parentNode.removeChild(remainder); wrap.appendChild(remainder); } } return this; } /** * Removes this node from the DOM, and moves its children up into the node's parent. This has the effect of dropping * the node but keeping its children. * <p/> * For example, with the input html:<br/> * {@code <div>One <span>Two <b>Three</b></span></div>}<br/> * Calling {@code element.unwrap()} on the {@code span} element will result in the html:<br/> * {@code <div>One Two <b>Three</b></div>}<br/> * and the {@code "Two "} {@link TextNode} being returned. * @return the first child of this node, after the node has been unwrapped. Null if the node had no children. * @see #remove() * @see #wrap(String) */ public Node unwrap() { Validate.notNull(parentNode); int index = siblingIndex; Node firstChild = childNodes.size() > 0 ? childNodes.get(0) : null; parentNode.addChildren(index, this.childNodesAsArray()); this.remove(); return firstChild; } private Element getDeepChild(Element el) { List<Element> children = el.children(); if (children.size() > 0) return getDeepChild(children.get(0)); else return el; } /** * Replace this node in the DOM with the supplied node. * @param in the node that will will replace the existing node. 
 */
public void replaceWith(Node in) {
    Validate.notNull(in);
    Validate.notNull(parentNode);
    parentNode.replaceChild(this, in);
}

// Detaches this node from any current parent before adopting the new one; parentNode
// may be null to orphan the node.
protected void setParentNode(Node parentNode) {
    if (this.parentNode != null)
        this.parentNode.removeChild(this);
    this.parentNode = parentNode;
}

// Swaps child 'out' for 'in' at the same sibling position. 'in' is first detached
// from any previous parent; 'out' is orphaned.
protected void replaceChild(Node out, Node in) {
    Validate.isTrue(out.parentNode == this);
    Validate.notNull(in);
    if (in.parentNode != null)
        in.parentNode.removeChild(in);

    // NOTE(review): Integer here is pointless autoboxing of an int return — harmless
    // but could be a plain int.
    Integer index = out.siblingIndex();
    childNodes.set(index, in);
    in.parentNode = this;
    in.setSiblingIndex(index);
    out.parentNode = null;
}

// Removes child 'out' and renumbers the remaining siblings.
protected void removeChild(Node out) {
    Validate.isTrue(out.parentNode == this);
    int index = out.siblingIndex();
    childNodes.remove(index);
    reindexChildren();
    out.parentNode = null;
}

protected void addChildren(Node... children) {
    // most used. short circuit addChildren(int), which hits reindex children and array copy
    for (Node child : children) {
        reparentChild(child);
        childNodes.add(child);
        // appended at the tail, so its index is simply the new last slot — no full reindex needed
        child.setSiblingIndex(childNodes.size() - 1);
    }
}

protected void addChildren(int index, Node... children) {
    Validate.noNullElements(children);
    // Insert back-to-front so repeated add(index, …) preserves the callers' order.
    for (int i = children.length - 1; i >= 0; i--) {
        Node in = children[i];
        reparentChild(in);
        childNodes.add(index, in);
    }
    reindexChildren();
}

// Moves a node under this parent, detaching it from any previous parent first.
private void reparentChild(Node child) {
    if (child.parentNode != null)
        child.parentNode.removeChild(child);
    child.setParentNode(this);
}

// Re-stamps every child's cached sibling index to match its list position.
private void reindexChildren() {
    for (int i = 0; i < childNodes.size(); i++) {
        childNodes.get(i).setSiblingIndex(i);
    }
}

/**
 Retrieves this node's sibling nodes. Similar to {@link #childNodes()  node.parent.childNodes()}, but does not
 include this node (a node is not a sibling of itself).
 @return node siblings. If the node has no parent, returns an empty list.
*/ public List<Node> siblingNodes() { if (parentNode == null) return Collections.emptyList(); List<Node> nodes = parentNode.childNodes; List<Node> siblings = new ArrayList<Node>(nodes.size() - 1); for (Node node: nodes) if (node != this) siblings.add(node); return siblings; } /** Get this node's next sibling. @return next sibling, or null if this is the last sibling */ public Node nextSibling() { if (parentNode == null) return null; // root List<Node> siblings = parentNode.childNodes; Integer index = siblingIndex(); Validate.notNull(index); if (siblings.size() > index+1) return siblings.get(index+1); else return null; } /** Get this node's previous sibling. @return the previous sibling, or null if this is the first sibling */ public Node previousSibling() { if (parentNode == null) return null; // root List<Node> siblings = parentNode.childNodes; Integer index = siblingIndex(); Validate.notNull(index); if (index > 0) return siblings.get(index-1); else return null; } /** * Get the list index of this node in its node sibling list. I.e. if this is the first node * sibling, returns 0. * @return position in node sibling list * @see org.jsoup.nodes.Element#elementSiblingIndex() */ public int siblingIndex() { return siblingIndex; } protected void setSiblingIndex(int siblingIndex) { this.siblingIndex = siblingIndex; } /** * Perform a depth-first traversal through this node and its descendants. * @param nodeVisitor the visitor callbacks to perform on each node * @return this node, for chaining */ public Node traverse(NodeVisitor nodeVisitor) { Validate.notNull(nodeVisitor); NodeTraversor traversor = new NodeTraversor(nodeVisitor); traversor.traverse(this); return this; } /** Get the outer HTML of this node. 
 @return HTML
 */
public String outerHtml() {
    StringBuilder accum = new StringBuilder(128);
    outerHtml(accum);
    return accum.toString();
}

protected void outerHtml(StringBuilder accum) {
    new NodeTraversor(new OuterHtmlVisitor(accum, getOutputSettings())).traverse(this);
}

// if this node has no document (or parent), retrieve the default output settings
Document.OutputSettings getOutputSettings() {
    return ownerDocument() != null ? ownerDocument().outputSettings() : (new Document("")).outputSettings();
}

/**
 Get the outer HTML of this node.
 @param accum accumulator to place HTML into
 */
abstract void outerHtmlHead(StringBuilder accum, int depth, Document.OutputSettings out);

abstract void outerHtmlTail(StringBuilder accum, int depth, Document.OutputSettings out);

public String toString() {
    return outerHtml();
}

// Emits a newline plus depth-scaled padding, per the output settings' indent amount.
protected void indent(StringBuilder accum, int depth, Document.OutputSettings out) {
    accum.append("\n").append(StringUtil.padding(depth * out.indentAmount()));
}

@Override
public boolean equals(Object o) {
    // Identity-only equality: two distinct node objects are never equal.
    if (this == o) return true;
    // todo: have nodes hold a child index, compare against that and parent (not children)
    return false;
}

@Override
public int hashCode() {
    int result = parentNode != null ? parentNode.hashCode() : 0;
    // not children, or will block stack as they go back up to parent)
    result = 31 * result + (attributes != null ? attributes.hashCode() : 0);
    return result;
}

/**
 * Create a stand-alone, deep copy of this node, and all of its children. The cloned node will have no siblings or
 * parent node. As a stand-alone object, any changes made to the clone or any of its children will not impact the
 * original node.
 * <p>
 * The cloned node may be adopted into another Document or node structure using {@link Element#appendChild(Node)}.
 * @return stand-alone cloned node
 */
@Override
public Node clone() {
    // Shallow-clone the root first (orphaned: no parent, index 0)…
    Node thisClone = doClone(null); // splits for orphan

    // Queue up nodes that need their children cloned (BFS).
    LinkedList<Node> nodesToProcess = new LinkedList<Node>();
    nodesToProcess.add(thisClone);

    while (!nodesToProcess.isEmpty()) {
        Node currParent = nodesToProcess.remove();

        // …then replace each still-shared child reference with its own clone, level by level.
        for (int i = 0; i < currParent.childNodes.size(); i++) {
            Node childClone = currParent.childNodes.get(i).doClone(currParent);
            currParent.childNodes.set(i, childClone);
            nodesToProcess.add(childClone);
        }
    }

    return thisClone;
}

/*
 * Return a clone of the node using the given parent (which can be null).
 * Not a deep copy of children.
 */
protected Node doClone(Node parent) {
    Node clone;
    try {
        clone = (Node) super.clone();
    } catch (CloneNotSupportedException e) {
        // Node implements Cloneable via its hierarchy, so this cannot happen in practice.
        throw new RuntimeException(e);
    }

    clone.parentNode = parent; // can be null, to create an orphan split
    clone.siblingIndex = parent == null ? 0 : siblingIndex;
    clone.attributes = attributes != null ? attributes.clone() : null;
    clone.baseUri = baseUri;
    // Intentionally a SHALLOW child copy: clone() above swaps each entry for a real clone.
    clone.childNodes = new ArrayList<Node>(childNodes.size());
    for (Node child : childNodes)
        clone.childNodes.add(child);

    return clone;
}

// Visitor that streams each node's opening/closing HTML into a single accumulator.
private static class OuterHtmlVisitor implements NodeVisitor {
    private StringBuilder accum;
    private Document.OutputSettings out;

    OuterHtmlVisitor(StringBuilder accum, Document.OutputSettings out) {
        this.accum = accum;
        this.out = out;
    }

    public void head(Node node, int depth) {
        node.outerHtmlHead(accum, depth, out);
    }

    public void tail(Node node, int depth) {
        if (!node.nodeName().equals("#text")) // saves a void hit.
            node.outerHtmlTail(accum, depth, out);
    }
}
}
// Portions copyright 2002, Google, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package co.wakarimasen.ceredux.util;

// This code was converted from code at http://iharder.sourceforge.net/base64/
// Lots of extraneous features were removed.
/* The original code said:
 * <p>
 * I am placing this code in the Public Domain. Do with it as you will.
 * This software comes with no guarantees or warranties but with
 * plenty of well-wishing instead!
 * Please visit
 * <a href="http://iharder.net/xmlizable">http://iharder.net/xmlizable</a>
 * periodically to check for updates or to contribute improvements.
 * </p>
 *
 * @author Robert Harder
 * @author rharder@usa.net
 * @version 1.3
 */

/**
 * Base64 converter class. This code is not a complete MIME encoder;
 * it simply converts binary data to base64 data and back.
 *
 * <p>Note {@link CharBase64} is a GWT-compatible implementation of this
 * class.
 */
public class Base64 {
  /** Specify encoding (value is {@code true}). */
  public final static boolean ENCODE = true;

  /** Specify decoding (value is {@code false}). */
  public final static boolean DECODE = false;

  /** The equals sign (=) as a byte. */
  private final static byte EQUALS_SIGN = (byte) '=';

  /** The new line character (\n) as a byte. */
  private final static byte NEW_LINE = (byte) '\n';

  /**
   * The 64 valid Base64 values.
   */
  private final static byte[] ALPHABET = {(byte) 'A', (byte) 'B', (byte) 'C',
      (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', (byte) 'H', (byte) 'I',
      (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O',
      (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U',
      (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z', (byte) 'a',
      (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f', (byte) 'g',
      (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l', (byte) 'm',
      (byte) 'n', (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's',
      (byte) 't', (byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y',
      (byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3', (byte) '4',
      (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) '+',
      (byte) '/'};

  /**
   * The 64 valid web safe Base64 values.
   */
  private final static byte[] WEBSAFE_ALPHABET = {(byte) 'A', (byte) 'B',
      (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', (byte) 'H',
      (byte) 'I', (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N',
      (byte) 'O', (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T',
      (byte) 'U', (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z',
      (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f',
      (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l',
      (byte) 'm', (byte) 'n', (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r',
      (byte) 's', (byte) 't', (byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x',
      (byte) 'y', (byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3',
      (byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9',
      (byte) '-', (byte) '_'};

  /**
   * Translates a Base64 value to either its 6-bit reconstruction value
   * or a negative number indicating some other meaning.
   * Indexed by the (7-bit-masked) input byte: >= 0 is a data sextet,
   * -5 marks whitespace, -1 marks '=', -9 marks an illegal character.
   **/
  private final static byte[] DECODABET =
      {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8
          -5, -5, // Whitespace: Tab and Linefeed
          -9, -9, // Decimal 11 - 12
          -5, // Whitespace: Carriage Return
          -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26
          -9, -9, -9, -9, -9, // Decimal 27 - 31
          -5, // Whitespace: Space
          -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 42
          62, // Plus sign at decimal 43
          -9, -9, -9, // Decimal 44 - 46
          63, // Slash at decimal 47
          52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine
          -9, -9, -9, // Decimal 58 - 60
          -1, // Equals sign at decimal 61
          -9, -9, -9, // Decimal 62 - 64
          0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N'
          14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z'
          -9, -9, -9, -9, -9, -9, // Decimal 91 - 96
          26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm'
          39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z'
          -9, -9, -9, -9, -9 // Decimal 123 - 127
      // Entries for 128 - 255 are intentionally omitted: decode() masks every
      // input byte with 0x7f before indexing this table.
      };

  /** The web safe decodabet */
  private final static byte[] WEBSAFE_DECODABET =
      {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8
          -5, -5, // Whitespace: Tab and Linefeed
          -9, -9, // Decimal 11 - 12
          -5, // Whitespace: Carriage Return
          -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26
          -9, -9, -9, -9, -9, // Decimal 27 - 31
          -5, // Whitespace: Space
          -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 44
          62, // Dash '-' sign at decimal 45
          -9, -9, // Decimal 46-47
          52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine
          -9, -9, -9, // Decimal 58 - 60
          -1, // Equals sign at decimal 61
          -9, -9, -9, // Decimal 62 - 64
          0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N'
          14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z'
          -9, -9, -9, -9, // Decimal 91-94
          63, // Underscore '_' at decimal 95
          -9, // Decimal 96
          26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm'
          39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z'
          -9, -9, -9, -9, -9 // Decimal 123 - 127
      // Entries for 128 - 255 are intentionally omitted: decode() masks every
      // input byte with 0x7f before indexing this table.
      };

  // Indicates white space in encoding
  private final static byte WHITE_SPACE_ENC = -5;
  // Indicates equals sign in encoding
  private final static byte EQUALS_SIGN_ENC = -1;

  /** Defeats instantiation. */
  private Base64() {
  }

  /* ********  E N C O D I N G   M E T H O D S  ******** */

  /**
   * Encodes up to three bytes of the array <var>source</var>
   * and writes the resulting four Base64 bytes to <var>destination</var>.
   * The source and destination arrays can be manipulated
   * anywhere along their length by specifying
   * <var>srcOffset</var> and <var>destOffset</var>.
   * This method does not check to make sure your arrays
   * are large enough to accommodate <var>srcOffset</var> + 3 for
   * the <var>source</var> array or <var>destOffset</var> + 4 for
   * the <var>destination</var> array.
   * The actual number of significant bytes in your array is
   * given by <var>numSigBytes</var>.
   *
   * @param source the array to convert
   * @param srcOffset the index where conversion begins
   * @param numSigBytes the number of significant bytes in your array
   * @param destination the array to hold the conversion
   * @param destOffset the index where output will be put
   * @param alphabet is the encoding alphabet
   * @return the <var>destination</var> array
   * @since 1.3
   */
  private static byte[] encode3to4(byte[] source, int srcOffset,
      int numSigBytes, byte[] destination, int destOffset, byte[] alphabet) {
    //           1         2         3
    // 01234567890123456789012345678901 Bit position
    // --------000000001111111122222222 Array position from threeBytes
    // --------|    ||    ||    ||    | Six bit groups to index alphabet
    //          >>18  >>12  >> 6  >> 0  Right shift necessary
    //                0x3f  0x3f  0x3f  Additional AND

    // Create buffer with zero-padding if there are only one or two
    // significant bytes passed in the array.
    // We have to shift left 24 in order to flush out the 1's that appear
    // when Java treats a value as negative that is cast from a byte to an int.
    int inBuff =
        (numSigBytes > 0 ? ((source[srcOffset] << 24) >>> 8) : 0)
            | (numSigBytes > 1 ? ((source[srcOffset + 1] << 24) >>> 16) : 0)
            | (numSigBytes > 2 ? ((source[srcOffset + 2] << 24) >>> 24) : 0);

    switch (numSigBytes) {
      case 3:
        destination[destOffset] = alphabet[(inBuff >>> 18)];
        destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
        destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f];
        destination[destOffset + 3] = alphabet[(inBuff) & 0x3f];
        return destination;
      case 2:
        // two input bytes -> three data chars + one '=' pad
        destination[destOffset] = alphabet[(inBuff >>> 18)];
        destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
        destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f];
        destination[destOffset + 3] = EQUALS_SIGN;
        return destination;
      case 1:
        // one input byte -> two data chars + two '=' pads
        destination[destOffset] = alphabet[(inBuff >>> 18)];
        destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
        destination[destOffset + 2] = EQUALS_SIGN;
        destination[destOffset + 3] = EQUALS_SIGN;
        return destination;
      default:
        return destination;
    } // end switch
  } // end encode3to4

  /**
   * Encodes a byte array into Base64 notation.
   * Equivalent to calling
   * {@code encodeBytes(source, 0, source.length)}
   *
   * @param source The data to convert
   * @since 1.4
   */
  public static String encode(byte[] source) {
    return encode(source, 0, source.length, ALPHABET, true);
  }

  /**
   * Encodes a byte array into web safe Base64 notation.
   *
   * @param source The data to convert
   * @param doPadding is {@code true} to pad result with '=' chars
   *        if it does not fall on 3 byte boundaries
   */
  public static String encodeWebSafe(byte[] source, boolean doPadding) {
    return encode(source, 0, source.length, WEBSAFE_ALPHABET, doPadding);
  }

  /**
   * Encodes a byte array into Base64 notation.
   *
   * @param source the data to convert
   * @param off offset in array where conversion should begin
   * @param len length of data to convert
   * @param alphabet the encoding alphabet
   * @param doPadding is {@code true} to pad result with '=' chars
   *        if it does not fall on 3 byte boundaries
   * @since 1.4
   */
  public static String encode(byte[] source, int off, int len, byte[] alphabet,
      boolean doPadding) {
    // Integer.MAX_VALUE line length == never insert newlines.
    byte[] outBuff = encode(source, off, len, alphabet, Integer.MAX_VALUE);
    int outLen = outBuff.length;

    // If doPadding is false, set length to truncate '='
    // padding characters
    while (doPadding == false && outLen > 0) {
      if (outBuff[outLen - 1] != '=') {
        break;
      }
      outLen -= 1;
    }

    return new String(outBuff, 0, outLen);
  }

  /**
   * Encodes a byte array into Base64 notation.
   *
   * @param source the data to convert
   * @param off offset in array where conversion should begin
   * @param len length of data to convert
   * @param alphabet is the encoding alphabet
   * @param maxLineLength maximum length of one line.
   * @return the BASE64-encoded byte array
   */
  public static byte[] encode(byte[] source, int off, int len, byte[] alphabet,
      int maxLineLength) {
    int lenDiv3 = (len + 2) / 3; // ceil(len / 3)
    int len43 = lenDiv3 * 4;
    byte[] outBuff = new byte[len43 // Main 4:3
        + (len43 / maxLineLength)]; // New lines

    int d = 0;
    int e = 0;
    int len2 = len - 2;
    int lineLength = 0;
    // Main loop handles all complete 3-byte groups (d stops before the last 1-2 bytes).
    for (; d < len2; d += 3, e += 4) {

      // The following block of code is the same as
      // encode3to4( source, d + off, 3, outBuff, e, alphabet );
      // but inlined for faster encoding (~20% improvement)
      int inBuff =
          ((source[d + off] << 24) >>> 8)
              | ((source[d + 1 + off] << 24) >>> 16)
              | ((source[d + 2 + off] << 24) >>> 24);
      outBuff[e] = alphabet[(inBuff >>> 18)];
      outBuff[e + 1] = alphabet[(inBuff >>> 12) & 0x3f];
      outBuff[e + 2] = alphabet[(inBuff >>> 6) & 0x3f];
      outBuff[e + 3] = alphabet[(inBuff) & 0x3f];

      lineLength += 4;
      if (lineLength == maxLineLength) {
        outBuff[e + 4] = NEW_LINE;
        e++;
        lineLength = 0;
      } // end if: end of line
    } // end for: each piece of array

    // Trailing 1 or 2 bytes are padded out by encode3to4.
    if (d < len) {
      encode3to4(source, d + off, len - d, outBuff, e, alphabet);

      lineLength += 4;
      if (lineLength == maxLineLength) {
        // Add a last newline
        outBuff[e + 4] = NEW_LINE;
        e++;
      }
      e += 4;
    }

    assert (e == outBuff.length);
    return outBuff;
  }

  /* ********  D E C O D I N G   M E T H O D S  ******** */

  /**
   * Decodes four bytes from array <var>source</var>
   * and writes the resulting bytes (up to three of them)
   * to <var>destination</var>.
   * The source and destination arrays can be manipulated
   * anywhere along their length by specifying
   * <var>srcOffset</var> and <var>destOffset</var>.
   * This method does not check to make sure your arrays
   * are large enough to accommodate <var>srcOffset</var> + 4 for
   * the <var>source</var> array or <var>destOffset</var> + 3 for
   * the <var>destination</var> array.
   * This method returns the actual number of bytes that
   * were converted from the Base64 encoding.
   *
   *
   * @param source the array to convert
   * @param srcOffset the index where conversion begins
   * @param destination the array to hold the conversion
   * @param destOffset the index where output will be put
   * @param decodabet the decodabet for decoding Base64 content
   * @return the number of decoded bytes converted
   * @since 1.3
   */
  private static int decode4to3(byte[] source, int srcOffset,
      byte[] destination, int destOffset, byte[] decodabet) {
    // Example: Dk==  (two pads -> one output byte)
    if (source[srcOffset + 2] == EQUALS_SIGN) {
      int outBuff =
          ((decodabet[source[srcOffset]] << 24) >>> 6)
              | ((decodabet[source[srcOffset + 1]] << 24) >>> 12);

      destination[destOffset] = (byte) (outBuff >>> 16);
      return 1;
    } else if (source[srcOffset + 3] == EQUALS_SIGN) {
      // Example: DkL=  (one pad -> two output bytes)
      int outBuff =
          ((decodabet[source[srcOffset]] << 24) >>> 6)
              | ((decodabet[source[srcOffset + 1]] << 24) >>> 12)
              | ((decodabet[source[srcOffset + 2]] << 24) >>> 18);

      destination[destOffset] = (byte) (outBuff >>> 16);
      destination[destOffset + 1] = (byte) (outBuff >>> 8);
      return 2;
    } else {
      // Example: DkLE  (no pad -> three output bytes)
      int outBuff =
          ((decodabet[source[srcOffset]] << 24) >>> 6)
              | ((decodabet[source[srcOffset + 1]] << 24) >>> 12)
              | ((decodabet[source[srcOffset + 2]] << 24) >>> 18)
              | ((decodabet[source[srcOffset + 3]] << 24) >>> 24);

      destination[destOffset] = (byte) (outBuff >> 16);
      destination[destOffset + 1] = (byte) (outBuff >> 8);
      destination[destOffset + 2] = (byte) (outBuff);
      return 3;
    }
  } // end decodeToBytes

  /**
   * Decodes data from Base64 notation.
   *
   * @param s the string to decode (decoded in default encoding)
   * @return the decoded data
   * @since 1.4
   */
  public static byte[] decode(String s) throws Base64DecoderException {
    // NOTE(review): getBytes() uses the platform default charset; Base64 text is
    // ASCII so this is usually harmless, but an explicit charset would be safer.
    byte[] bytes = s.getBytes();
    return decode(bytes, 0, bytes.length);
  }

  /**
   * Decodes data from web safe Base64 notation.
   * Web safe encoding uses '-' instead of '+', '_' instead of '/'
   *
   * @param s the string to decode (decoded in default encoding)
   * @return the decoded data
   */
  public static byte[] decodeWebSafe(String s) throws Base64DecoderException {
    byte[] bytes = s.getBytes();
    return decodeWebSafe(bytes, 0, bytes.length);
  }

  /**
   * Decodes Base64 content in byte array format and returns
   * the decoded byte array.
   *
   * @param source The Base64 encoded data
   * @return decoded data
   * @since 1.3
   * @throws Base64DecoderException
   */
  public static byte[] decode(byte[] source) throws Base64DecoderException {
    return decode(source, 0, source.length);
  }

  /**
   * Decodes web safe Base64 content in byte array format and returns
   * the decoded data.
   * Web safe encoding uses '-' instead of '+', '_' instead of '/'
   *
   * @param source the string to decode (decoded in default encoding)
   * @return the decoded data
   */
  public static byte[] decodeWebSafe(byte[] source)
      throws Base64DecoderException {
    return decodeWebSafe(source, 0, source.length);
  }

  /**
   * Decodes Base64 content in byte array format and returns
   * the decoded byte array.
   *
   * @param source the Base64 encoded data
   * @param off the offset of where to begin decoding
   * @param len the length of characters to decode
   * @return decoded data
   * @since 1.3
   * @throws Base64DecoderException
   */
  public static byte[] decode(byte[] source, int off, int len)
      throws Base64DecoderException {
    return decode(source, off, len, DECODABET);
  }

  /**
   * Decodes web safe Base64 content in byte array format and returns
   * the decoded byte array.
   * Web safe encoding uses '-' instead of '+', '_' instead of '/'
   *
   * @param source the Base64 encoded data
   * @param off the offset of where to begin decoding
   * @param len the length of characters to decode
   * @return decoded data
   */
  public static byte[] decodeWebSafe(byte[] source, int off, int len)
      throws Base64DecoderException {
    return decode(source, off, len, WEBSAFE_DECODABET);
  }

  /**
   * Decodes Base64 content using the supplied decodabet and returns
   * the decoded byte array.
   *
   * @param source the Base64 encoded data
   * @param off the offset of where to begin decoding
   * @param len the length of characters to decode
   * @param decodabet the decodabet for decoding Base64 content
   * @return decoded data
   */
  public static byte[] decode(byte[] source, int off, int len,
      byte[] decodabet) throws Base64DecoderException {
    int len34 = len * 3 / 4;
    byte[] outBuff = new byte[2 + len34]; // Upper limit on size of output
    int outBuffPosn = 0;

    byte[] b4 = new byte[4]; // staging buffer for one 4-char quantum
    int b4Posn = 0;
    int i = 0;
    byte sbiCrop = 0;
    byte sbiDecode = 0;
    for (i = 0; i < len; i++) {
      sbiCrop = (byte) (source[i + off] & 0x7f); // Only the low seven bits
      sbiDecode = decodabet[sbiCrop];

      if (sbiDecode >= WHITE_SPACE_ENC) { // White space Equals sign or better
        if (sbiDecode >= EQUALS_SIGN_ENC) {
          // An equals sign (for padding) must not occur at position 0 or 1
          // and must be the last byte[s] in the encoded value
          if (sbiCrop == EQUALS_SIGN) {
            int bytesLeft = len - i;
            byte lastByte = (byte) (source[len - 1 + off] & 0x7f);
            if (b4Posn == 0 || b4Posn == 1) {
              throw new Base64DecoderException(
                  "invalid padding byte '=' at byte offset " + i);
            } else if ((b4Posn == 3 && bytesLeft > 2)
                || (b4Posn == 4 && bytesLeft > 1)) {
              throw new Base64DecoderException(
                  "padding byte '=' falsely signals end of encoded value "
                      + "at offset " + i);
            } else if (lastByte != EQUALS_SIGN && lastByte != NEW_LINE) {
              throw new Base64DecoderException(
                  "encoded value has invalid trailing byte");
            }
            break;
          }

          b4[b4Posn++] = sbiCrop;
          if (b4Posn == 4) {
            // a complete quantum of four characters decodes to up to 3 bytes
            outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet);
            b4Posn = 0;
          }
        }
        // whitespace (sbiDecode == WHITE_SPACE_ENC) is silently skipped
      } else {
        throw new Base64DecoderException("Bad Base64 input character at " + i
            + ": " + source[i + off] + "(decimal)");
      }
    }

    // Because web safe encoding allows non padding base64 encodes, we
    // need to pad the rest of the b4 buffer with equal signs when
    // b4Posn != 0. There can be at most 2 equal signs at the end of
    // four characters, so the b4 buffer must have two or three
    // characters. This also catches the case where the input is
    // padded with EQUALS_SIGN
    if (b4Posn != 0) {
      if (b4Posn == 1) {
        throw new Base64DecoderException("single trailing character at offset "
            + (len - 1));
      }
      b4[b4Posn++] = EQUALS_SIGN;
      outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet);
    }

    // Trim the over-allocated buffer to the exact decoded length.
    byte[] out = new byte[outBuffPosn];
    System.arraycopy(outBuff, 0, out, 0, outBuffPosn);
    return out;
  }
}
/** * Generated with Acceleo */ package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms; // Start of user code for imports import java.util.ArrayList; import java.util.List; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent; import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent; import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart; import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent; import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart; import org.eclipse.emf.eef.runtime.ui.parts.PartComposer; import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence; import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence; import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable; import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.ui.forms.widgets.Form; import org.eclipse.ui.forms.widgets.FormToolkit; import org.eclipse.ui.forms.widgets.ScrolledForm; import org.eclipse.ui.forms.widgets.Section; import org.wso2.developerstudio.eclipse.gmf.esb.parts.DBReportMediatorOutputConnectorPropertiesEditionPart; import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository; import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages; // End of user code /** * * */ public class DBReportMediatorOutputConnectorPropertiesEditionPartForm extends SectionPropertiesEditingPart implements 
IFormPropertiesEditionPart, DBReportMediatorOutputConnectorPropertiesEditionPart { protected ReferencesTable commentMediators; protected List<ViewerFilter> commentMediatorsBusinessFilters = new ArrayList<ViewerFilter>(); protected List<ViewerFilter> commentMediatorsFilters = new ArrayList<ViewerFilter>(); /** * For {@link ISection} use only. */ public DBReportMediatorOutputConnectorPropertiesEditionPartForm() { super(); } /** * Default constructor * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part * */ public DBReportMediatorOutputConnectorPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) { super(editionComponent); } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart# * createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit) * */ public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) { ScrolledForm scrolledForm = widgetFactory.createScrolledForm(parent); Form form = scrolledForm.getForm(); view = form.getBody(); GridLayout layout = new GridLayout(); layout.numColumns = 3; view.setLayout(layout); createControls(widgetFactory, view); return scrolledForm; } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart# * createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite) * */ public void createControls(final FormToolkit widgetFactory, Composite view) { CompositionSequence dBReportMediatorOutputConnectorStep = new BindingCompositionSequence(propertiesEditionComponent); dBReportMediatorOutputConnectorStep .addStep(EsbViewsRepository.DBReportMediatorOutputConnector.Properties.class) .addStep(EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators); composer = new PartComposer(dBReportMediatorOutputConnectorStep) { @Override public Composite addToPart(Composite parent, Object key) { if (key == 
EsbViewsRepository.DBReportMediatorOutputConnector.Properties.class) { return createPropertiesGroup(widgetFactory, parent); } if (key == EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators) { return createCommentMediatorsTableComposition(widgetFactory, parent); } return parent; } }; composer.compose(view); } /** * */ protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) { Section propertiesSection = widgetFactory.createSection(parent, Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED); propertiesSection.setText(EsbMessages.DBReportMediatorOutputConnectorPropertiesEditionPart_PropertiesGroupLabel); GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL); propertiesSectionData.horizontalSpan = 3; propertiesSection.setLayoutData(propertiesSectionData); Composite propertiesGroup = widgetFactory.createComposite(propertiesSection); GridLayout propertiesGroupLayout = new GridLayout(); propertiesGroupLayout.numColumns = 3; propertiesGroup.setLayout(propertiesGroupLayout); propertiesSection.setClient(propertiesGroup); return propertiesGroup; } /** * @param container * */ protected Composite createCommentMediatorsTableComposition(FormToolkit widgetFactory, Composite parent) { this.commentMediators = new ReferencesTable(getDescription(EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators, EsbMessages.DBReportMediatorOutputConnectorPropertiesEditionPart_CommentMediatorsLabel), new ReferencesTableListener() { public void handleAdd() { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(DBReportMediatorOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, null)); commentMediators.refresh(); } public void handleEdit(EObject element) { propertiesEditionComponent.firePropertiesChanged(new 
PropertiesEditionEvent(DBReportMediatorOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.EDIT, null, element)); commentMediators.refresh(); } public void handleMove(EObject element, int oldIndex, int newIndex) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(DBReportMediatorOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex)); commentMediators.refresh(); } public void handleRemove(EObject element) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(DBReportMediatorOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element)); commentMediators.refresh(); } public void navigateTo(EObject element) { } }); for (ViewerFilter filter : this.commentMediatorsFilters) { this.commentMediators.addFilter(filter); } this.commentMediators.setHelpText(propertiesEditionComponent.getHelpContent(EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators, EsbViewsRepository.FORM_KIND)); this.commentMediators.createControls(parent, widgetFactory); this.commentMediators.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { if (e.item != null && e.item.getData() instanceof EObject) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(DBReportMediatorOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData())); } } }); GridData commentMediatorsData = new 
GridData(GridData.FILL_HORIZONTAL); commentMediatorsData.horizontalSpan = 3; this.commentMediators.setLayoutData(commentMediatorsData); this.commentMediators.setLowerBound(0); this.commentMediators.setUpperBound(-1); commentMediators.setID(EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators); commentMediators.setEEFType("eef::AdvancedTableComposition"); //$NON-NLS-1$ // Start of user code for createCommentMediatorsTableComposition // End of user code return parent; } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent) * */ public void firePropertiesChanged(IPropertiesEditionEvent event) { // Start of user code for tab synchronization // End of user code } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.DBReportMediatorOutputConnectorPropertiesEditionPart#initCommentMediators(EObject current, EReference containingFeature, EReference feature) */ public void initCommentMediators(ReferencesTableSettings settings) { if (current.eResource() != null && current.eResource().getResourceSet() != null) this.resourceSet = current.eResource().getResourceSet(); ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider(); commentMediators.setContentProvider(contentProvider); commentMediators.setInput(settings); boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.DBReportMediatorOutputConnector.Properties.commentMediators); if (eefElementEditorReadOnlyState && commentMediators.isEnabled()) { commentMediators.setEnabled(false); commentMediators.setToolTipText(EsbMessages.DBReportMediatorOutputConnector_ReadOnly); } else if (!eefElementEditorReadOnlyState && !commentMediators.isEnabled()) { commentMediators.setEnabled(true); } } /** * {@inheritDoc} * * @see 
org.wso2.developerstudio.eclipse.gmf.esb.parts.DBReportMediatorOutputConnectorPropertiesEditionPart#updateCommentMediators() * */ public void updateCommentMediators() { commentMediators.refresh(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.DBReportMediatorOutputConnectorPropertiesEditionPart#addFilterCommentMediators(ViewerFilter filter) * */ public void addFilterToCommentMediators(ViewerFilter filter) { commentMediatorsFilters.add(filter); if (this.commentMediators != null) { this.commentMediators.addFilter(filter); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.DBReportMediatorOutputConnectorPropertiesEditionPart#addBusinessFilterCommentMediators(ViewerFilter filter) * */ public void addBusinessFilterToCommentMediators(ViewerFilter filter) { commentMediatorsBusinessFilters.add(filter); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.DBReportMediatorOutputConnectorPropertiesEditionPart#isContainedInCommentMediatorsTable(EObject element) * */ public boolean isContainedInCommentMediatorsTable(EObject element) { return ((ReferencesTableSettings)commentMediators.getInput()).contains(element); } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle() * */ public String getTitle() { return EsbMessages.DBReportMediatorOutputConnector_Part_Title; } // Start of user code additional methods // End of user code }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.aws.ec2.compute.strategy; import com.google.common.base.Predicates; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.gson.Gson; import com.google.inject.Guice; import org.jclouds.compute.config.BaseComputeServiceContextModule; import org.jclouds.compute.domain.ImageBuilder; import org.jclouds.compute.domain.OperatingSystem; import org.jclouds.compute.domain.OsFamily; import org.jclouds.compute.reference.ComputeServiceConstants; import org.jclouds.domain.Location; import org.jclouds.domain.LocationBuilder; import org.jclouds.domain.LocationScope; import org.jclouds.domain.LoginCredentials; import org.jclouds.ec2.compute.config.EC2ComputeServiceDependenciesModule; import org.jclouds.ec2.compute.functions.EC2ImageParser; import org.jclouds.ec2.compute.strategy.EC2PopulateDefaultLoginCredentialsForImageStrategy; import org.jclouds.ec2.domain.Image; import org.jclouds.ec2.xml.DescribeImagesResponseHandlerTest; import org.jclouds.json.Json; import org.jclouds.json.config.GsonModule; import 
org.testng.annotations.Test; import java.util.Map; import java.util.Set; import static org.testng.Assert.assertEquals; @Test(groups = "unit", testName = "AWSEC2ImageParserTest") public class AWSEC2ImageParserTest { public void testParseAlesticCanonicalImage() { Set<org.jclouds.compute.domain.Image> result = convertImages("/alestic_canonical.xml"); assertEquals( Iterables.get(result, 0), new ImageBuilder() .operatingSystem( new OperatingSystem.Builder().family(OsFamily.UBUNTU).arch("paravirtual").version("8.04") .description("ubuntu-images-us/ubuntu-hardy-8.04-i386-server-20091130.manifest.xml") .is64Bit(false).build()) .description("ubuntu-images-us/ubuntu-hardy-8.04-i386-server-20091130.manifest.xml") .defaultCredentials(LoginCredentials.builder().user("ubuntu").build()).id("us-east-1/ami-7e28ca17") .providerId("ami-7e28ca17").location(defaultLocation).version("20091130") .userMetadata(ImmutableMap.of( "owner", "099720109477", "rootDeviceType", "instance-store", "virtualizationType", "paravirtual", "hypervisor", "xen")) .status(org.jclouds.compute.domain.Image.Status.AVAILABLE) .backendStatus("available") .build()); assertEquals(Iterables.get(result, 0).getStatus(), org.jclouds.compute.domain.Image.Status.AVAILABLE); assertEquals( Iterables.get(result, 4), new ImageBuilder() .operatingSystem( new OperatingSystem.Builder().family(OsFamily.UBUNTU).arch("paravirtual").version("8.04") .description("alestic/ubuntu-8.04-hardy-base-20080905.manifest.xml").is64Bit(false) .build()).description("alestic/ubuntu-8.04-hardy-base-20080905.manifest.xml") .defaultCredentials(LoginCredentials.builder().user("ubuntu").build()).id("us-east-1/ami-c0fa1ea9") .providerId("ami-c0fa1ea9").location(defaultLocation).version("20080905") .userMetadata(ImmutableMap.of("owner", "063491364108", "rootDeviceType", "instance-store")) .status(org.jclouds.compute.domain.Image.Status.AVAILABLE).backendStatus("available").build()); assertEquals(Iterables.get(result, 4).getStatus(), 
org.jclouds.compute.domain.Image.Status.AVAILABLE); assertEquals( Iterables.get(result, 6), new ImageBuilder() .name("ebs/ubuntu-images/ubuntu-lucid-10.04-i386-server-20100827") .operatingSystem( new OperatingSystem.Builder().family(OsFamily.UBUNTU).arch("paravirtual").version("10.04") .description("099720109477/ebs/ubuntu-images/ubuntu-lucid-10.04-i386-server-20100827") .is64Bit(false).build()) .description("099720109477/ebs/ubuntu-images/ubuntu-lucid-10.04-i386-server-20100827") .defaultCredentials(LoginCredentials.builder().user("ubuntu").build()).id("us-east-1/ami-10f3a255") .providerId("ami-10f3a255").location(defaultLocation).version("20100827") .userMetadata(ImmutableMap.of( "owner", "099720109477", "rootDeviceType", "ebs", "virtualizationType", "paravirtual", "hypervisor", "xen")) .status(org.jclouds.compute.domain.Image.Status.AVAILABLE).backendStatus("available").build()); assertEquals(Iterables.get(result, 6).getStatus(), org.jclouds.compute.domain.Image.Status.AVAILABLE); } public void testParseVostokImage() { Set<org.jclouds.compute.domain.Image> result = convertImages("/vostok.xml"); assertEquals( Iterables.get(result, 0), new ImageBuilder() .operatingSystem( new OperatingSystem.Builder().family(OsFamily.UNRECOGNIZED).arch("paravirtual").version("") .description("vostok-builds/vostok-0.95-5622/vostok-0.95-5622.manifest.xml") .is64Bit(false).build()) .description("vostok-builds/vostok-0.95-5622/vostok-0.95-5622.manifest.xml") .defaultCredentials(LoginCredentials.builder().user("root").build()).id("us-east-1/ami-870de2ee") .providerId("ami-870de2ee").location(defaultLocation).version("5622") .userMetadata(ImmutableMap.of("owner", "133804938231", "rootDeviceType", "instance-store")) .status(org.jclouds.compute.domain.Image.Status.AVAILABLE).build()); } public void testParseCCImage() { Set<org.jclouds.compute.domain.Image> result = convertImages("/describe_images_cc.xml"); assertEquals( Iterables.get(result, 0), new ImageBuilder() .name("EC2 CentOS 5.4 
HVM AMI") .operatingSystem( new OperatingSystem.Builder().family(OsFamily.CENTOS).arch("hvm").version("5.4") .description("amazon/EC2 CentOS 5.4 HVM AMI").is64Bit(true).build()) .description("EC2 CentOS 5.4 HVM AMI") .defaultCredentials(LoginCredentials.builder().user("root").build()).id("us-east-1/ami-7ea24a17") .providerId("ami-7ea24a17").location(defaultLocation) .userMetadata(ImmutableMap.of( "owner", "206029621532", "rootDeviceType", "ebs", "virtualizationType", "hvm", "hypervisor", "xen")) .status(org.jclouds.compute.domain.Image.Status.AVAILABLE).build()); assertEquals(Iterables.get(result, 0).getStatus(), org.jclouds.compute.domain.Image.Status.AVAILABLE); } public void testParseRightScaleImage() { Set<org.jclouds.compute.domain.Image> result = convertImages("/rightscale_images.xml"); assertEquals( Iterables.get(result, 0), new ImageBuilder() .operatingSystem( new OperatingSystem.Builder().family(OsFamily.CENTOS).arch("paravirtual").version("5.4") .description("rightscale-us-east/CentOS_5.4_x64_v4.4.10.manifest.xml").is64Bit(true) .build()).description("rightscale-us-east/CentOS_5.4_x64_v4.4.10.manifest.xml") .defaultCredentials(LoginCredentials.builder().user("root").build()).id("us-east-1/ami-ccb35ea5") .providerId("ami-ccb35ea5").location(defaultLocation).version("4.4.10") .userMetadata(ImmutableMap.of("owner", "admin", "rootDeviceType", "instance-store")) .status(org.jclouds.compute.domain.Image.Status.AVAILABLE).backendStatus("available").build()); assertEquals(Iterables.get(result, 0).getStatus(), org.jclouds.compute.domain.Image.Status.AVAILABLE); assertEquals( new Gson().toJson(Iterables.get(result, 1)), 
"{\"operatingSystem\":{\"family\":\"UBUNTU\",\"arch\":\"paravirtual\",\"version\":\"9.10\",\"description\":\"411009282317/RightImage_Ubuntu_9.10_x64_v4.5.3_EBS_Alpha\",\"is64Bit\":true},\"status\":\"AVAILABLE\",\"backendStatus\":\"available\",\"version\":\"4.5.3_EBS_Alpha\",\"description\":\"RightImage_Ubuntu_9.10_x64_v4.5.3_EBS_Alpha\",\"defaultCredentials\":{\"authenticateSudo\":false,\"password\":{},\"privateKey\":{},\"identity\":\"root\"},\"id\":\"us-east-1/ami-c19db6b5\",\"type\":\"IMAGE\",\"tags\":[],\"providerId\":\"ami-c19db6b5\",\"name\":\"RightImage_Ubuntu_9.10_x64_v4.5.3_EBS_Alpha\",\"location\":{\"scope\":\"REGION\",\"id\":\"us-east-1\",\"description\":\"us-east-1\",\"iso3166Codes\":[],\"metadata\":{}},\"userMetadata\":{\"owner\":\"411009282317\",\"rootDeviceType\":\"ebs\",\"virtualizationType\":\"paravirtual\",\"hypervisor\":\"xen\"}}"); assertEquals( new Gson().toJson(Iterables.get(result, 2)), "{\"operatingSystem\":{\"family\":\"WINDOWS\",\"arch\":\"hvm\",\"version\":\"2003\",\"description\":\"411009282317/RightImage Windows_2003_i386_v5.4.3\",\"is64Bit\":false},\"status\":\"AVAILABLE\",\"backendStatus\":\"available\",\"version\":\"5.4.3\",\"description\":\"Built by RightScale\",\"defaultCredentials\":{\"authenticateSudo\":false,\"password\":{},\"privateKey\":{},\"identity\":\"root\"},\"id\":\"us-east-1/ami-710c2605\",\"type\":\"IMAGE\",\"tags\":[],\"providerId\":\"ami-710c2605\",\"name\":\"RightImage Windows_2003_i386_v5.4.3\",\"location\":{\"scope\":\"REGION\",\"id\":\"us-east-1\",\"description\":\"us-east-1\",\"iso3166Codes\":[],\"metadata\":{}},\"userMetadata\":{\"owner\":\"411009282317\",\"rootDeviceType\":\"ebs\",\"virtualizationType\":\"hvm\",\"hypervisor\":\"xen\"}}"); assertEquals( new Gson().toJson(Iterables.get(result, 3)), 
"{\"operatingSystem\":{\"family\":\"CENTOS\",\"arch\":\"paravirtual\",\"version\":\"6.5\",\"description\":\"411009282317/RightImage_CentOS_6.5_x64_v13.5.2.2_EBS\",\"is64Bit\":true},\"status\":\"AVAILABLE\",\"backendStatus\":\"available\",\"version\":\"13.5.2.2_EBS\",\"description\":\"RightImage_CentOS_6.5_x64_v13.5.2.2_EBS\",\"defaultCredentials\":{\"authenticateSudo\":false,\"password\":{},\"privateKey\":{},\"identity\":\"root\"},\"id\":\"us-east-1/ami-05ebd06c\",\"type\":\"IMAGE\",\"tags\":[],\"providerId\":\"ami-05ebd06c\",\"name\":\"RightImage_CentOS_6.5_x64_v13.5.2.2_EBS\",\"location\":{\"scope\":\"REGION\",\"id\":\"us-east-1\",\"description\":\"us-east-1\",\"iso3166Codes\":[],\"metadata\":{}},\"userMetadata\":{\"owner\":\"411009282317\",\"rootDeviceType\":\"ebs\",\"virtualizationType\":\"paravirtual\",\"hypervisor\":\"xen\"}}"); assertEquals( new Gson().toJson(Iterables.get(result, 4)), "{\"operatingSystem\":{\"family\":\"UBUNTU\",\"arch\":\"paravirtual\",\"version\":\"10.04\",\"description\":\"411009282317/RightImage_Ubuntu_10.04_x64_v12.11.4_EBS\",\"is64Bit\":true},\"status\":\"AVAILABLE\",\"backendStatus\":\"available\",\"version\":\"12.11.4_EBS\",\"description\":\"RightImage_Ubuntu_10.04_x64_v12.11.4_EBS\",\"defaultCredentials\":{\"authenticateSudo\":false,\"password\":{},\"privateKey\":{},\"identity\":\"root\"},\"id\":\"us-east-1/ami-08bffe61\",\"type\":\"IMAGE\",\"tags\":[],\"providerId\":\"ami-08bffe61\",\"name\":\"RightImage_Ubuntu_10.04_x64_v12.11.4_EBS\",\"location\":{\"scope\":\"REGION\",\"id\":\"us-east-1\",\"description\":\"us-east-1\",\"iso3166Codes\":[],\"metadata\":{}},\"userMetadata\":{\"owner\":\"411009282317\",\"rootDeviceType\":\"ebs\",\"virtualizationType\":\"paravirtual\",\"hypervisor\":\"xen\"}}"); } public void testParseAmznImage() { Set<org.jclouds.compute.domain.Image> result = convertImages("/amzn_images.xml"); assertEquals( Iterables.get(result, 0), new ImageBuilder() .name("amzn-ami-0.9.7-beta.i386-ebs") .operatingSystem( new 
OperatingSystem.Builder().family(OsFamily.AMZN_LINUX).arch("paravirtual") .version("0.9.7-beta").description("137112412989/amzn-ami-0.9.7-beta.i386-ebs") .is64Bit(false).build()).description("Amazon") .defaultCredentials(LoginCredentials.builder().user("ec2-user").build()).id("us-east-1/ami-82e4b5c7") .providerId("ami-82e4b5c7").location(defaultLocation).version("0.9.7-beta") .userMetadata(ImmutableMap.of( "owner", "137112412989", "rootDeviceType", "ebs", "virtualizationType", "paravirtual", "hypervisor", "xen")) .status(org.jclouds.compute.domain.Image.Status.AVAILABLE).build()); assertEquals(Iterables.get(result, 0).getStatus(), org.jclouds.compute.domain.Image.Status.AVAILABLE); assertEquals( Iterables.get(result, 3), new ImageBuilder() .name("amzn-ami-0.9.7-beta.x86_64-S3") .operatingSystem( new OperatingSystem.Builder().family(OsFamily.AMZN_LINUX).arch("paravirtual") .version("0.9.7-beta") .description("amzn-ami-us-west-1/amzn-ami-0.9.7-beta.x86_64.manifest.xml").is64Bit(true) .build()).description("Amazon Linux AMI x86_64 S3") .defaultCredentials(LoginCredentials.builder().user("ec2-user").build()).id("us-east-1/ami-f2e4b5b7") .providerId("ami-f2e4b5b7").location(defaultLocation).version("0.9.7-beta") .userMetadata(ImmutableMap.of( "owner", "137112412989", "rootDeviceType", "ebs", "virtualizationType", "paravirtual", "hypervisor", "xen")) .status(org.jclouds.compute.domain.Image.Status.AVAILABLE).build()); assertEquals(Iterables.get(result, 3).getStatus(), org.jclouds.compute.domain.Image.Status.AVAILABLE); } static Location defaultLocation = new LocationBuilder().scope(LocationScope.REGION).id("us-east-1") .description("us-east-1").build(); public static Set<org.jclouds.compute.domain.Image> convertImages(String resource) { Map<OsFamily, Map<String, String>> map = new BaseComputeServiceContextModule() { }.provideOsVersionMap(new ComputeServiceConstants.ReferenceData(), Guice.createInjector(new GsonModule()) .getInstance(Json.class)); Set<Image> result = 
DescribeImagesResponseHandlerTest.parseImages(resource); EC2ImageParser parser = new EC2ImageParser(EC2ComputeServiceDependenciesModule.toPortableImageStatus, new EC2PopulateDefaultLoginCredentialsForImageStrategy(), map, Suppliers .<Set<? extends Location>> ofInstance(ImmutableSet.<Location> of(defaultLocation)), Suppliers .ofInstance(defaultLocation), new AWSEC2ReviseParsedImage(map)); return Sets.newLinkedHashSet(Iterables.filter(Iterables.transform(result, parser), Predicates.notNull())); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.engine; import com.google.common.collect.Lists; import org.apache.lucene.index.*; import org.apache.lucene.index.IndexWriter.IndexReaderWarmer; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.SearcherFactory; import org.apache.lucene.search.SearcherManager; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.InfoStream; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.routing.DjbHashFunction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.LoggerInfoStream; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.math.MathUtils; import 
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit; import org.elasticsearch.index.indexing.ShardIndexingService; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.merge.OnGoingMerge; import org.elasticsearch.index.merge.policy.ElasticsearchMergePolicy; import org.elasticsearch.index.merge.policy.MergePolicyProvider; import org.elasticsearch.index.merge.scheduler.MergeSchedulerProvider; import org.elasticsearch.index.search.nested.IncludeNestedDocsQuery; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogCorruptedException; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; /** * */ public class InternalEngine extends Engine { private final FailEngineOnMergeFailure mergeSchedulerFailureListener; private final MergeSchedulerListener mergeSchedulerListener; /** * When we last pruned expired tombstones from versionMap.deletes: */ private volatile long lastDeleteVersionPruneTimeMSec; private final ShardIndexingService indexingService; @Nullable private final IndicesWarmer warmer; private final Translog translog; private final MergePolicyProvider mergePolicyProvider; private final MergeSchedulerProvider mergeScheduler; private final IndexWriter indexWriter; private final SearcherFactory searcherFactory; private final SearcherManager searcherManager; private final Lock 
flushLock = new ReentrantLock(); private final ReentrantLock optimizeLock = new ReentrantLock(); // A uid (in the form of BytesRef) to the version map // we use the hashed variant since we iterate over it and check removal and additions on existing keys private final LiveVersionMap versionMap; private final Object[] dirtyLocks; private final AtomicBoolean versionMapRefreshPending = new AtomicBoolean(); private volatile SegmentInfos lastCommittedSegmentInfos; private final IndexThrottle throttle; public InternalEngine(EngineConfig engineConfig, boolean skipInitialTranslogRecovery) throws EngineException { super(engineConfig); this.versionMap = new LiveVersionMap(); store.incRef(); IndexWriter writer = null; Translog translog = null; SearcherManager manager = null; boolean success = false; try { this.lastDeleteVersionPruneTimeMSec = engineConfig.getThreadPool().estimatedTimeInMillis(); this.indexingService = engineConfig.getIndexingService(); this.warmer = engineConfig.getWarmer(); this.mergePolicyProvider = engineConfig.getMergePolicyProvider(); this.mergeScheduler = engineConfig.getMergeScheduler(); this.dirtyLocks = new Object[engineConfig.getIndexConcurrency() * 50]; // we multiply it to have enough... 
for (int i = 0; i < dirtyLocks.length; i++) {
    // One striped lock object per slot; uids hash onto these (see dirtyLock(BytesRef)).
    dirtyLocks[i] = new Object();
}
throttle = new IndexThrottle();
this.searcherFactory = new SearchFactory(logger, isClosed, engineConfig);
final Translog.TranslogGeneration translogGeneration;
try {
    // TODO: would be better if ES could tell us "from above" whether this shard was already here, instead of using Lucene's API
    // (which relies on IO ops, directory listing, and has had scary bugs in the past):
    boolean create = !Lucene.indexExists(store.directory());
    writer = createWriter(create);
    indexWriter = writer;
    // A fresh translog is forced when the index is new, when initial translog recovery is skipped,
    // or when the config explicitly asks for one.
    translog = openTranslog(engineConfig, writer, create || skipInitialTranslogRecovery || engineConfig.forceNewTranslog());
    translogGeneration = translog.getGeneration();
    assert translogGeneration != null;
} catch (IOException | TranslogCorruptedException e) {
    throw new EngineCreationFailureException(shardId, "failed to create engine", e);
}
this.translog = translog;
manager = createSearcherManager();
this.searcherManager = manager;
this.versionMap.setManager(searcherManager);
this.mergeSchedulerFailureListener = new FailEngineOnMergeFailure();
this.mergeSchedulerListener = new MergeSchedulerListener();
this.mergeScheduler.addListener(mergeSchedulerListener);
this.mergeScheduler.addFailureListener(mergeSchedulerFailureListener);
try {
    if (skipInitialTranslogRecovery) {
        // make sure we point at the latest translog from now on..
        commitIndexWriter(writer, translog, lastCommittedSegmentInfos.getUserData().get(SYNC_COMMIT_ID));
    } else {
        recoverFromTranslog(engineConfig, translogGeneration);
    }
} catch (IOException | EngineException ex) {
    throw new EngineCreationFailureException(shardId, "failed to recover from translog", ex);
}
success = true;
} finally {
    if (success == false) {
        // Construction failed part-way: release everything that was opened so far.
        IOUtils.closeWhileHandlingException(writer, translog, manager);
        versionMap.clear();
        if (isClosed.get() == false) {
            // failure we need to dec the store reference
            store.decRef();
        }
    }
}
logger.trace("created new InternalEngine");
}

/**
 * Opens the translog for this engine, creating a brand-new one when {@code createNew}
 * is true. When reusing an existing shard the commit data must reference a translog
 * generation; a pre-2.0 (UUID-less) generation triggers a legacy translog upgrade.
 * If no generation exists at all, a fresh commit is written so the writer and the new
 * translog agree.
 */
private Translog openTranslog(EngineConfig engineConfig, IndexWriter writer, boolean createNew) throws IOException {
    final Translog.TranslogGeneration generation = loadTranslogIdFromCommit(writer);
    final TranslogConfig translogConfig = engineConfig.getTranslogConfig();
    if (createNew == false) {
        // We expect that this shard already exists, so it must already have an existing translog else something is badly wrong!
        if (generation == null) {
            throw new IllegalStateException("no translog generation present in commit data but translog is expected to exist");
        }
        translogConfig.setTranslogGeneration(generation);
        if (generation != null && generation.translogUUID == null) {
            // only upgrade on pre-2.0 indices...
            Translog.upgradeLegacyTranslog(logger, translogConfig);
        }
    }
    final Translog translog = new Translog(translogConfig);
    if (generation == null) {
        logger.debug("no translog ID present in the current generation - creating one");
        boolean success = false;
        try {
            // Persist the new translog generation into the Lucene commit right away.
            commitIndexWriter(writer, translog);
            success = true;
        } finally {
            if (success == false) {
                IOUtils.closeWhileHandlingException(translog);
            }
        }
    }
    return translog;
}

/** Returns the engine's translog; fails if the engine is closed. */
@Override
public Translog getTranslog() {
    ensureOpen();
    return translog;
}

/**
 * Replays all operations from the current translog snapshot through the configured
 * {@link TranslogRecoveryPerformer}. BAD_REQUEST failures (e.g. mapping parse errors)
 * are logged and skipped; anything else aborts recovery. Afterwards either flushes
 * (if anything was recovered) or re-commits so the commit points at the current
 * translog generation.
 */
protected void recoverFromTranslog(EngineConfig engineConfig, Translog.TranslogGeneration translogGeneration) throws IOException {
    int opsRecovered = 0;
    final TranslogRecoveryPerformer handler = engineConfig.getTranslogRecoveryPerformer();
    try (Translog.Snapshot snapshot = translog.newSnapshot()) {
        Translog.Operation operation;
        while ((operation = snapshot.next()) != null) {
            try {
                handler.performRecoveryOperation(this, operation);
                opsRecovered++;
            } catch (ElasticsearchException e) {
                if (e.status() == RestStatus.BAD_REQUEST) {
                    // mainly for MapperParsingException and Failure to detect xcontent
                    logger.info("ignoring recovery of a corrupt translog entry", e);
                } else {
                    throw e;
                }
            }
        }
    } catch (Throwable e) {
        throw new EngineException(shardId, "failed to recover from translog", e);
    }
    // flush if we recovered something or if we have references to older translogs
    // note: if opsRecovered == 0 and we have older translogs it means they are corrupted or 0 length.
    if (opsRecovered > 0) {
        logger.trace("flushing post recovery from translog. ops recovered [{}]. committed translog id [{}]. current id [{}]",
                opsRecovered, translogGeneration == null ? null : translogGeneration.translogFileGeneration, translog.currentFileGeneration());
        flush(true, true);
    } else if (translog.isCurrent(translogGeneration) == false){
        commitIndexWriter(indexWriter, translog, lastCommittedSegmentInfos.getUserData().get(Engine.SYNC_COMMIT_ID));
    }
}

/**
 * Reads the current stored translog ID from the IW commit data.
If the id is not found, recommits the current
 * translog id into lucene and returns null.
 */
@Nullable
private Translog.TranslogGeneration loadTranslogIdFromCommit(IndexWriter writer) throws IOException {
    // commit on a just opened writer will commit even if there are no changes done to it
    // we rely on that for the commit data translog id key
    final Map<String, String> commitUserData = writer.getCommitData();
    if (commitUserData.containsKey("translog_id")) {
        // Legacy (pre-2.0) commit: numeric id only, no translog UUID.
        assert commitUserData.containsKey(Translog.TRANSLOG_UUID_KEY) == false : "legacy commit contains translog UUID";
        return new Translog.TranslogGeneration(null, Long.parseLong(commitUserData.get("translog_id")));
    } else if (commitUserData.containsKey(Translog.TRANSLOG_GENERATION_KEY)) {
        if (commitUserData.containsKey(Translog.TRANSLOG_UUID_KEY) == false) {
            throw new IllegalStateException("commit doesn't contain translog UUID");
        }
        final String translogUUID = commitUserData.get(Translog.TRANSLOG_UUID_KEY);
        final long translogGen = Long.parseLong(commitUserData.get(Translog.TRANSLOG_GENERATION_KEY));
        return new Translog.TranslogGeneration(translogUUID, translogGen);
    }
    return null;
}

/**
 * Opens a near-real-time reader on the index writer and wraps it in a
 * {@link SearcherManager}; also seeds {@code lastCommittedSegmentInfos}.
 * On failure the writer is rolled back and everything created here is released.
 */
private SearcherManager createSearcherManager() throws EngineException {
    boolean success = false;
    SearcherManager searcherManager = null;
    try {
        try {
            final DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter, true), shardId);
            searcherManager = new SearcherManager(directoryReader, searcherFactory);
            lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager);
            success = true;
            return searcherManager;
        } catch (IOException e) {
            maybeFailEngine("start", e);
            try {
                indexWriter.rollback();
            } catch (IOException e1) {
                // iw is closed below
                e.addSuppressed(e1);
            }
            throw new EngineCreationFailureException(shardId, "failed to open reader on writer", e);
        }
    } finally {
        if (success == false) {
            // release everything we created on a failure
            IOUtils.closeWhileHandlingException(searcherManager, indexWriter);
        }
    }
}

/**
 * Pushes the current engine-config buffer/compound-file settings onto the live
 * IndexWriter config; silently ignores a writer that is already closed.
 */
private void updateIndexWriterSettings() {
    try {
        final LiveIndexWriterConfig iwc = indexWriter.getConfig();
        iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().mbFrac());
        iwc.setUseCompoundFile(engineConfig.isCompoundOnFlush());
    } catch (AlreadyClosedException ex) {
        // ignore
    }
}

/**
 * Real-time GET: consults the version map first (under the read lock) and serves the
 * document straight from the translog when possible; otherwise falls through to a
 * searcher-based lookup.
 */
@Override
public GetResult get(Get get) throws EngineException {
    try (ReleasableLock lock = readLock.acquire()) {
        ensureOpen();
        if (get.realtime()) {
            VersionValue versionValue = versionMap.getUnderLock(get.uid().bytes());
            if (versionValue != null) {
                if (versionValue.delete()) {
                    return GetResult.NOT_EXISTS;
                }
                if (get.versionType().isVersionConflictForReads(versionValue.version(), get.version())) {
                    Uid uid = Uid.createUid(get.uid().text());
                    throw new VersionConflictEngineException(shardId, uid.type(), uid.id(), versionValue.version(), get.version());
                }
                if (!get.loadSource()) {
                    // Caller does not need _source; version alone is enough.
                    return new GetResult(true, versionValue.version(), null);
                }
                Translog.Operation op = translog.read(versionValue.translogLocation());
                if (op != null) {
                    return new GetResult(true, versionValue.version(), op.getSource());
                }
            }
        }
        // no version, get the version from the index, we know that we refresh on flush
        return getFromSearcher(get);
    }
}

/**
 * Indexes a brand-new document. Recovery operations bypass the indexing throttle;
 * everything else acquires it. Failures that may indicate a broken engine
 * (OOM/IllegalState/IO) are routed through maybeFailEngine before being rethrown.
 */
@Override
public void create(Create create) throws EngineException {
    try (ReleasableLock lock = readLock.acquire()) {
        ensureOpen();
        if (create.origin() == Operation.Origin.RECOVERY) {
            // Don't throttle recovery operations
            innerCreate(create);
        } else {
            try (Releasable r = throttle.acquireThrottle()) {
                innerCreate(create);
            }
        }
    } catch (OutOfMemoryError | IllegalStateException | IOException t) {
        maybeFailEngine("create", t);
        throw new CreateFailedEngineException(shardId, create, t);
    }
    checkVersionMapRefresh();
}

/**
 * Fast path for auto-generated, duplicate-free ids skips the per-uid dirty lock and
 * the current-version lookup entirely; otherwise resolves the current version under
 * the uid's dirty lock before delegating to innerCreateNoLock.
 */
private void innerCreate(Create create) throws IOException {
    if (engineConfig.isOptimizeAutoGenerateId() && create.autoGeneratedId() && !create.canHaveDuplicates()) {
        /* We don't need to lock because this ID cannot be concurrently updated: */

        innerCreateNoLock(create, Versions.NOT_FOUND, null);
    } else {
        synchronized (dirtyLock(create.uid())) {
            final long currentVersion;
            final VersionValue versionValue;
            versionValue = versionMap.getUnderLock(create.uid().bytes());
            if (versionValue == null) {
                // Not tracked in the version map — fall back to a Lucene lookup.
                currentVersion = loadCurrentVersionFromIndex(create.uid());
            } else {
                if (engineConfig.isEnableGcDeletes() && versionValue.delete() && (engineConfig.getThreadPool().estimatedTimeInMillis() - versionValue.time()) > engineConfig.getGcDeletesInMillis()) {
                    currentVersion = Versions.NOT_FOUND; // deleted, and GC
                } else {
                    currentVersion = versionValue.version();
                }
            }
            innerCreateNoLock(create, currentVersion, versionValue);
        }
    }
}

/**
 * Core of create: checks version conflicts, decides between addDocument(s) and
 * updateDocument(s) (replica consistency and retried auto-id bulk requests force an
 * update), writes the translog entry, and records the new version in the version map.
 * Caller must hold the uid's dirty lock unless the lock-free auto-id fast path applies.
 */
private void innerCreateNoLock(Create create, long currentVersion, VersionValue versionValue) throws IOException {
    // same logic as index
    long updatedVersion;
    long expectedVersion = create.version();
    if (create.versionType().isVersionConflictForWrites(currentVersion, expectedVersion)) {
        if (create.origin() == Operation.Origin.RECOVERY) {
            // Conflicts during translog replay are silently dropped.
            return;
        } else {
            throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);
        }
    }
    updatedVersion = create.versionType().updateVersion(currentVersion, expectedVersion);
    // if the doc exists
    boolean doUpdate = false;
    if ((versionValue != null && versionValue.delete() == false) || (versionValue == null && currentVersion != Versions.NOT_FOUND)) {
        if (create.origin() == Operation.Origin.RECOVERY) {
            return;
        } else if (create.origin() == Operation.Origin.REPLICA) {
            // #7142: the primary already determined it's OK to index this document, and we confirmed above that the version doesn't
            // conflict, so we must also update here on the replica to remain consistent:
            doUpdate = true;
        } else if (create.origin() == Operation.Origin.PRIMARY && create.autoGeneratedId() && create.canHaveDuplicates() && currentVersion == 1 && create.version() == Versions.MATCH_ANY) {
            /*
             * If bulk index request fails due to a disconnect, unavailable shard etc. then the request is
             * retried before it actually fails. However, the documents might already be indexed.
             * For autogenerated ids this means that a version conflict will be reported in the bulk request
             * although the document was indexed properly.
             * To avoid this we have to make sure that the index request is treated as an update and set updatedVersion to 1.
             * See also discussion on https://github.com/elasticsearch/elasticsearch/pull/9125
             */
            doUpdate = true;
            updatedVersion = 1;
        } else {
            // On primary, we throw DAEE if the _uid is already in the index with an older version:
            assert create.origin() == Operation.Origin.PRIMARY;
            throw new DocumentAlreadyExistsException(shardId, create.type(), create.id());
        }
    }
    create.updateVersion(updatedVersion);
    if (doUpdate) {
        if (create.docs().size() > 1) {
            // Multi-doc (nested) documents go in as one atomic block.
            indexWriter.updateDocuments(create.uid(), create.docs());
        } else {
            indexWriter.updateDocument(create.uid(), create.docs().get(0));
        }
    } else {
        if (create.docs().size() > 1) {
            indexWriter.addDocuments(create.docs());
        } else {
            indexWriter.addDocument(create.docs().get(0));
        }
    }
    Translog.Location translogLocation = translog.add(new Translog.Create(create));
    versionMap.putUnderLock(create.uid().bytes(), new VersionValue(updatedVersion, translogLocation));
    create.setTranslogLocation(translogLocation);
    indexingService.postCreateUnderLock(create);
}

/**
 * Indexes (or re-indexes) a document and reports whether it was newly created.
 * Recovery operations bypass the indexing throttle.
 */
@Override
public boolean index(Index index) throws EngineException {
    final boolean created;
    try (ReleasableLock lock = readLock.acquire()) {
        ensureOpen();
        if (index.origin() == Operation.Origin.RECOVERY) {
            // Don't throttle recovery operations
            created = innerIndex(index);
        } else {
            try (Releasable r = throttle.acquireThrottle()) {
                created = innerIndex(index);
            }
        }
    } catch (OutOfMemoryError | IllegalStateException | IOException t) {
        maybeFailEngine("index", t);
        throw new IndexFailedEngineException(shardId, index, t);
    }
    checkVersionMapRefresh();
    return created;
}

/**
 * Forces a refresh if the versionMap is using too much RAM
 */
private /* modifier continues on the next chunk */

void checkVersionMapRefresh() {
    // getAndSet ensures only one refresh is scheduled at a time; the flag is cleared in refresh().
    if (versionMap.ramBytesUsedForRefresh() > config().getVersionMapSize().bytes() && versionMapRefreshPending.getAndSet(true) == false) {
        try {
            if (isClosed.get()) {
                // no point...
                return;
            }
            // Now refresh to clear versionMap:
            engineConfig.getThreadPool().executor(ThreadPool.Names.REFRESH).execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        refresh("version_table_full");
                    } catch (EngineClosedException ex) {
                        // ignore
                    }
                }
            });
        } catch (EsRejectedExecutionException ex) {
            // that is fine too.. we might be shutting down
        }
    }
}

/**
 * Core of index: under the uid's dirty lock, resolves the current version (version map
 * first, Lucene second, honoring GC'd deletes), checks write conflicts, then either
 * adds (doc unseen) or updates the document, records the translog location, and
 * updates the version map. Returns true when the document was newly created.
 */
private boolean innerIndex(Index index) throws IOException {
    synchronized (dirtyLock(index.uid())) {
        final long currentVersion;
        VersionValue versionValue = versionMap.getUnderLock(index.uid().bytes());
        if (versionValue == null) {
            currentVersion = loadCurrentVersionFromIndex(index.uid());
        } else {
            if (engineConfig.isEnableGcDeletes() && versionValue.delete() && (engineConfig.getThreadPool().estimatedTimeInMillis() - versionValue.time()) > engineConfig.getGcDeletesInMillis()) {
                currentVersion = Versions.NOT_FOUND; // deleted, and GC
            } else {
                currentVersion = versionValue.version();
            }
        }
        long updatedVersion;
        long expectedVersion = index.version();
        if (index.versionType().isVersionConflictForWrites(currentVersion, expectedVersion)) {
            if (index.origin() == Operation.Origin.RECOVERY) {
                // Conflicts during translog replay are dropped, not fatal.
                return false;
            } else {
                throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);
            }
        }
        updatedVersion = index.versionType().updateVersion(currentVersion, expectedVersion);
        final boolean created;
        index.updateVersion(updatedVersion);
        if (currentVersion == Versions.NOT_FOUND) {
            // document does not exists, we can optimize for create
            created = true;
            if (index.docs().size() > 1) {
                indexWriter.addDocuments(index.docs());
            } else {
                indexWriter.addDocument(index.docs().get(0));
            }
        } else {
            if (versionValue != null) {
                created = versionValue.delete(); // we have a delete which is not GC'ed...
            } else {
                created = false;
            }
            if (index.docs().size() > 1) {
                indexWriter.updateDocuments(index.uid(), index.docs());
            } else {
                indexWriter.updateDocument(index.uid(), index.docs().get(0));
            }
        }
        Translog.Location translogLocation = translog.add(new Translog.Index(index));
        versionMap.putUnderLock(index.uid().bytes(), new VersionValue(updatedVersion, translogLocation));
        index.setTranslogLocation(translogLocation);
        indexingService.postIndexUnderLock(index);
        return created;
    }
}

/**
 * Deletes a document by id, then opportunistically prunes expired delete tombstones
 * and triggers a version-map refresh if it has grown too large.
 */
@Override
public void delete(Delete delete) throws EngineException {
    try (ReleasableLock lock = readLock.acquire()) {
        ensureOpen();
        // NOTE: we don't throttle this when merges fall behind because delete-by-id does not create new segments:
        innerDelete(delete);
    } catch (OutOfMemoryError | IllegalStateException | IOException t) {
        maybeFailEngine("delete", t);
        throw new DeleteFailedEngineException(shardId, delete, t);
    }
    maybePruneDeletedTombstones();
    checkVersionMapRefresh();
}

private void maybePruneDeletedTombstones() {
    // It's expensive to prune because we walk the deletes map acquiring dirtyLock for each uid so we only do it
    // every 1/4 of gcDeletesInMillis:
    if (engineConfig.isEnableGcDeletes() && engineConfig.getThreadPool().estimatedTimeInMillis() - lastDeleteVersionPruneTimeMSec > engineConfig.getGcDeletesInMillis() * 0.25) {
        pruneDeletedTombstones();
    }
}

/**
 * Core of delete: under the uid's dirty lock, resolves the current version, checks
 * write conflicts, deletes from Lucene only when a live document exists, and always
 * logs the delete in the translog and records a tombstone in the version map.
 */
private void innerDelete(Delete delete) throws IOException {
    synchronized (dirtyLock(delete.uid())) {
        final long currentVersion;
        VersionValue versionValue = versionMap.getUnderLock(delete.uid().bytes());
        if (versionValue == null) {
            currentVersion = loadCurrentVersionFromIndex(delete.uid());
        } else {
            if (engineConfig.isEnableGcDeletes() && versionValue.delete() && (engineConfig.getThreadPool().estimatedTimeInMillis() - versionValue.time()) > engineConfig.getGcDeletesInMillis()) {
                currentVersion = Versions.NOT_FOUND; // deleted, and GC
            } else {
                currentVersion = versionValue.version();
            }
        }
        long updatedVersion;
        long expectedVersion =
delete.version();
        if (delete.versionType().isVersionConflictForWrites(currentVersion, expectedVersion)) {
            if (delete.origin() == Operation.Origin.RECOVERY) {
                return;
            } else {
                throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, expectedVersion);
            }
        }
        updatedVersion = delete.versionType().updateVersion(currentVersion, expectedVersion);
        final boolean found;
        if (currentVersion == Versions.NOT_FOUND) {
            // doc does not exist and no prior deletes
            found = false;
        } else if (versionValue != null && versionValue.delete()) {
            // a "delete on delete", in this case, we still increment the version, log it, and return that version
            found = false;
        } else {
            // we deleted a currently existing document
            indexWriter.deleteDocuments(delete.uid());
            found = true;
        }
        delete.updateVersion(updatedVersion, found);
        Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
        // DeleteVersionValue is a timestamped tombstone so GC can expire it later.
        versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), translogLocation));
        delete.setTranslogLocation(translogLocation);
        indexingService.postDeleteUnderLock(delete);
    }
}

/**
 * @deprecated This was removed, but we keep this API so translog can replay any DBQs on upgrade.
 */
@Deprecated
@Override
public void delete(DeleteByQuery delete) throws EngineException {
    try (ReleasableLock lock = readLock.acquire()) {
        ensureOpen();
        if (delete.origin() == Operation.Origin.RECOVERY) {
            // Don't throttle recovery operations
            innerDelete(delete);
        } else {
            try (Releasable r = throttle.acquireThrottle()) {
                innerDelete(delete);
            }
        }
    }
}

/**
 * Executes a delete-by-query directly against the IndexWriter, applying alias and
 * nested-document filters, then forces a refresh since the version map cannot know
 * which documents were removed.
 */
private void innerDelete(DeleteByQuery delete) throws EngineException {
    try {
        Query query = delete.query();
        if (delete.aliasFilter() != null) {
            BooleanQuery boolQuery = new BooleanQuery();
            boolQuery.add(query, Occur.MUST);
            boolQuery.add(delete.aliasFilter(), Occur.FILTER);
            query = boolQuery;
        }
        if (delete.nested()) {
            query = new IncludeNestedDocsQuery(query, delete.parentFilter());
        }
        indexWriter.deleteDocuments(query);
        translog.add(new Translog.DeleteByQuery(delete));
    } catch (Throwable t) {
        maybeFailEngine("delete_by_query", t);
        throw new DeleteByQueryFailedEngineException(shardId, delete, t);
    }
    // TODO: This is heavy, since we refresh, but we must do this because we don't know which documents were in fact deleted (i.e., our
    // versionMap isn't updated), so we must force a cutover to a new reader to "see" the deletions:
    refresh("delete_by_query");
}

/**
 * Refreshes the searcher so recent changes become visible; a refresh failure (other
 * than a concurrent close) fails the whole engine. Also clears the pending
 * version-map-refresh flag and opportunistically prunes delete tombstones.
 */
@Override
public void refresh(String source) throws EngineException {
    // we obtain a read lock here, since we don't want a flush to happen while we are refreshing
    // since it flushes the index as well (though, in terms of concurrency, we are allowed to do it)
    try (ReleasableLock lock = readLock.acquire()) {
        ensureOpen();
        updateIndexWriterSettings();
        searcherManager.maybeRefreshBlocking();
    } catch (AlreadyClosedException e) {
        ensureOpen();
        maybeFailEngine("refresh", e);
    } catch (EngineClosedException e) {
        throw e;
    } catch (Throwable t) {
        failEngine("refresh failed", t);
        throw new RefreshFailedEngineException(shardId, t);
    }
    // TODO: maybe we should just put a scheduled job in threadPool?
    // We check for pruning in each delete request, but we also prune here e.g. in case a delete burst comes in and then no more deletes
    // for a long time:
    maybePruneDeletedTombstones();
    versionMapRefreshPending.set(false);
}

/**
 * Attempts a synced flush: only succeeds when there are no pending changes and the
 * last commit matches the expected commit id; the check is done optimistically first
 * and then re-done under the write lock before stamping the sync id into the commit.
 */
@Override
public SyncedFlushResult syncFlush(String syncId, CommitId expectedCommitId) throws EngineException {
    // best effort attempt before we acquire locks
    ensureOpen();
    if (indexWriter.hasUncommittedChanges()) {
        logger.trace("can't sync commit [{}]. have pending changes", syncId);
        return SyncedFlushResult.PENDING_OPERATIONS;
    }
    if (expectedCommitId.idsEqual(lastCommittedSegmentInfos.getId()) == false) {
        logger.trace("can't sync commit [{}]. current commit id is not equal to expected.", syncId);
        return SyncedFlushResult.COMMIT_MISMATCH;
    }
    try (ReleasableLock lock = writeLock.acquire()) {
        ensureOpen();
        // Re-check under the write lock — the optimistic checks above may be stale.
        if (indexWriter.hasUncommittedChanges()) {
            logger.trace("can't sync commit [{}]. have pending changes", syncId);
            return SyncedFlushResult.PENDING_OPERATIONS;
        }
        if (expectedCommitId.idsEqual(lastCommittedSegmentInfos.getId()) == false) {
            logger.trace("can't sync commit [{}]. current commit id is not equal to expected.", syncId);
            return SyncedFlushResult.COMMIT_MISMATCH;
        }
        logger.trace("starting sync commit [{}]", syncId);
        commitIndexWriter(indexWriter, translog, syncId);
        logger.debug("successfully sync committed. sync id [{}].", syncId);
        lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo();
        return SyncedFlushResult.SUCCESS;
    } catch (IOException ex) {
        maybeFailEngine("sync commit", ex);
        throw new EngineException(shardId, "failed to sync commit", ex);
    }
}

@Override
public CommitId flush() throws EngineException {
    return flush(false, false);
}

@Override
public CommitId flush(boolean force, boolean waitIfOngoing) throws EngineException {
    ensureOpen();
    final byte[] newCommitId;
    /*
     * Unfortunately the lock order is important here. We have to acquire the readlock first otherwise
     * if we are flushing at the end of the recovery while holding the write lock we can deadlock if:
     * Thread 1: flushes via API and gets the flush lock but blocks on the readlock since Thread 2 has the writeLock
     * Thread 2: flushes at the end of the recovery holding the writeLock and blocks on the flushLock owned by Thread 1
     */
    try (ReleasableLock lock = readLock.acquire()) {
        ensureOpen();
        updateIndexWriterSettings();
        if (flushLock.tryLock() == false) {
            // if we can't get the lock right away we block if needed otherwise barf
            if (waitIfOngoing) {
                logger.trace("waiting for in-flight flush to finish");
                flushLock.lock();
                logger.trace("acquired flush lock after blocking");
            } else {
                throw new FlushNotAllowedEngineException(shardId, "already flushing...");
            }
        } else {
            logger.trace("acquired flush lock immediately");
        }
        try {
            if (indexWriter.hasUncommittedChanges() || force) {
                try {
                    // Roll the translog generation, commit Lucene, then trim the old translog.
                    translog.prepareCommit();
                    logger.trace("starting commit for flush; commitTranslog=true");
                    commitIndexWriter(indexWriter, translog);
                    logger.trace("finished commit for flush");
                    translog.commit();
                    // we need to refresh in order to clear older version values
                    refresh("version_table_flush");
                } catch (Throwable e) {
                    throw new FlushFailedEngineException(shardId, e);
                }
            }
            /*
             * we have to inc-ref the store here since if the engine is closed by a tragic event
             * we don't acquire the write lock and wait until we have exclusive access. This might also
             * dec the store reference which can essentially close the store and unless we can inc the reference
             * we can't use it.
             */
            store.incRef();
            try {
                // reread the last committed segment infos
                lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo();
            } catch (Throwable e) {
                if (isClosed.get() == false) {
                    logger.warn("failed to read latest segment infos on flush", e);
                    if (Lucene.isCorruptionException(e)) {
                        throw new FlushFailedEngineException(shardId, e);
                    }
                }
            } finally {
                store.decRef();
            }
            newCommitId = lastCommittedSegmentInfos.getId();
        } catch (FlushFailedEngineException ex) {
            maybeFailEngine("flush", ex);
            throw ex;
        } finally {
            flushLock.unlock();
        }
    }
    // We don't have to do this here; we do it defensively to make sure that even if wall clock time is misbehaving
    // (e.g., moves backwards) we will at least still sometimes prune deleted tombstones:
    if (engineConfig.isEnableGcDeletes()) {
        pruneDeletedTombstones();
    }
    return new CommitId(newCommitId);
}

/**
 * Walks all delete tombstones in the version map and removes those older than the
 * GC-deletes window, taking the per-uid dirty lock for each entry.
 */
private void pruneDeletedTombstones() {
    long timeMSec = engineConfig.getThreadPool().estimatedTimeInMillis();
    // TODO: not good that we reach into LiveVersionMap here; can we move this inside VersionMap instead? problem is the dirtyLock...
    // we only need to prune the deletes map; the current/old version maps are cleared on refresh:
    for (Map.Entry<BytesRef, VersionValue> entry : versionMap.getAllTombstones()) {
        BytesRef uid = entry.getKey();
        synchronized (dirtyLock(uid)) { // can we do it without this lock on each value? maybe batch to a set and get the lock once per set?
// Must re-get it here, vs using entry.getValue(), in case the uid was indexed/deleted since we pulled the iterator:
            VersionValue versionValue = versionMap.getTombstoneUnderLock(uid);
            if (versionValue != null) {
                if (timeMSec - versionValue.time() > engineConfig.getGcDeletesInMillis()) {
                    versionMap.removeTombstoneUnderLock(uid);
                }
            }
        }
    }
    lastDeleteVersionPruneTimeMSec = timeMSec;
}

/**
 * Runs a forced merge (optionally an upgrade-only merge), guarded by optimizeLock so
 * only one force-merge runs at a time. Optionally flushes afterwards.
 */
@Override
public void forceMerge(final boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, final boolean upgrade, final boolean upgradeOnlyAncientSegments) throws EngineException {
    /*
     * We do NOT acquire the readlock here since we are waiting on the merges to finish
     * that's fine since the IW.rollback should stop all the threads and trigger an IOException
     * causing us to fail the forceMerge
     *
     * The way we implement upgrades is a bit hackish in the sense that we set an instance
     * variable and that this setting will thus apply to the next forced merge that will be run.
     * This is ok because (1) this is the only place we call forceMerge, (2) we have a single
     * thread for optimize, and the 'optimizeLock' guarding this code, and (3) ConcurrentMergeScheduler
     * syncs calls to findForcedMerges.
     */
    assert indexWriter.getConfig().getMergePolicy() instanceof ElasticsearchMergePolicy : "MergePolicy is " + indexWriter.getConfig().getMergePolicy().getClass().getName();
    ElasticsearchMergePolicy mp = (ElasticsearchMergePolicy) indexWriter.getConfig().getMergePolicy();
    optimizeLock.lock();
    try {
        ensureOpen();
        if (upgrade) {
            logger.info("starting segment upgrade upgradeOnlyAncientSegments={}", upgradeOnlyAncientSegments);
            mp.setUpgradeInProgress(true, upgradeOnlyAncientSegments);
        }
        store.incRef(); // increment the ref just to ensure nobody closes the store while we optimize
        try {
            if (onlyExpungeDeletes) {
                assert upgrade == false;
                indexWriter.forceMergeDeletes(true /* blocks and waits for merges*/);
            } else if (maxNumSegments <= 0) {
                assert upgrade == false;
                indexWriter.maybeMerge();
            } else {
                indexWriter.forceMerge(maxNumSegments, true /* blocks and waits for merges*/);
            }
            if (flush) {
                flush(true, true);
            }
            if (upgrade) {
                logger.info("finished segment upgrade");
            }
        } finally {
            store.decRef();
        }
    } catch (Throwable t) {
        ForceMergeFailedEngineException ex = new ForceMergeFailedEngineException(shardId, t);
        maybeFailEngine("force merge", ex);
        throw ex;
    } finally {
        try {
            mp.setUpgradeInProgress(false, false); // reset it just to make sure we reset it in a case of an error
        } finally {
            optimizeLock.unlock();
        }
    }
}

/**
 * Snapshots the current index commit (optionally flushing first) via the deletion
 * policy so its files are retained while the snapshot is in use.
 */
@Override
public SnapshotIndexCommit snapshotIndex(final boolean flushFirst) throws EngineException {
    // we have to flush outside of the readlock otherwise we might have a problem upgrading
    // the to a write lock when we fail the engine in this operation
    if (flushFirst) {
        logger.trace("start flush for snapshot");
        flush(false, true);
        logger.trace("finish flush for snapshot");
    }
    try (ReleasableLock lock = readLock.acquire()) {
        ensureOpen();
        logger.trace("pulling snapshot");
        return deletionPolicy.snapshot();
    } catch (IOException e) {
        throw new SnapshotFailedEngineException(shardId, e);
    }
}

@Override
protected boolean maybeFailEngine(String source, Throwable t) {
    boolean /* assignment continues on the next chunk */

    shouldFail = super.maybeFailEngine(source, t);
    if (shouldFail) {
        return true;
    }
    // Check for AlreadyClosedException
    if (t instanceof AlreadyClosedException) {
        // if we are already closed due to some tragic exception
        // we need to fail the engine. it might have already been failed before
        // but we are double-checking it's failed and closed
        if (indexWriter.isOpen() == false && indexWriter.getTragicException() != null) {
            failEngine("already closed by tragic event", indexWriter.getTragicException());
        }
        return true;
    } else if (t != null && indexWriter.isOpen() == false && indexWriter.getTragicException() == t) {
        // this spot on - we are handling the tragic event exception here so we have to fail the engine
        // right away
        failEngine(source, t);
        return true;
    }
    return false;
}

@Override
protected SegmentInfos getLastCommittedSegmentInfos() {
    return lastCommittedSegmentInfos;
}

/** Adds version-map and IndexWriter memory figures to the segment stats. */
@Override
protected final void writerSegmentStats(SegmentsStats stats) {
    stats.addVersionMapMemoryInBytes(versionMap.ramBytesUsed());
    stats.addIndexWriterMemoryInBytes(indexWriter.ramBytesUsed());
    stats.addIndexWriterMaxMemoryInBytes((long) (indexWriter.getConfig().getRAMBufferSizeMB() * 1024 * 1024));
}

/**
 * Lists the segments of the last commit, marking those that currently participate
 * in an in-flight merge with the merge id.
 */
@Override
public List<Segment> segments(boolean verbose) {
    try (ReleasableLock lock = readLock.acquire()) {
        Segment[] segmentsArr = getSegmentInfo(lastCommittedSegmentInfos, verbose);
        // fill in the merges flag
        Set<OnGoingMerge> onGoingMerges = mergeScheduler.onGoingMerges();
        for (OnGoingMerge onGoingMerge : onGoingMerges) {
            for (SegmentCommitInfo segmentInfoPerCommit : onGoingMerge.getMergedSegments()) {
                for (Segment segment : segmentsArr) {
                    if (segment.getName().equals(segmentInfoPerCommit.info.name)) {
                        segment.mergeId = onGoingMerge.getId();
                        break;
                    }
                }
            }
        }
        return Arrays.asList(segmentsArr);
    }
}

/**
 * Closes the engine without acquiring the write lock. This should only be
 * called while the write lock is hold or in a disaster condition ie. if the engine
 * is failed.
 */
@Override
protected final void closeNoLock(String reason) {
    if (isClosed.compareAndSet(false, true)) {
        assert rwl.isWriteLockedByCurrentThread() || failEngineLock.isHeldByCurrentThread() : "Either the write lock must be held or the engine must be currently be failing itself";
        try {
            this.versionMap.clear();
            try {
                IOUtils.close(searcherManager);
            } catch (Throwable t) {
                logger.warn("Failed to close SearcherManager", t);
            }
            try {
                IOUtils.close(translog);
            } catch (Throwable t) {
                logger.warn("Failed to close translog", t);
            }
            // no need to commit in this case!, we snapshot before we close the shard, so translog and all sync'ed
            logger.trace("rollback indexWriter");
            try {
                indexWriter.rollback();
            } catch (AlreadyClosedException e) {
                // ignore
            }
            logger.trace("rollback indexWriter done");
        } catch (Throwable e) {
            logger.warn("failed to rollback writer on close", e);
        } finally {
            // Release the store ref taken at construction and detach merge listeners.
            store.decRef();
            this.mergeScheduler.removeListener(mergeSchedulerListener);
            this.mergeScheduler.removeFailureListener(mergeSchedulerFailureListener);
            logger.debug("engine closed [{}]", reason);
        }
    }
}

@Override
public boolean hasUncommittedChanges() {
    return indexWriter.hasUncommittedChanges();
}

@Override
protected SearcherManager getSearcherManager() {
    return searcherManager;
}

/** Maps a uid to one of the striped dirty-lock objects via its DJB hash. */
private Object dirtyLock(BytesRef uid) {
    int hash = DjbHashFunction.DJB_HASH(uid.bytes, uid.offset, uid.length);
    return dirtyLocks[MathUtils.mod(hash, dirtyLocks.length)];
}

private Object dirtyLock(Term uid) {
    return dirtyLock(uid.bytes());
}

/** Looks up the current version of a uid directly from the Lucene index. */
private long loadCurrentVersionFromIndex(Term uid) throws IOException {
    try (final Searcher searcher = acquireSearcher("load_version")) {
        return Versions.loadVersion(searcher.reader(), uid);
    }
}

/**
 * Builds the IndexWriter with the engine's merge policy/scheduler, codec, RAM buffer
 * and write-lock settings; continues on the next chunk.
 */
private IndexWriter createWriter(boolean create) throws IOException {
    try {
        final IndexWriterConfig iwc = new IndexWriterConfig(engineConfig.getAnalyzer());
        iwc.setCommitOnClose(false); // we by default don't commit on close
        iwc.setOpenMode(create ?
IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND);
        iwc.setIndexDeletionPolicy(deletionPolicy);
        // with tests.verbose, lucene sets this up: plumb to align with filesystem stream
        boolean verbose = false;
        try {
            verbose = Boolean.parseBoolean(System.getProperty("tests.verbose"));
        } catch (Throwable ignore) {
        }
        iwc.setInfoStream(verbose ? InfoStream.getDefault() : new LoggerInfoStream(logger));
        iwc.setMergeScheduler(mergeScheduler.newMergeScheduler());
        MergePolicy mergePolicy = mergePolicyProvider.getMergePolicy();
        // Give us the opportunity to upgrade old segments while performing
        // background merges
        mergePolicy = new ElasticsearchMergePolicy(mergePolicy);
        iwc.setMergePolicy(mergePolicy);
        iwc.setSimilarity(engineConfig.getSimilarity());
        iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().mbFrac());
        iwc.setMaxThreadStates(engineConfig.getIndexConcurrency());
        iwc.setCodec(engineConfig.getCodec());
        /* We set this timeout to a highish value to work around
         * the default poll interval in the Lucene lock that is
         * 1000ms by default. We might need to poll multiple times
         * here but with 1s poll this is only executed twice at most
         * in combination with the default writelock timeout*/
        iwc.setWriteLockTimeout(5000);
        iwc.setUseCompoundFile(this.engineConfig.isCompoundOnFlush());
        // Warm-up hook for newly-merged segments. Warming up segments here is better since it will be performed at the end
        // of the merge operation and won't slow down _refresh
        iwc.setMergedSegmentWarmer(new IndexReaderWarmer() {
            @Override
            public void warm(LeafReader reader) throws IOException {
                try {
                    assert isMergedSegment(reader);
                    if (warmer != null) {
                        final Engine.Searcher searcher = new Searcher("warmer", searcherFactory.newSearcher(reader, null));
                        final IndicesWarmer.WarmerContext context = new IndicesWarmer.WarmerContext(shardId, searcher);
                        warmer.warmNewReaders(context);
                    }
                } catch (Throwable t) {
                    // Don't fail a merge if the warm-up failed
                    if (isClosed.get() == false) {
                        logger.warn("Warm-up failed", t);
                    }
                    if (t instanceof Error) {
                        // assertion/out-of-memory error, don't ignore those
                        throw (Error) t;
                    }
                }
            }
        });
        return new IndexWriter(store.directory(), iwc);
    } catch (LockObtainFailedException ex) {
        boolean isLocked = IndexWriter.isLocked(store.directory());
        logger.warn("Could not lock IndexWriter isLocked [{}]", ex, isLocked);
        throw ex;
    }
}

/** Extended SearcherFactory that warms the segments if needed when acquiring a new searcher */
final static class SearchFactory extends EngineSearcherFactory {
    private final IndicesWarmer warmer;
    private final ShardId shardId;
    private final ESLogger logger;
    private final AtomicBoolean isEngineClosed;

    SearchFactory(ESLogger logger, AtomicBoolean isEngineClosed, EngineConfig engineConfig) {
        super(engineConfig);
        warmer = engineConfig.getWarmer();
        shardId = engineConfig.getShardId();
        this.logger = logger;
        this.isEngineClosed = isEngineClosed;
    }

    /**
     * Warms only the segments that are new relative to {@code previousReader}
     * (merged segments are handled by the merged-segment warmer above), then warms
     * the top-level reader. Warm-up failures are logged, never propagated, unless
     * the engine is already closed.
     */
    @Override
    public IndexSearcher newSearcher(IndexReader reader, IndexReader previousReader) throws IOException {
        IndexSearcher searcher = super.newSearcher(reader, previousReader);
        if (warmer != null) {
            // we need to pass a custom searcher that does not release anything on Engine.Search Release,
            // we will release explicitly
            IndexSearcher newSearcher = null;
            boolean closeNewSearcher = false;
            try {
                if (previousReader == null) {
                    // we are starting up - no writer active so we can't acquire a searcher.
                    newSearcher = searcher;
                } else {
                    // figure out the newSearcher, with only the new readers that are relevant for us
                    List<IndexReader> readers = Lists.newArrayList();
                    for (LeafReaderContext newReaderContext : reader.leaves()) {
                        if (isMergedSegment(newReaderContext.reader())) {
                            // merged segments are already handled by IndexWriterConfig.setMergedSegmentWarmer
                            continue;
                        }
                        boolean found = false;
                        for (LeafReaderContext currentReaderContext : previousReader.leaves()) {
                            if (currentReaderContext.reader().getCoreCacheKey().equals(newReaderContext.reader().getCoreCacheKey())) {
                                found = true;
                                break;
                            }
                        }
                        if (!found) {
                            readers.add(newReaderContext.reader());
                        }
                    }
                    if (!readers.isEmpty()) {
                        // we don't want to close the inner readers, just increase ref on them
                        IndexReader newReader = new MultiReader(readers.toArray(new IndexReader[readers.size()]), false);
                        newSearcher = super.newSearcher(newReader, null);
                        closeNewSearcher = true;
                    }
                }
                if (newSearcher != null) {
                    IndicesWarmer.WarmerContext context = new IndicesWarmer.WarmerContext(shardId, new Searcher("warmer", newSearcher));
                    warmer.warmNewReaders(context);
                }
                warmer.warmTopReader(new IndicesWarmer.WarmerContext(shardId, new Searcher("warmer", searcher)));
            } catch (Throwable e) {
                if (isEngineClosed.get() == false) {
                    logger.warn("failed to prepare/warm", e);
                }
            } finally {
                // no need to release the fullSearcher, nothing really is done...
                if (newSearcher != null && closeNewSearcher) {
                    IOUtils.closeWhileHandlingException(newSearcher.getIndexReader()); // ignore
                }
            }
        }
        return searcher;
    }
}

public void activateThrottling() {
    throttle.activate();
}

public void deactivateThrottling() {
    throttle.deactivate();
}

long getGcDeletesInMillis() {
    return engineConfig.getGcDeletesInMillis();
}

LiveIndexWriterConfig getCurrentIndexWriterConfig() {
    return indexWriter.getConfig();
}

/** Fails the engine when a background merge fails; corruption gets a dedicated reason. */
class FailEngineOnMergeFailure implements MergeSchedulerProvider.FailureListener {
    @Override
    public void onFailedMerge(MergePolicy.MergeException e) {
        if (Lucene.isCorruptionException(e)) {
            failEngine("corrupt file detected source: [merge]", e);
        } else {
            failEngine("merge exception", e);
        }
    }
}

/**
 * Activates indexing throttling while more merges are in flight than the scheduler's
 * maximum, and deactivates it once the count drops back below the limit.
 */
class MergeSchedulerListener implements MergeSchedulerProvider.Listener {
    private final AtomicInteger numMergesInFlight = new AtomicInteger(0);
    private final AtomicBoolean isThrottling = new AtomicBoolean();

    @Override
    public synchronized void beforeMerge(OnGoingMerge merge) {
        int maxNumMerges = mergeScheduler.getMaxMerges();
        if (numMergesInFlight.incrementAndGet() > maxNumMerges) {
            if (isThrottling.getAndSet(true) == false) {
                logger.info("now throttling indexing: numMergesInFlight={}, maxNumMerges={}", numMergesInFlight, maxNumMerges);
                indexingService.throttlingActivated();
                activateThrottling();
            }
        }
    }

    @Override
    public synchronized void afterMerge(OnGoingMerge merge) {
        int maxNumMerges = mergeScheduler.getMaxMerges();
        if (numMergesInFlight.decrementAndGet() < maxNumMerges) {
            if (isThrottling.getAndSet(false)) {
                logger.info("stop throttling indexing: numMergesInFlight={}, maxNumMerges={}", numMergesInFlight, maxNumMerges);
                indexingService.throttlingDeactivated();
                deactivateThrottling();
            }
        }
    }
}

// Commits the writer with the current translog generation (and optional sync id)
// stamped into the commit user data; continues beyond this chunk.
private void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException {
    try {
        Translog.TranslogGeneration translogGeneration = translog.getGeneration();
        logger.trace("committing writer with translog id [{}] and sync id [{}] ",
translogGeneration.translogFileGeneration, syncId); Map<String, String> commitData = new HashMap<>(2); commitData.put(Translog.TRANSLOG_GENERATION_KEY, Long.toString(translogGeneration.translogFileGeneration)); commitData.put(Translog.TRANSLOG_UUID_KEY, translogGeneration.translogUUID); if (syncId != null) { commitData.put(Engine.SYNC_COMMIT_ID, syncId); } indexWriter.setCommitData(commitData); writer.commit(); } catch (Throwable ex) { failEngine("lucene commit failed", ex); throw ex; } } private void commitIndexWriter(IndexWriter writer, Translog translog) throws IOException { commitIndexWriter(writer, translog, null); } }
package com.sunil.selectmutiple;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.ArrayList;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.mime.FormBodyPart;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.ByteArrayBody;
import org.apache.http.entity.mime.content.ContentBody;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.InputStreamBody;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.james.mime4j.message.BodyPart;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.GridView;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.Switch;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ViewSwitcher;
import com.kns.adapter.GalleryAdapter;
import com.kns.util.CustomMultiPartEntity;
import com.kns.util.CustomMultiPartEntity.ProgressListener;
import com.kns.util.DoUpload;
import com.kns.util.ImageConstant;
import com.kns.util.ImageUtil;
import com.kns.web.WebHelper;
import com.nostra13.universalimageloader.cache.memory.impl.WeakMemoryCache;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.ImageLoaderConfiguration;
import com.nostra13.universalimageloader.core.assist.ImageScaleType;

/**
 * Displays the images selected by a multi-image gallery picker in a grid and
 * uploads them (re-compressed per a user-chosen quality) to a remote PHP
 * endpoint as a multipart POST.
 *
 * Flow: onResume() reads the "all_path" string-array extra from the launching
 * intent and fills the grid; the upload button shows a quality-selection
 * dialog (customalertdialog), whose OK button starts UploadingTask.
 *
 * NOTE(review): this class performs network I/O via the deprecated Apache
 * DefaultHttpClient and AsyncTask; several imports are unused leftovers.
 */
public class GridShowImageActivity extends Activity implements OnClickListener, OnSeekBarChangeListener{

    // Action string the external multi-pick gallery activity is registered under.
    public static final String ACTION_MULTIPLE_PICK = "ACTION_MULTIPLE_PICK";
    private static final String TAG="GridShowImageActivity";
    GridView gridGallery;
    Handler handler;
    GalleryAdapter adapter;
    ImageView imgSinglePick;
    //ImageButton btnGalleryPick;
    ImageButton btnGalleryPickMul;
    ImageButton btn_uploadpics;
    ImageButton btn_back=null;
    String action;
    ViewSwitcher viewSwitcher;
    ImageLoader imageLoader;
    Context context=null;
    private ProgressDialog prodialog=null;
    private ProgressDialog prodialog1=null;
    // Images currently shown in the grid; also the upload work list.
    ArrayList<CustomGallery> dataT = new ArrayList<CustomGallery>();
    private Button btn_login=null;
    private Button btn_cancel=null;
    private EditText edit_username=null;
    private EditText edit_pasw=null;
    private TextView textseekbar=null;
    private String username;
    private String password;
    private static SharedPreferences Prefs = null;
    private static String prefname = "galleryPrefs";
    // Total byte length of the multipart entity; written on the background
    // thread and read by the progress listener — not synchronized (pre-existing).
    long totalsize;
    // JPEG quality (0-100) chosen via the seek bar dialog; default set to 100
    // when the dialog opens.
    int seekbarprogress=0;

    /** Wires up the grid, the image loader and the three buttons. */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.gridshow);
        context=this;
        initImageLoader();
        handler = new Handler();
        gridGallery = (GridView) findViewById(R.id.gridGallery);
        gridGallery.setFastScrollEnabled(true);
        adapter = new GalleryAdapter(GridShowImageActivity.this, imageLoader);
        adapter.setMultiplePick(false);
        gridGallery.setAdapter(adapter);
        viewSwitcher = (ViewSwitcher) findViewById(R.id.viewSwitcher);
        viewSwitcher.setDisplayedChild(1);
        btnGalleryPickMul = (ImageButton) findViewById(R.id.btnGalleryPickMul);
        btn_uploadpics = (ImageButton) findViewById(R.id.button_pictureupload);
        btn_back=(ImageButton) findViewById(R.id.imageButton_back);
        btnGalleryPickMul.setOnClickListener(this);
        btn_uploadpics.setOnClickListener(this);
        btn_back.setOnClickListener(this);
    }

    /** Configures the Universal Image Loader singleton used by the grid adapter. */
    private void initImageLoader() {
        DisplayImageOptions defaultOptions = new DisplayImageOptions.Builder()
                .cacheOnDisc().imageScaleType(ImageScaleType.EXACTLY_STRETCHED)
                .bitmapConfig(Bitmap.Config.RGB_565).build();
        ImageLoaderConfiguration.Builder builder = new ImageLoaderConfiguration.Builder(
                this).defaultDisplayImageOptions(defaultOptions).memoryCache(
                new WeakMemoryCache());
        ImageLoaderConfiguration config = builder.build();
        imageLoader = ImageLoader.getInstance();
        imageLoader.init(config);
    }

    /**
     * Re-reads the "all_path" extra from the launching intent and refills the
     * grid. NOTE(review): getStringArrayExtra may return null if this activity
     * is ever launched without the extra — all_path.length would then NPE;
     * TODO confirm all launch paths supply it.
     */
    @Override
    protected void onResume() {
        Intent intent=getIntent();
        if (intent !=null) {
            String[] all_path = intent.getStringArrayExtra("all_path");
            //ArrayList<CustomGallery> dataT = new ArrayList<CustomGallery>();
            if (all_path.length > 0) {
                dataT.clear();
                for (String string : all_path) {
                    CustomGallery item = new CustomGallery();
                    item.sdcardPath = string;
                    //Log.v(TAG, "path is: "+string);
                    ImageUtil.galleryLog(TAG, "path is: "+string);
                    dataT.add(item);
                }
                //viewSwitcher.setDisplayedChild(0);
                adapter.addAll(dataT);
            }
        }
        super.onResume();
    }

    // NOTE(review): removed a commented-out onActivityResult(int,int,Intent)
    // override (single-pick / multi-pick result handling, request codes 100
    // and 200) — dead code; recover from version control if needed.

    /**
     * Button dispatch: multi-pick relaunches the gallery picker (and finishes
     * this activity), upload validates connectivity and opens the quality
     * dialog, back simply finishes.
     */
    @Override
    public void onClick(View v) {
        if (btnGalleryPickMul==v) {
            Intent i = new Intent(ACTION_MULTIPLE_PICK);
            startActivityForResult(i, 200);
            finish();
        }
        else if (btn_uploadpics==v) {
            if (dataT.size() > 0) {
                //alertBox();
                boolean isinternet=ImageUtil.isInternetOn(context);
                if (isinternet) {
                    //prodialog=ProgressDialog.show(context, "", "Uploading file..");
                    customalertdialog();
                    /*UploadingTask task=new UploadingTask();
                    task.execute();*/
                }
                else{
                    ImageUtil.showAlert(GridShowImageActivity.this, "Internet Connection Error.");
                }
            }
            else{
                ImageUtil.showAlert(GridShowImageActivity.this, "No image available for upload.");
            }
        }
        else if (btn_back==v) {
            finish();
        }
    }

    // NOTE(review): removed a large commented-out postFile(...) method with an
    // inline ProgressiveEntity / ProgressiveOutputStream implementation — this
    // was an earlier upload experiment superseded by UploadingTask +
    // CustomMultiPartEntity; recover from version control if needed.

    /**
     * Reads the full HTTP response body as text.
     *
     * @param response the executed response; its entity stream is consumed
     * @return trimmed body text
     * @throws IOException on stream errors
     *
     * NOTE(review): uses the platform default charset and never closes the
     * reader — pre-existing behavior, left unchanged in this doc pass.
     */
    public String getContent(HttpResponse response) throws IOException {
        BufferedReader rd = new BufferedReader(new InputStreamReader(response.getEntity().getContent()));
        String body = "";
        String content = "";
        while ((body = rd.readLine()) != null) {
            content += body + "\n";
        }
        return content.trim();
    }

    /**
     * Background upload of every image in dataT: each file is downsampled
     * (getwidthheight) and JPEG-compressed at seekbarprogress quality, added
     * as part "file1..fileN" of a multipart POST together with "member_id"
     * and "count", then sent to image_fileupload.php. Progress is fed from
     * CustomMultiPartEntity's byte counter into the progress dialog.
     */
    private class UploadingTask extends AsyncTask<String, Integer, String> {

        @Override
        protected void onPreExecute() {
            prodialog1 = new ProgressDialog(context);
            prodialog1.setTitle("In progress...");
            prodialog1.setMessage("Uploading Images...");
            //prodialog1.setIcon(R.drawable.upload);
            prodialog1.setCanceledOnTouchOutside(false);
            //prodialog1.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
            //prodialog1.setProgressDrawable(getResources().getDrawable(R.drawable.custom_progress));
            prodialog1.setCancelable(true);
            // "Upload In Background" merely dismisses the dialog; the task keeps running.
            prodialog1.setButton(DialogInterface.BUTTON_NEGATIVE, "Upload In Background", new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    dialog.dismiss();
                }
            });
            prodialog1.show();
        }

        @Override
        protected String doInBackground(String... urls) {
            String response1 = "";
            //http://23.21.71.132/KNSGallery/web_service.php?act=getmemID&ws=1&devID=123&andID=234
            Prefs = context.getSharedPreferences(prefname, Context.MODE_PRIVATE);
            String memberid=Prefs.getString(ImageConstant.MEMBERID, "");
            try {
                String url=ImageConstant.BASEURL+"image_fileupload.php";
                HttpClient client = new DefaultHttpClient();
                HttpPost post = new HttpPost(url);
                MultipartEntityBuilder builder = MultipartEntityBuilder.create();
                builder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
                Log.v(TAG, "SeekBar values is: "+seekbarprogress);
                // Parts are named file1..fileN (1-based) — the PHP side pairs
                // them with the "count" field below.
                for (int i = 1; i <= dataT.size(); i++) {
                    CustomGallery gallery=dataT.get(i-1);
                    String filename=gallery.sdcardPath;
                    //Log.v(TAG, "file name: "+filename);
                    ImageUtil.galleryLog(TAG, "file name: "+filename);
                    // NOTE(review): removed commented-out alternatives here
                    // (raw FileInputStream + PNG re-encode, InputStreamBody,
                    // FileBody) — recover from version control if needed.
                    // Downsample according to the chosen quality bucket, then
                    // re-encode as JPEG at the chosen quality.
                    Bitmap bmp=getwidthheight(filename, seekbarprogress);
                    //Bitmap bmp=decodeSampledBitmapFromPath(filename, 750, 500);
                    Log.v(TAG, "Bitmap width is: "+bmp.getWidth());
                    Log.v(TAG, "Bitmap height is: "+bmp.getHeight());
                    //Bitmap bmp = BitmapFactory.decodeFile(filename);
                    ByteArrayOutputStream bos = new ByteArrayOutputStream();
                    bmp.compress(CompressFormat.JPEG, seekbarprogress, bos);
                    ContentBody foto = new ByteArrayBody(bos.toByteArray(), filename);
                    builder.addPart("file"+i, foto);
                }
                builder.addTextBody("member_id", memberid);
                builder.addTextBody("count", String.valueOf(dataT.size()));
                final HttpEntity yourEntity = builder.build();
                // Wrap the entity so byte-level write progress is reported.
                CustomMultiPartEntity entity=new CustomMultiPartEntity(yourEntity, new ProgressListener() {
                    @Override
                    public void transferred(long num) {
                        publishProgress((int) ((num / (float) totalsize) * 100));
                        //Log.v(TAG, "publish progress :"+totalsize);
                        ImageUtil.galleryLog(TAG,"publish progress :"+totalsize);
                    }
                });
                totalsize = entity.getContentLength();
                //Log.v(TAG, "total size is: "+totalsize);
                ImageUtil.galleryLog(TAG,"total size is: "+totalsize);
                post.setEntity(entity);
                HttpResponse response = client.execute(post);
                response1= getContent(response);
                //Log.v(TAG, "response is: "+ response1);
                ImageUtil.galleryLog(TAG,"response is: "+ response1);
            } catch (Exception e) {
                // NOTE(review): failures are only logged; the user sees no
                // error and onPostExecute gets "" — consider surfacing this.
                e.printStackTrace();
            }
            return response1;
        }

        @Override
        protected void onProgressUpdate(Integer... progress) {
            prodialog1.setProgress((progress[0]));
            //Log.v(TAG, "onprogressupdate :"+progress[0]);
            ImageUtil.galleryLog(TAG,"onprogressupdate :"+progress[0]);
        }

        @Override
        protected void onPostExecute(String resultString) {
            prodialog1.dismiss();
            Log.v(TAG, "onPostExecute called");
            //Log.v(TAG, "Response is: "+resultString);
            ImageUtil.galleryLog(TAG,"Response is: "+resultString);
            // Server replies with JSON {"Success": "<message>"}; shown as a toast.
            if (resultString != null) {
                try {
                    JSONObject jsonobj=new JSONObject(resultString);
                    String resp=jsonobj.getString("Success");
                    //ImageUtil.showAlert(GridShowImageActivity.this, resp);
                    Toast.makeText(getApplicationContext(), resp, Toast.LENGTH_LONG).show();
                } catch (JSONException e) {
                    e.printStackTrace();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Shows the JPEG-quality picker (seek bar, default 100%); OK starts the
     * upload. The seek bar reports into seekbarprogress via this activity's
     * OnSeekBarChangeListener implementation.
     */
    private void customalertdialog(){
        LayoutInflater li = LayoutInflater.from(context);
        View promptsView = li.inflate(R.layout.custom_seekbar, null);
        AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(context);
        alertDialogBuilder.setView(promptsView);
        SeekBar seekbar=(SeekBar)promptsView.findViewById(R.id.SeekBar_compress);
        seekbarprogress=100;
        seekbar.setProgress(seekbarprogress);
        seekbar.setOnSeekBarChangeListener(this);
        textseekbar = (TextView) promptsView.findViewById(R.id.textView_seedbarValue);
        textseekbar.setText(100+" %");
        alertDialogBuilder
            .setCancelable(false)
            .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog,int id) {
                    dialog.dismiss();
                    UploadingTask task=new UploadingTask();
                    task.execute();
                }
            });
        AlertDialog alertDialog = alertDialogBuilder.create();
        alertDialog.show();
    }

    /** Mirrors the seek bar position into seekbarprogress and the "%" label. */
    @Override
    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
        seekbarprogress=progress;
        textseekbar.setText(seekbarprogress+" %");
    }

    @Override
    public void onStartTrackingTouch(SeekBar seekBar) {
    }

    @Override
    public void onStopTrackingTouch(SeekBar seekBar) {
    }

    /**
     * Decodes a bitmap from a file path using purgeable/shareable options to
     * reduce memory pressure. Returns null if the file is missing or decoding
     * fails. NOTE(review): appears unused by the live upload path (which uses
     * getwidthheight) — candidate for removal.
     */
    public static Bitmap convertBitmap(String path) {
        Bitmap bitmap=null;
        BitmapFactory.Options bfOptions=new BitmapFactory.Options();
        bfOptions.inDither=false;                     //Disable Dithering mode
        bfOptions.inPurgeable=true;                   //Tell to gc that whether it needs free memory, the Bitmap can be cleared
        bfOptions.inInputShareable=true;              //Which kind of reference will be used to recover the Bitmap data after being clear, when it will be used in the future
        bfOptions.inTempStorage=new byte[32 * 1024];
        File file=new File(path);
        FileInputStream fs=null;
        try {
            fs = new FileInputStream(file);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        try {
            if(fs!=null) {
                bitmap=BitmapFactory.decodeFileDescriptor(fs.getFD(), null, bfOptions);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally{
            if(fs!=null) {
                try {
                    fs.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return bitmap;
    }

    /**
     * Buckets a 0-100 quality value into 1..5 (20-point bands); values outside
     * 0-100 fall through to 5.
     */
    public int getseekbarValue(int seekbarvalue){
        if (seekbarvalue < 20 && seekbarvalue >= 0) {
            Log.v(TAG, "return 1");
            return 1;
        }
        else if (seekbarvalue < 40 && seekbarvalue >= 20) {
            Log.v(TAG, "return 2");
            return 2;
        }
        else if (seekbarvalue < 60 && seekbarvalue >= 40) {
            Log.v(TAG, "return 3");
            return 3;
        }
        else if (seekbarvalue < 80 && seekbarvalue >= 60) {
            Log.v(TAG, "return 4");
            return 4;
        }
        else if (seekbarvalue <= 100 && seekbarvalue >= 80) {
            Log.v(TAG, "return 5");
            return 5;
        }
        else{
            return 5;
        }
    }

    /**
     * Maps the quality bucket (1..5) to a target decode size and returns the
     * downsampled bitmap: higher quality buckets keep larger dimensions
     * (550x350 up to 1000x800).
     */
    public Bitmap getwidthheight(String filename, int seekbarvalue){
        Bitmap bmp=null;
        int getvalue=getseekbarValue(seekbarvalue);
        switch (getvalue) {
        case 1:
            Log.v(TAG, "case 1");
            bmp=decodeSampledBitmapFromPath(filename, 550, 350);
            break;
        case 2:
            Log.v(TAG, "case 2");
            bmp=decodeSampledBitmapFromPath(filename, 650, 450);
            break;
        case 3:
            Log.v(TAG, "case 3");
            bmp=decodeSampledBitmapFromPath(filename, 750, 550);
            break;
        case 4:
            Log.v(TAG, "case 4");
            bmp=decodeSampledBitmapFromPath(filename, 850, 650);
            break;
        case 5:
            Log.v(TAG, "case 5");
            bmp=decodeSampledBitmapFromPath(filename, 1000, 800);
            break;
        default:
            Log.v(TAG, "case default");
            bmp=decodeSampledBitmapFromPath(filename, 1000, 800);
            break;
        }
        return bmp;
    }

    // NOTE(review): removed three commented-out legacy members — alertBox()
    // (login dialog), class CustomListener (login validation click handler)
    // and class LoginTask (AsyncTask hitting web_service.php and storing
    // MemberID/UserName in SharedPreferences). Dead code; recover from
    // version control if needed.

    /**
     * Two-pass decode: first reads only the bounds, computes an inSampleSize
     * for the requested dimensions, then decodes the subsampled bitmap.
     */
    public Bitmap decodeSampledBitmapFromPath(String path, int reqWidth, int reqHeight) {
        final BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(path, options);
        Log.v(TAG, "before compression width: "+options.outWidth);
        Log.v(TAG, "before compression heigth: "+options.outHeight);
        options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
        // Decode bitmap with inSampleSize set
        options.inJustDecodeBounds = false;
        Bitmap bmp = BitmapFactory.decodeFile(path, options);
        return bmp;
    }

    /**
     * Computes a subsampling factor from decoded bounds so the smaller
     * dimension roughly matches its requested size. NOTE(review): Android
     * docs recommend power-of-two inSampleSize values; Math.round here can
     * produce non-powers-of-two, which decoders round down — acceptable but
     * imprecise.
     */
    public static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
        final int height = options.outHeight;
        final int width = options.outWidth;
        int inSampleSize = 1;
        if (height > reqHeight || width > reqWidth) {
            if (width > height) {
                inSampleSize = Math.round((float) height / (float) reqHeight);
            } else {
                inSampleSize = Math.round((float) width / (float) reqWidth);
            }
        }
        return inSampleSize;
    }
}
package com.deviantart.kafka_connect_s3; import java.io.BufferedWriter; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.FilterOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.zip.GZIPOutputStream; import org.json.simple.JSONArray; import org.json.simple.JSONObject; /** * BlockGZIPFileWriter accumulates newline delimited UTF-8 records and writes them to an * output file that is readable by GZIP. * * In fact this file is the concatenation of possibly many separate GZIP files corresponding to smaller chunks * of the input. Alongside the output filename.gz file, a file filename-index.json is written containing JSON * metadata about the size and location of each block. * * This allows a reading class to skip to particular line/record without decompressing whole file by looking up * the offset of the containing block, seeking to it and beginning GZIp read from there. * * This is especially useful when the file is an archive in HTTP storage like Amazon S3 where GET request with * range headers can allow pulling a small segment from overall compressed file. * * Note that thanks to GZIP spec, the overall file is perfectly valid and will compress as if it was a single stream * with any regular GZIP decoding library or program. 
*/ public class BlockGZIPFileWriter { private String filenameBase; private String path; private GZIPOutputStream gzipStream; private BufferedWriter writer; private CountingOutputStream fileStream; private class Chunk { public long rawBytes = 0; public long byteOffset = 0; public long compressedByteLength = 0; public long firstOffset = 0; public long numRecords = 0; }; private class CountingOutputStream extends FilterOutputStream { private long numBytes = 0; CountingOutputStream(OutputStream out) throws IOException { super(out); } @Override public void write(int b) throws IOException { out.write(b); numBytes++; } @Override public void write(byte[] b) throws IOException { out.write(b); numBytes += b.length; } @Override public void write(byte[] b, int off, int len) throws IOException { out.write(b, off, len); numBytes += len; } public long getNumBytesWritten() { return numBytes; } }; private ArrayList<Chunk> chunks; // Default each chunk is 64MB of uncompressed data private long chunkThreshold; // Offset to the first record. 
// Set to non-zero if this file is part of a larger stream and you want // record offsets in the index to reflect the global offset rather than local private long firstRecordOffset; public BlockGZIPFileWriter(String filenameBase, String path) throws FileNotFoundException, IOException { this(filenameBase, path, 0, 67108864); } public BlockGZIPFileWriter(String filenameBase, String path, long firstRecordOffset) throws FileNotFoundException, IOException { this(filenameBase, path, firstRecordOffset, 67108864); } public BlockGZIPFileWriter(String filenameBase, String path, long firstRecordOffset, long chunkThreshold) throws FileNotFoundException, IOException { this.filenameBase = filenameBase; this.path = path; this.firstRecordOffset = firstRecordOffset; this.chunkThreshold = chunkThreshold; chunks = new ArrayList<Chunk>(); // Initialize first chunk Chunk ch = new Chunk(); ch.firstOffset = firstRecordOffset; chunks.add(ch); // Explicitly truncate the file. On linux and OS X this appears to happen // anyway when opening with FileOutputStream but that behavior is not actually documented // or specified anywhere so let's be rigorous about it. 
// NOTE(review): tail of the file-open routine — creates/truncates the data file and
// wraps it in a byte-counting stream so per-chunk byte offsets can be recorded.
        FileOutputStream fos = new FileOutputStream(new File(getDataFilePath()));
        fos.getChannel().truncate(0); // start clean even if the file already existed
        // Open file for writing and setup
        this.fileStream = new CountingOutputStream(fos);
        initChunkWriter();
    }

    /**
     * (Re)creates the GZIP + writer stack on top of the counting stream.
     * Called at open time and again after each finished chunk, so every chunk
     * becomes an independently decompressible GZIP member.
     */
    private void initChunkWriter() throws IOException, UnsupportedEncodingException {
        gzipStream = new GZIPOutputStream(fileStream);
        writer = new BufferedWriter(new OutputStreamWriter(gzipStream, "UTF-8"));
    }

    /** Returns the chunk currently being appended to (always the last one). */
    private Chunk currentChunk() {
        return chunks.get(chunks.size() - 1);
    }

    /** Returns the record offset this file starts at (also encoded in the file names). */
    public long getFirstRecordOffset() {
        return firstRecordOffset;
    }

    /** Data file name, e.g. {@code base-000000000042.gz}. */
    public String getDataFileName() {
        return String.format("%s-%012d.gz", filenameBase, firstRecordOffset);
    }

    /** Index file name, e.g. {@code base-000000000042.index.json}. */
    public String getIndexFileName() {
        return String.format("%s-%012d.index.json", filenameBase, firstRecordOffset);
    }

    /** Full path of the data file inside {@code path}. */
    public String getDataFilePath() {
        return String.format("%s/%s", path, this.getDataFileName());
    }

    /** Full path of the index file inside {@code path}. */
    public String getIndexFilePath() {
        return String.format("%s/%s", path, this.getIndexFileName());
    }

    /**
     * Writes string to file, assuming this is a single record
     *
     * If there is no newline at then end we will add one
     */
    public void write(String record) throws IOException {
        Chunk ch = currentChunk();

        boolean hasNewLine = record.endsWith("\n");

        // NOTE(review): this counts chars, not encoded bytes — under-counts the true
        // UTF-8 size for non-ASCII records. Acceptable as a rough chunk-size
        // threshold, but verify no reader treats "byte_length_uncompressed" as exact.
        int rawBytesToWrite = record.length();
        if (!hasNewLine) {
            rawBytesToWrite += 1;
        }

        // Roll over to a fresh chunk (and a fresh GZIP member) once the current
        // chunk would exceed the configured threshold.
        if ((ch.rawBytes + rawBytesToWrite) > chunkThreshold) {
            finishChunk();
            initChunkWriter();

            Chunk newCh = new Chunk();
            // next chunk starts where this one ended, in both records and bytes
            newCh.firstOffset = ch.firstOffset + ch.numRecords;
            newCh.byteOffset = ch.byteOffset + ch.compressedByteLength;
            chunks.add(newCh);
            ch = newCh;
        }

        writer.append(record);
        if (!hasNewLine) {
            writer.newLine();
        }
        ch.rawBytes += rawBytesToWrite;
        ch.numRecords++;
    }

    /** Removes the data and index files, if present. */
    public void delete() throws IOException {
        deleteIfExists(getDataFilePath());
        deleteIfExists(getIndexFilePath());
    }

    /** Deletes the given path unless it is missing or a directory. */
    private void deleteIfExists(String path) throws IOException {
        File f = new File(path);
        if (f.exists() && !f.isDirectory()) {
            f.delete();
        }
    }

    /**
     * Finalizes the current GZIP member and records the chunk's compressed length
     * from the byte counter. Does NOT close the underlying file stream.
     */
    private void finishChunk() throws IOException {
        Chunk ch = currentChunk();

        // Complete GZIP block without closing stream
        writer.flush();
        gzipStream.finish();

        // We can now find out how long this chunk was once compressed
        long bytesWritten = fileStream.getNumBytesWritten();
        ch.compressedByteLength = bytesWritten - ch.byteOffset;
    }

    /** Flushes the final chunk, closes the whole stream stack, then writes the index. */
    public void close() throws IOException {
        // Flush last chunk, updating index
        finishChunk();
        // Now close the writer (and the whole stream stack)
        writer.close();
        writeIndex();
    }

    /**
     * Serializes per-chunk metadata (record offsets and byte ranges) as JSON so a
     * reader can seek directly to a chunk without decompressing the whole file.
     */
    private void writeIndex() throws IOException {
        JSONArray chunkArr = new JSONArray();

        for (Chunk ch : chunks) {
            JSONObject chunkObj = new JSONObject();
            chunkObj.put("first_record_offset", ch.firstOffset);
            chunkObj.put("num_records", ch.numRecords);
            chunkObj.put("byte_offset", ch.byteOffset);
            chunkObj.put("byte_length", ch.compressedByteLength);
            chunkObj.put("byte_length_uncompressed", ch.rawBytes);
            chunkArr.add(chunkObj);
        }

        JSONObject index = new JSONObject();
        index.put("chunks", chunkArr);

        try (FileWriter file = new FileWriter(getIndexFilePath())) {
            file.write(index.toJSONString());
            file.close(); // redundant inside try-with-resources, but harmless
        }
    }

    /** Sum of (approximate, see write()) uncompressed bytes across all chunks. */
    public int getTotalUncompressedSize() {
        int totalBytes = 0;
        for (Chunk ch : chunks) {
            totalBytes += ch.rawBytes;
        }
        return totalBytes;
    }

    /** Number of chunks so far, including the one currently in progress. */
    public int getNumChunks() {
        return chunks.size();
    }

    /** Total number of records written across all chunks. */
    public int getNumRecords() {
        int totalRecords = 0;
        for (Chunk ch : chunks) {
            totalRecords += ch.numRecords;
        }
        return totalRecords;
    }
}
package hudson.util;

import hudson.Functions;
import hudson.Util;
import hudson.model.TaskListener;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.security.GeneralSecurityException;
import java.security.InvalidKeyException;
import java.util.Base64;
import java.util.HashSet;
import java.util.Set;

/**
 * Rewrites XML files by looking for Secrets that are stored with the old key and replaces them
 * by the new encrypted values.
 *
 * @author Kohsuke Kawaguchi
 */
public class SecretRewriter {
    /** Cipher used to attempt decryption with the legacy key. */
    private final Cipher cipher;
    /** The legacy (historical) encryption key. */
    private final SecretKey key;

    /**
     * How many files have been scanned?
     */
    private int count;

    /**
     * Canonical paths of the directories we are recursing to protect
     * against symlink induced cycles.
     */
    private Set<String> callstack = new HashSet<>();

    public SecretRewriter() throws GeneralSecurityException {
        cipher = Secret.getCipher("AES");
        key = HistoricalSecrets.getLegacyKey();
    }

    /** @deprecated SECURITY-376: {@code backupDirectory} is ignored */
    @Deprecated
    public SecretRewriter(File backupDirectory) throws GeneralSecurityException {
        this();
    }

    /**
     * Attempts to re-encrypt one candidate token.
     *
     * @return the freshly encrypted value if {@code s} decrypts with the legacy
     *         key; otherwise {@code s} unchanged.
     */
    private String tryRewrite(String s) throws IOException, InvalidKeyException {
        if (s.length()<24)
            return s;   // Encrypting "" in Secret produces 24-letter characters, so this must be the minimum length
        if (!isBase64(s))
            return s;   // cheap character-class filter before attempting a real decode

        byte[] in;
        try {
            in = Base64.getDecoder().decode(s.getBytes(StandardCharsets.UTF_8));
        } catch (IllegalArgumentException e) {
            return s;   // not a valid base64
        }
        cipher.init(Cipher.DECRYPT_MODE, key);
        Secret sec = HistoricalSecrets.tryDecrypt(cipher, in);
        if(sec!=null) // matched
            return sec.getEncryptedValue(); // replace by the new encrypted value
        else // not encrypted with the legacy key. leave it unmodified
            return s;
    }

    /** @deprecated SECURITY-376: {@code backup} is ignored */
    @Deprecated
    public boolean rewrite(File f, File backup) throws InvalidKeyException, IOException {
        return rewrite(f);
    }

    /**
     * Rewrites one XML file in place: every element body ({@code >text<} span)
     * that decrypts with the legacy key is replaced by its re-encrypted value.
     * The replacement file is only committed when something actually changed.
     *
     * @return true if the file was modified
     */
    public boolean rewrite(File f) throws InvalidKeyException, IOException {
        AtomicFileWriter w = new AtomicFileWriter(f, "UTF-8");
        try {
            boolean modified = false; // did we actually change anything?
            try (PrintWriter out = new PrintWriter(new BufferedWriter(w));
                 InputStream fin = Files.newInputStream(Util.fileToPath(f));
                 BufferedReader r = new BufferedReader(new InputStreamReader(fin, StandardCharsets.UTF_8))) {
                String line;
                StringBuilder buf = new StringBuilder();

                while ((line = r.readLine()) != null) {
                    int copied = 0;
                    buf.setLength(0);
                    // scan this line for ">text<" spans, i.e. element bodies
                    while (true) {
                        int sidx = line.indexOf('>', copied);
                        if (sidx < 0) break;
                        int eidx = line.indexOf('<', sidx);
                        if (eidx < 0) break;

                        String elementText = line.substring(sidx + 1, eidx);
                        String replacement = tryRewrite(elementText);
                        if (!replacement.equals(elementText))
                            modified = true;

                        // copy everything up to and including '>', then the
                        // (possibly rewritten) element text; '<' is copied on the
                        // next iteration or by the final substring below
                        buf.append(line, copied, sidx + 1);
                        buf.append(replacement);
                        copied = eidx;
                    }
                    buf.append(line.substring(copied));
                    out.println(buf.toString());
                }
            }

            if (modified) {
                w.commit();
            }
            return modified;
        } finally {
            // NOTE(review): abort() also runs after a successful commit();
            // presumably a no-op once committed — verify AtomicFileWriter's contract.
            w.abort();
        }
    }

    /**
     * Recursively scans and rewrites a directory.
     *
     * This method shouldn't abort just because one file fails to rewrite.
     *
     * @return
     *      Number of files that were actually rewritten.
     */
    // synchronized to prevent accidental concurrent use.
// This instance is not thread safe.
    public synchronized int rewriteRecursive(File dir, TaskListener listener) throws InvalidKeyException {
        return rewriteRecursive(dir, "", listener);
    }

    /**
     * Walks {@code dir} depth-first, rewriting every {@code *.xml} file found.
     * Canonical paths of directories on the current descent are tracked in
     * {@link #callstack} so symlink-induced cycles are detected and skipped.
     *
     * @return number of files that were actually rewritten
     */
    private int rewriteRecursive(File dir, String relative, TaskListener listener) throws InvalidKeyException {
        String canonical;
        try {
            canonical = dir.getCanonicalPath();
        } catch (IOException | InvalidPathException e) {
            // fall back to the absolute path when canonicalization fails
            canonical = dir.getAbsolutePath();
        }
        if (!callstack.add(canonical)) {
            listener.getLogger().println("Cycle detected: " + dir);
            return 0;
        }
        try {
            final File[] entries = dir.listFiles();
            if (entries == null) {
                return 0;
            }
            int rewrittenCount = 0;
            for (final File entry : entries) {
                final String entryName = entry.getName();
                if (entryName.endsWith(".xml")) {
                    // report progress on the first file and every 100th thereafter
                    if (count++ % 100 == 0) {
                        listener.getLogger().println("Scanning " + entry);
                    }
                    try {
                        if (rewrite(entry)) {
                            listener.getLogger().println("Rewritten " + entry);
                            rewrittenCount++;
                        }
                    } catch (IOException e) {
                        // one broken file must not abort the whole scan
                        Functions.printStackTrace(e, listener.error("Failed to rewrite " + entry));
                    }
                }
                if (entry.isDirectory() && !isIgnoredDir(entry)) {
                    final String childRelative =
                            relative.length() == 0 ? entryName : relative + '/' + entryName;
                    rewrittenCount += rewriteRecursive(entry, childRelative, listener);
                }
            }
            return rewrittenCount;
        } finally {
            callstack.remove(canonical);
        }
    }

    /**
     * Decides if this directory is worth visiting or not.
     */
    protected boolean isIgnoredDir(File dir) {
        // ignoring the workspace and the artifacts directories. Both of them
        // are potentially large and they do not store any secrets.
        switch (dir.getName()) {
            case "workspace":
            case "artifacts":
            case "plugins": // no mutable data here
            case ".":
            case "..":
                return true;
            default:
                return false;
        }
    }

    /** Returns true if {@code ch} may appear in a base64-encoded string. */
    private static boolean isBase64(char ch) {
        return ch < 128 && IS_BASE64[ch];
    }

    /** Returns true if every character of {@code s} is a base64 character. */
    private static boolean isBase64(String s) {
        final int len = s.length();
        for (int i = 0; i < len; i++) {
            if (!isBase64(s.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /** Lookup table: IS_BASE64[c] is true for the base64 alphabet incl. padding. */
    private static final boolean[] IS_BASE64 = new boolean[128];
    static {
        final String chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
        for (int i = 0; i < chars.length(); i++) {
            IS_BASE64[chars.charAt(i)] = true;
        }
    }
}
package org.basex.core.parse;

import static org.basex.core.Text.*;
import static org.basex.util.Token.*;

import java.util.*;

import org.basex.core.*;
import org.basex.core.cmd.*;
import org.basex.core.cmd.List;
import org.basex.core.cmd.Set;
import org.basex.core.parse.Commands.Cmd;
import org.basex.core.parse.Commands.CmdAlter;
import org.basex.core.parse.Commands.CmdCreate;
import org.basex.core.parse.Commands.CmdDrop;
import org.basex.core.parse.Commands.CmdIndex;
import org.basex.core.parse.Commands.CmdIndexInfo;
import org.basex.core.parse.Commands.CmdInfo;
import org.basex.core.parse.Commands.CmdOptimize;
import org.basex.core.parse.Commands.CmdPerm;
import org.basex.core.parse.Commands.CmdRepo;
import org.basex.core.parse.Commands.CmdShow;
import org.basex.query.*;
import org.basex.query.value.item.*;
import org.basex.util.*;
import org.basex.util.list.*;

/**
 * This is a parser for command strings, creating {@link Command} instances.
 * Several commands can be formulated in one string and separated by semicolons.
 *
 * @author BaseX Team 2005-15, BSD License
 * @author Christian Gruen
 */
final class StringParser extends CmdParser {
  /** Current parser. */
  private InputParser parser;

  /**
   * Constructor.
   * @param input input
   * @param context database context
   */
  StringParser(final String input, final Context context) {
    super(input, context);
  }

  @Override
  protected void parse(final ArrayList<Command> cmds) throws QueryException {
    // in single mode the whole input is one command; otherwise split on newlines
    final Scanner sc = new Scanner(input).useDelimiter(single ? "\0" : "\r\n?|\n");
    while(sc.hasNext()) {
      final String line = sc.next().trim();
      // skip empty lines and comments
      if(line.isEmpty() || line.startsWith("#")) continue;
      parser = new InputParser(line);
      parser.file = ctx.options.get(MainOptions.QUERYPATH);
      while(parser.more()) {
        final Cmd cmd = consume(Cmd.class, null);
        if(cmd != null) cmds.add(parse(cmd));
        if(parser.more() && !parser.consume(';')) throw help(null, cmd);
      }
    }
  }

  /**
   * Parses a single command.
   * @param cmd command definition
   * @return resulting command
   * @throws QueryException query exception
   */
  private Command parse(final Cmd cmd) throws QueryException {
    switch(cmd) {
      case CREATE:
        switch(consume(CmdCreate.class, cmd)) {
          case BACKUP:
            return new CreateBackup(glob(cmd));
          case DATABASE: case DB:
            return new CreateDB(name(cmd), remaining(null));
          case INDEX:
            return new CreateIndex(consume(CmdIndex.class, cmd));
          case USER:
            return new CreateUser(name(cmd), password());
        }
        break;
      case COPY:
        return new Copy(name(cmd), name(cmd));
      case ALTER:
        switch(consume(CmdAlter.class, cmd)) {
          case DATABASE: case DB:
            return new AlterDB(name(cmd), name(cmd));
          case PASSWORD:
            return new AlterPassword(name(cmd), password());
          case USER:
            return new AlterUser(name(cmd), name(cmd));
        }
        break;
      case OPEN:
        return new Open(name(cmd), string(null));
      case CHECK:
        return new Check(string(cmd));
      case ADD:
        // optional "TO <target>" clause before the input
        final String aa = key(S_TO, null) ? string(cmd) : null;
        return new Add(aa, remaining(cmd));
      case STORE:
        final String sa = key(S_TO, null) ? string(cmd) : null;
        return new Store(sa, remaining(cmd));
      case RETRIEVE:
        return new Retrieve(string(cmd));
      case DELETE:
        return new Delete(string(cmd));
      case RENAME:
        return new Rename(string(cmd), string(cmd));
      case REPLACE:
        return new Replace(string(cmd), remaining(cmd));
      case INFO:
        switch(consume(CmdInfo.class, cmd)) {
          case NULL:
            return new Info();
          case DATABASE: case DB:
            return new InfoDB();
          case INDEX:
            return new InfoIndex(consume(CmdIndexInfo.class, null));
          case STORAGE:
            // either two numbers, or a single xquery expression
            String arg1 = number();
            final String arg2 = arg1 != null ? number() : null;
            if(arg1 == null) arg1 = xquery(null);
            return new InfoStorage(arg1, arg2);
        }
        break;
      case INSPECT:
        return new Inspect();
      case CLOSE:
        return new Close();
      case LIST:
        return new List(name(null), string(null));
      case DROP:
        switch(consume(CmdDrop.class, cmd)) {
          case DATABASE: case DB:
            return new DropDB(glob(cmd));
          case INDEX:
            return new DropIndex(consume(CmdIndex.class, cmd));
          case USER:
            return new DropUser(glob(cmd), key(ON, null) ? glob(cmd) : null);
          case BACKUP:
            return new DropBackup(glob(cmd));
        }
        break;
      case OPTIMIZE:
        switch(consume(CmdOptimize.class, cmd)) {
          case NULL:
            return new Optimize();
          case ALL:
            return new OptimizeAll();
        }
        break;
      case EXPORT:
        return new Export(string(cmd));
      case XQUERY:
        return new XQuery(xquery(cmd));
      case RUN:
        return new Run(string(cmd));
      case TEST:
        return new Test(string(cmd));
      case EXECUTE:
        return new Execute(string(cmd, false));
      case FIND:
        return new Find(string(cmd, false));
      case GET:
        return new Get(name(null));
      case SET:
        return new Set(name(cmd), string(null, false));
      case PASSWORD:
        return new Password(password());
      case HELP:
        return new Help(name(null));
      case EXIT: case QUIT:
        return new Exit();
      case FLUSH:
        return new Flush();
      case KILL:
        return new Kill(string(cmd));
      case RESTORE:
        return new Restore(name(cmd));
      case SHOW:
        switch(consume(CmdShow.class, cmd)) {
          case SESSIONS:
            return new ShowSessions();
          case USERS:
            return new ShowUsers(key(ON, null) ? name(cmd) : null);
          case BACKUPS:
            return new ShowBackups();
        }
        break;
      case GRANT:
        final CmdPerm perm = consume(CmdPerm.class, cmd);
        if(perm == null) throw help(null, cmd);
        // optional "ON <db>" clause, then mandatory "TO <user>"
        final String db = key(ON, null) ? glob(cmd) : null;
        key(S_TO, cmd);
        return new Grant(perm, glob(cmd), db);
      case REPO:
        switch(consume(CmdRepo.class, cmd)) {
          case INSTALL:
            return new RepoInstall(string(cmd), new InputInfo(parser));
          case DELETE:
            return new RepoDelete(string(cmd), new InputInfo(parser));
          case LIST:
            return new RepoList();
        }
        break;
    }
    // falling out of a nested switch means an unhandled sub-command
    throw Util.notExpected("Command specified, but not implemented yet");
  }

  /**
   * Parses and returns a string, delimited by a space or semicolon.
   * Quotes can be used to include spaces.
   * @param cmd referring command; if specified, the result must not be empty
   * @return string
   * @throws QueryException query exception
   */
  private String string(final Cmd cmd) throws QueryException {
    return string(cmd, true);
  }

  /**
   * Parses and returns a string, delimited by a semicolon or, optionally, a space.
   * Quotes can be used to include spaces.
   * @param cmd referring command; if specified, the result must not be empty
   * @param space stop when encountering space
   * @return string
   * @throws QueryException query exception
   */
  private String string(final Cmd cmd, final boolean space) throws QueryException {
    final StringBuilder sb = new StringBuilder();
    consumeWS();
    boolean q = false;
    while(parser.more()) {
      final char c = parser.curr();
      // stop at whitespace (unless quoted) or end of command
      if(!q && ((space ? c <= ' ' : c < ' ') || eoc())) break;
      if(c == '"') q ^= true;
      else sb.append(c);
      parser.consume();
    }
    return finish(sb, cmd);
  }

  /**
   * Parses and returns the remaining string. Quotes at the beginning and end
   * of the argument will be stripped.
   * @param cmd referring command; if specified, the result must not be empty
   * @return remaining string
   * @throws QueryException query exception
   */
  private String remaining(final Cmd cmd) throws QueryException {
    if(single) {
      final StringBuilder sb = new StringBuilder();
      consumeWS();
      while(parser.more()) sb.append(parser.consume());
      String arg = finish(sb, cmd);
      if(arg != null) {
        // chop quotes; substrings are faster than replaces...
// continuation of remaining(): strip one leading and one trailing quote
        if(arg.startsWith("\"")) arg = arg.substring(1);
        if(arg.endsWith("\"")) arg = arg.substring(0, arg.length() - 1);
      }
      return arg;
    }
    return string(cmd, false);
  }

  /**
   * Parses and returns an xquery expression.
   * @param cmd referring command; if specified, the result must not be empty
   * @return path
   * @throws QueryException query exception
   */
  private String xquery(final Cmd cmd) throws QueryException {
    consumeWS();
    final StringBuilder sb = new StringBuilder();
    if(!eoc()) {
      // delegate to the real XQuery parser to find the expression's end
      try(final QueryContext qc = new QueryContext(ctx)) {
        final QueryParser p = new QueryParser(parser.input, null, qc, null);
        p.pos = parser.pos;
        p.parseMain();
        sb.append(parser.input.substring(parser.pos, p.pos));
        parser.pos = p.pos;
      }
    }
    return finish(sb, cmd);
  }

  /**
   * Parses and returns a command token (everything up to the next whitespace
   * or end of command).
   * @return name
   * @throws QueryException query exception
   */
  private String command() throws QueryException {
    consumeWS();
    final StringBuilder sb = new StringBuilder();
    while(!eoc() && !ws(parser.curr())) {
      sb.append(parser.consume());
    }
    return finish(sb, null);
  }

  /**
   * Parses and returns a name. A name may contain letters, numbers and any of the special
   * characters <code>!#$%&'()+-=@[]^_`{}~</code>.
   * @param cmd referring command; if specified, the result must not be empty
   * @return name
   * @throws QueryException query exception
   */
  private String name(final Cmd cmd) throws QueryException {
    consumeWS();
    final StringBuilder sb = new StringBuilder();
    while(Databases.validChar(parser.curr())) sb.append(parser.consume());
    // the name must be followed by whitespace or the end of the command
    return finish(eoc() || ws(parser.curr()) ? sb : null, cmd);
  }

  /**
   * Parses and returns a password string; falls back to the interactive
   * password reader when none was supplied inline.
   * @return password string
   * @throws QueryException query exception
   */
  private String password() throws QueryException {
    final String pw = string(null);
    return pw != null ? pw : pwReader == null ? "" : pwReader.password();
  }

  /**
   * Parses and returns a glob expression, which extends {@link #name(Cmd)} function
   * with asterisks, question marks and commands.
   * @param cmd referring command; if specified, the result must not be empty
   * @return glob expression
   * @throws QueryException query exception
   */
  private String glob(final Cmd cmd) throws QueryException {
    consumeWS();
    final StringBuilder sb = new StringBuilder();
    while(true) {
      final char ch = parser.curr();
      if(!Databases.validChar(ch) && ch != '*' && ch != '?' && ch != ',') {
        return finish(eoc() || ws(ch) ? sb : null, cmd);
      }
      sb.append(parser.consume());
    }
  }

  /**
   * Parses and returns the specified keyword.
   * @param key token to be parsed
   * @param cmd referring command; if specified, the keyword is mandatory
   * @return result of check
   * @throws QueryException query exception
   */
  private boolean key(final String key, final Cmd cmd) throws QueryException {
    consumeWS();
    final int p = parser.pos;
    // accept the keyword in upper or lower case, followed by EOF or whitespace
    final boolean ok = (parser.consume(key) ||
        parser.consume(key.toLowerCase(Locale.ENGLISH))) &&
        (parser.curr(0) || ws(parser.curr()));
    if(!ok) {
      // rewind on failure
      parser.pos = p;
      if(cmd != null) throw help(null, cmd);
    }
    return ok;
  }

  /**
   * Parses and returns a string result.
   * @param string input string or {@code null} if invalid
   * @param cmd referring command; if specified, the result must not be empty
   * @return string result or {@code null}
   * @throws QueryException query exception
   */
  private String finish(final StringBuilder string, final Cmd cmd) throws QueryException {
    if(string != null && string.length() != 0) return string.toString();
    if(cmd != null) throw help(null, cmd);
    return null;
  }

  /**
   * Parses and returns a (possibly negative) integer literal.
   * @return name
   * @throws QueryException query exception
   */
  private String number() throws QueryException {
    consumeWS();
    final StringBuilder sb = new StringBuilder();
    if(parser.curr() == '-') sb.append(parser.consume());
    while(digit(parser.curr())) sb.append(parser.consume());
    return finish(eoc() || ws(parser.curr()) ? sb : null, null);
  }

  /**
   * Consumes all whitespace characters from the beginning of the remaining
   * query.
   */
  private void consumeWS() {
    final int pl = parser.length;
    while(parser.pos < pl && parser.input.charAt(parser.pos) <= ' ') ++parser.pos;
    parser.mark = parser.pos - 1;
  }

  /**
   * Returns the found command or throws an exception.
   * @param cmp possible completions
   * @param par parent command
   * @param <E> token type
   * @return index
   * @throws QueryException query exception
   */
  private <E extends Enum<E>> E consume(final Class<E> cmp, final Cmd par) throws QueryException {
    final String token = command();
    // try a direct enum lookup unless we are in suggestion mode with an
    // empty (but non-null) token
    if(!suggest || token == null || !token.isEmpty()) {
      try {
        // return command reference; allow empty strings as input ("NULL")
        return Enum.valueOf(cmp, token == null ? "NULL" : token.toUpperCase(Locale.ENGLISH));
      } catch(final IllegalArgumentException ignore) { }
    }

    final Enum<?>[] alt = startWith(cmp, token);
    // handle empty input
    if(token == null) {
      if(par != null) throw help(alt, par);
      if(suggest) throw error(alt, EXPECTING_CMD);
      return null;
    }

    // output error for similar commands
    final byte[] name = uc(token(token));
    final Levenshtein ls = new Levenshtein();
    for(final Enum<?> s : startWith(cmp, null)) {
      final byte[] sm = uc(token(s.name()));
      if(ls.similar(name, sm) && Cmd.class.isInstance(s)) {
        throw error(alt, UNKNOWN_SIMILAR_X, name, sm);
      }
    }

    // show unknown command error or available command extensions
    throw par == null ? error(alt, UNKNOWN_TRY_X, token) : help(alt, par);
  }

  /**
   * Returns help output as query exception instance.
   * Prints some command info.
   * @param alt input alternatives
   * @param cmd input completions
   * @return QueryException query exception
   */
  private QueryException help(final Enum<?>[] alt, final Cmd cmd) {
    return error(alt, SYNTAX_X, cmd.help(true));
  }

  /**
   * Returns all commands that start with the specified user input.
   * @param <T> token type
   * @param en available commands
   * @param prefix user input
   * @return completions
   */
  private static <T extends Enum<T>> Enum<?>[] startWith(final Class<T> en,
      final String prefix) {
    Enum<?>[] list = new Enum<?>[0];
    final String t = prefix == null ? "" : prefix.toUpperCase(Locale.ENGLISH);
    for(final Enum<?> e : en.getEnumConstants()) {
      if(e.name().startsWith(t)) {
        final int s = list.length;
        list = Array.copy(list, new Enum<?>[s + 1]);
        list[s] = e;
      }
    }
    return list;
  }

  /**
   * Checks if the end of a command has been reached.
   * @return true if command has ended
   */
  private boolean eoc() {
    return !parser.more() || parser.curr() == ';';
  }

  /**
   * Returns a query exception instance.
   * @param comp input completions
   * @param msg message
   * @param ext extension
   * @return query exception
   */
  private QueryException error(final Enum<?>[] comp, final String msg, final Object... ext) {
    return new QueryException(parser.info(), new QNm(), msg, ext).suggest(parser, list(comp));
  }

  /**
   * Converts the specified commands into a string list.
   * @param comp input completions
   * @return string list
   */
  private static StringList list(final Enum<?>[] comp) {
    final StringList list = new StringList();
    if(comp != null) {
      for(final Enum<?> c : comp) list.add(c.name().toLowerCase(Locale.ENGLISH));
    }
    return list;
  }
}
/* * Copyright (c) 2011, salesforce.com, inc. * All rights reserved. * Redistribution and use of this software in source and binary forms, with or * without modification, are permitted provided that the following conditions * are met: * - Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * - Neither the name of salesforce.com, inc. nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission of salesforce.com, inc. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ package com.salesforce.androidsdk.auth; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.List; import org.apache.http.client.ClientProtocolException; import android.accounts.AbstractAccountAuthenticator; import android.accounts.Account; import android.accounts.AccountAuthenticatorResponse; import android.accounts.AccountManager; import android.accounts.NetworkErrorException; import android.app.ActivityManager; import android.app.Service; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.os.IBinder; import android.util.Log; import com.salesforce.androidsdk.app.SalesforceSDKManager; import com.salesforce.androidsdk.auth.OAuth2.OAuthFailedException; import com.salesforce.androidsdk.auth.OAuth2.TokenEndpointResponse; /** * The service used for taking care of authentication for a Salesforce-based application. * See {@link <a href="http://developer.android.com/reference/android/accounts/AbstractAccountAuthenticator.html">AbstractAccountAuthenticator</a>}. 
*/
public class AuthenticatorService extends Service {

    /** Lazily created singleton authenticator, shared across bind requests. */
    private static Authenticator authenticator;

    // Keys to extra info in the account
    public static final String KEY_LOGIN_URL = "loginUrl";
    public static final String KEY_INSTANCE_URL = "instanceUrl";
    public static final String KEY_USER_ID = "userId";
    public static final String KEY_CLIENT_ID = "clientId";
    public static final String KEY_ORG_ID = "orgId";
    public static final String KEY_USERNAME = "username";
    public static final String KEY_ID_URL = "id";
    public static final String KEY_CLIENT_SECRET = "clientSecret";
    public static final String KEY_COMMUNITY_ID = "communityId";
    public static final String KEY_COMMUNITY_URL = "communityUrl";

    /** Returns the singleton authenticator, creating it on first use. */
    private Authenticator getAuthenticator() {
        if (authenticator == null)
            authenticator = new Authenticator(this);
        return authenticator;
    }

    @Override
    public IBinder onBind(Intent intent) {
        // only answer binds coming from the account framework
        if (intent.getAction().equals(AccountManager.ACTION_AUTHENTICATOR_INTENT))
            return getAuthenticator().getIBinder();
        return null;
    }

    /**
     * The Authenticator for salesforce accounts.
     *
     * - addAccount Start the login flow (by launching the activity filtering the salesforce.intent.action.LOGIN intent).
     * - getAuthToken Refresh the token by calling {@link OAuth2#refreshAuthToken(HttpAccess, URI, String, String) OAuth2.refreshAuthToken}.
*/ private static class Authenticator extends AbstractAccountAuthenticator { private static final String SETTINGS_PACKAGE_NAME = "com.android.settings"; private static final String ANDROID_PACKAGE_NAME = "androidPackageName"; private final Context context; Authenticator(Context ctx) { super(ctx); this.context = ctx; } @Override public Bundle addAccount( AccountAuthenticatorResponse response, String accountType, String authTokenType, String[] requiredFeatures, Bundle options) throws NetworkErrorException { // Log.i("Authenticator:addAccount", "Options: " + options); if (isAddFromSettings(options)) { options.putAll(SalesforceSDKManager.getInstance().getLoginOptions().asBundle()); } return makeAuthIntentBundle(response, options); } private boolean isAddFromSettings(Bundle options) { // Is there a better way? return options.containsKey(ANDROID_PACKAGE_NAME) && options.getString(ANDROID_PACKAGE_NAME).equals(SETTINGS_PACKAGE_NAME); } @SuppressWarnings("deprecation") @Override public Bundle getAccountRemovalAllowed(AccountAuthenticatorResponse response, Account account) { final Bundle result = new Bundle(); final ActivityManager manager = (ActivityManager) context.getSystemService(ACTIVITY_SERVICE); /* * Allowing account removal from the Settings app is quite messy, * since we don't know which account is being removed. Hence, we * check which package the account removal call is coming from, * and decide whether to allow it or not. Unfortunately, the only * way to do this is the convoluted way used below, which basically * gets a list of running tasks and get the topmost activity on * the task in focus. If the call is coming from the Settings app, * the topmost activity's package will be the Settings app. * * FIXME: The following piece of code does nothing on Lollipop and * above, since Google has revoked the ability to get the list of * running tasks outside of the application stack. We'll need to * figure out a different strategy to handle this. 
One approach * is to launch a custom logout flow for 'Settings' (if that's possible). */ boolean isNotRemoveFromSettings = true; if (manager != null) { final List<ActivityManager.RunningTaskInfo> task = manager.getRunningTasks(1); if (task != null && task.size() > 0) { final ComponentName componentInfo = task.get(0).topActivity; if (componentInfo != null) { if (SETTINGS_PACKAGE_NAME.equals(componentInfo.getPackageName())) { isNotRemoveFromSettings = false; } } } } result.putBoolean(AccountManager.KEY_BOOLEAN_RESULT, isNotRemoveFromSettings); return result; } /** * Uses the refresh token to get a new access token. * Remember that the authenticator runs under its own separate process, so if you want to debug you * need to attach to the :auth process, and not the main chatter process. */ @Override public Bundle getAuthToken( AccountAuthenticatorResponse response, Account account, String authTokenType, Bundle options) throws NetworkErrorException { final AccountManager mgr = AccountManager.get(context); final String passcodeHash = SalesforceSDKManager.getInstance().getPasscodeHash(); final String refreshToken = SalesforceSDKManager.decryptWithPasscode(mgr.getPassword(account), passcodeHash); final String loginServer = SalesforceSDKManager.decryptWithPasscode(mgr.getUserData(account, AuthenticatorService.KEY_LOGIN_URL), passcodeHash); final String clientId = SalesforceSDKManager.decryptWithPasscode(mgr.getUserData(account, AuthenticatorService.KEY_CLIENT_ID), passcodeHash); final String instServer = SalesforceSDKManager.decryptWithPasscode(mgr.getUserData(account, AuthenticatorService.KEY_INSTANCE_URL), passcodeHash); final String userId = SalesforceSDKManager.decryptWithPasscode(mgr.getUserData(account, AuthenticatorService.KEY_USER_ID), passcodeHash); final String orgId = SalesforceSDKManager.decryptWithPasscode(mgr.getUserData(account, AuthenticatorService.KEY_ORG_ID), passcodeHash); final String username = 
SalesforceSDKManager.decryptWithPasscode(mgr.getUserData(account, AuthenticatorService.KEY_USERNAME), passcodeHash); final String encClientSecret = mgr.getUserData(account, AuthenticatorService.KEY_CLIENT_SECRET); String clientSecret = null; if (encClientSecret != null) { clientSecret = SalesforceSDKManager.decryptWithPasscode(encClientSecret, passcodeHash); } final String encCommunityId = mgr.getUserData(account, AuthenticatorService.KEY_COMMUNITY_ID); String communityId = null; if (encCommunityId != null) { communityId = SalesforceSDKManager.decryptWithPasscode(encCommunityId, SalesforceSDKManager.getInstance().getPasscodeHash()); } final String encCommunityUrl = mgr.getUserData(account, AuthenticatorService.KEY_COMMUNITY_URL); String communityUrl = null; if (encCommunityUrl != null) { communityUrl = SalesforceSDKManager.decryptWithPasscode(encCommunityUrl, SalesforceSDKManager.getInstance().getPasscodeHash()); } final Bundle resBundle = new Bundle(); try { final TokenEndpointResponse tr = OAuth2.refreshAuthToken(HttpAccess.DEFAULT, new URI(loginServer), clientId, refreshToken, clientSecret); // Handle the case where the org has been migrated to a new instance, or has turned on my domains. if (!instServer.equalsIgnoreCase(tr.instanceUrl)) { mgr.setUserData(account, AuthenticatorService.KEY_INSTANCE_URL, SalesforceSDKManager.encryptWithPasscode(tr.instanceUrl, passcodeHash)); } // Update auth token in account. 
mgr.setUserData(account, AccountManager.KEY_AUTHTOKEN, SalesforceSDKManager.encryptWithPasscode(tr.authToken, passcodeHash)); resBundle.putString(AccountManager.KEY_ACCOUNT_NAME, account.name); resBundle.putString(AccountManager.KEY_ACCOUNT_TYPE, account.type); resBundle.putString(AccountManager.KEY_AUTHTOKEN, tr.authToken); resBundle.putString(AuthenticatorService.KEY_LOGIN_URL, SalesforceSDKManager.encryptWithPasscode(loginServer, passcodeHash)); resBundle.putString(AuthenticatorService.KEY_INSTANCE_URL, SalesforceSDKManager.encryptWithPasscode(instServer, passcodeHash)); resBundle.putString(AuthenticatorService.KEY_CLIENT_ID, SalesforceSDKManager.encryptWithPasscode(clientId, passcodeHash)); resBundle.putString(AuthenticatorService.KEY_USERNAME, SalesforceSDKManager.encryptWithPasscode(username, passcodeHash)); resBundle.putString(AuthenticatorService.KEY_USER_ID, SalesforceSDKManager.encryptWithPasscode(userId, passcodeHash)); resBundle.putString(AuthenticatorService.KEY_ORG_ID, SalesforceSDKManager.encryptWithPasscode(orgId, passcodeHash)); String encrClientSecret = null; if (clientSecret != null) { encrClientSecret = SalesforceSDKManager.encryptWithPasscode(clientSecret, passcodeHash); } resBundle.putString(AuthenticatorService.KEY_CLIENT_SECRET, encrClientSecret); String encrCommunityId = null; if (communityId != null) { encrCommunityId = SalesforceSDKManager.encryptWithPasscode(communityId, passcodeHash); } resBundle.putString(AuthenticatorService.KEY_COMMUNITY_ID, encrCommunityId); String encrCommunityUrl = null; if (communityUrl != null) { encrCommunityUrl = SalesforceSDKManager.encryptWithPasscode(communityUrl, passcodeHash); } resBundle.putString(AuthenticatorService.KEY_COMMUNITY_URL, encrCommunityUrl); // Log.i("Authenticator:getAuthToken", "Returning auth bundle for " + account.name); } catch (ClientProtocolException e) { Log.w("Authenticator:getAuthToken", "", e); throw new NetworkErrorException(e); } catch (IOException e) { 
// Network-level failure during token refresh: log and surface to the
// AccountManager as a NetworkErrorException so the caller can retry.
Log.w("Authenticator:getAuthToken", "", e);
throw new NetworkErrorException(e);
} catch (URISyntaxException e) {
    // Malformed login server URL — treated the same as a network failure.
    Log.w("Authenticator:getAuthToken", "", e);
    throw new NetworkErrorException(e);
} catch (OAuthFailedException e) {
    if (e.isRefreshTokenInvalid()) {
        Log.i("Authenticator:getAuthToken",
                "Invalid Refresh Token: (Error: " + e.response.error + ", Status Code: " + e.httpStatusCode + ")");
        // the exception explicitly indicates that the refresh token is no longer valid.
        // Kick the user back into the interactive login flow instead of failing.
        return makeAuthIntentBundle(response, options);
    }
    // Refresh token still valid but the server rejected the request:
    // report the OAuth error code/message back through the result bundle.
    resBundle.putString(AccountManager.KEY_ERROR_CODE, e.response.error);
    resBundle.putString(AccountManager.KEY_ERROR_MESSAGE, e.response.errorDescription);
}
// Log.i("Authenticator:getAuthToken", "Result: " + resBundle);
return resBundle;
}

/**
 * Return bundle with intent to start the login flow.
 *
 * @param response authenticator response handed back to the AccountManager
 *                 so the login activity can deliver its result
 * @param options  optional extras forwarded to the login activity (may be null)
 * @return a bundle whose {@code AccountManager.KEY_INTENT} launches the
 *         SDK's configured login activity
 */
private Bundle makeAuthIntentBundle(AccountAuthenticatorResponse response, Bundle options) {
    Bundle reply = new Bundle();
    Intent i = new Intent(context, SalesforceSDKManager.getInstance().getLoginActivityClass());
    i.setPackage(context.getPackageName());
    i.setFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
    i.putExtra(AccountManager.KEY_ACCOUNT_AUTHENTICATOR_RESPONSE, response);
    if (options != null)
        i.putExtras(options);
    reply.putParcelable(AccountManager.KEY_INTENT, i);
    return reply;
}

// The remaining AbstractAccountAuthenticator callbacks are not supported by
// this authenticator; each returns null, which the framework treats as
// "operation not supported".

@Override
public Bundle updateCredentials(AccountAuthenticatorResponse response, Account account,
        String authTokenType, Bundle options) throws NetworkErrorException {
    return null;
}

@Override
public Bundle confirmCredentials(AccountAuthenticatorResponse response, Account account,
        Bundle options) throws NetworkErrorException {
    return null;
}

@Override
public Bundle editProperties(AccountAuthenticatorResponse response, String accountType) {
    return null;
}

@Override
public String getAuthTokenLabel(String authTokenType) {
    return null;
}

@Override
public Bundle hasFeatures(AccountAuthenticatorResponse response, Account account,
        String[] features) throws NetworkErrorException {
    return null;
}
}
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.test.discovery; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.FaultDetection; import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.discovery.zen.ZenPing; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.testframework.ESIntegTestCase; import org.elasticsearch.testframework.discovery.ClusterDiscoveryConfiguration; import org.elasticsearch.testframework.discovery.TestZenDiscovery; import org.elasticsearch.testframework.disruption.NetworkDisruption; import org.elasticsearch.testframework.disruption.NetworkDisruption.Bridge; import org.elasticsearch.testframework.disruption.NetworkDisruption.DisruptedLinks; 
import org.elasticsearch.testframework.disruption.NetworkDisruption.NetworkDisconnect;
import org.elasticsearch.testframework.disruption.NetworkDisruption.NetworkLinkDisruptionType;
import org.elasticsearch.testframework.disruption.NetworkDisruption.TwoPartitions;
import org.elasticsearch.testframework.disruption.ServiceDisruptionScheme;
import org.elasticsearch.testframework.disruption.SlowClusterStateProcessing;
import org.elasticsearch.testframework.transport.MockTransportService;
import org.elasticsearch.transport.TcpTransport;
import org.junit.Before;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

/**
 * Base class for integration tests that simulate network partitions and other
 * cluster disruptions. Provides cluster setup with aggressive fault-detection
 * timeouts plus assertion helpers about master election state.
 */
public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {

    static final TimeValue DISRUPTION_HEALING_OVERHEAD = TimeValue.timeValueSeconds(40); // we use 30s as timeout in many places.

    // Discovery configuration for the current test; reset before each test by clearConfig().
    private ClusterDiscoveryConfiguration discoveryConfig;

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        // Real (non-mock) pings are required so network disruptions actually affect discovery.
        return Settings.builder().put(discoveryConfig.nodeSettings(nodeOrdinal))
            .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false).build();
    }

    @Before
    public void clearConfig() {
        discoveryConfig = null;
    }

    @Override
    protected int numberOfShards() {
        return 3;
    }

    @Override
    protected int numberOfReplicas() {
        return 1;
    }

    // When true, skips the usual pre-delete assertions; set when a disruption
    // scheme can leave operations permanently in flight.
    private boolean disableBeforeIndexDeletion;

    @Before
    public void setUp() throws Exception {
        super.setUp();
        disableBeforeIndexDeletion = false;
    }

    @Override
    public void setDisruptionScheme(ServiceDisruptionScheme scheme) {
        if (scheme instanceof NetworkDisruption &&
            ((NetworkDisruption) scheme).getNetworkLinkDisruptionType() instanceof NetworkDisruption.NetworkUnresponsive) {
            // the network unresponsive disruption may leave operations in flight
            // this is because this disruption scheme swallows requests by design
            // as such, these operations will never be marked as finished
            disableBeforeIndexDeletion = true;
        }
        super.setDisruptionScheme(scheme);
    }

    @Override
    protected void beforeIndexDeletion() throws Exception {
        if (disableBeforeIndexDeletion == false) {
            super.beforeIndexDeletion();
        }
    }

    // Starts a cluster with the default minimum master nodes (quorum of numberOfNodes).
    List<String> startCluster(int numberOfNodes) throws ExecutionException, InterruptedException {
        return startCluster(numberOfNodes, -1);
    }

    List<String> startCluster(int numberOfNodes, int minimumMasterNode) throws ExecutionException, InterruptedException {
        return startCluster(numberOfNodes, minimumMasterNode, null);
    }

    List<String> startCluster(int numberOfNodes, int minimumMasterNode,
            @Nullable int[] unicastHostsOrdinals) throws ExecutionException, InterruptedException {
        configureCluster(numberOfNodes, unicastHostsOrdinals, minimumMasterNode);
        List<String> nodes = internalCluster().startNodes(numberOfNodes);
        ensureStableCluster(numberOfNodes);

        // TODO: this is a temporary solution so that nodes will not base their reaction to a partition based on previous successful results
        ZenPing zenPing = ((TestZenDiscovery) internalCluster().getInstance(Discovery.class)).getZenPing();
        if (zenPing instanceof UnicastZenPing) {
            ((UnicastZenPing) zenPing).clearTemporalResponses();
        }
        return nodes;
    }

    static final Settings DEFAULT_SETTINGS = Settings.builder()
        .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly
        .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly
        .put("discovery.zen.join_timeout", "10s") // still long enough to induce failures, but not so long the test times out
        .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly
        .put(TcpTransport.TCP_CONNECT_TIMEOUT.getKey(), "10s") // Network delay disruption waits for the min between this
        // value and the time of disruption and does not recover immediately
        // when disruption is stopped. We should make sure we recover faster
        // than the default of 30s, causing ensureGreen and friends to time out
        .build();

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Arrays.asList(MockTransportService.TestPlugin.class);
    }

    void configureCluster(int numberOfNodes, @Nullable int[] unicastHostsOrdinals,
            int minimumMasterNode) throws ExecutionException, InterruptedException {
        configureCluster(DEFAULT_SETTINGS, numberOfNodes, unicastHostsOrdinals, minimumMasterNode);
    }

    void configureCluster(Settings settings, int numberOfNodes, @Nullable int[] unicastHostsOrdinals,
            int minimumMasterNode) throws ExecutionException, InterruptedException {
        // minimumMasterNode < 0 means "use a quorum of the requested node count".
        if (minimumMasterNode < 0) {
            minimumMasterNode = numberOfNodes / 2 + 1;
        }
        logger.info("---> configured unicast");
        // TODO: Rarely use default settings from some of these
        Settings nodeSettings = Settings.builder()
            .put(settings)
            .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numberOfNodes)
            .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minimumMasterNode)
            .build();

        // First call wins: a config installed by an earlier call in the same test is kept.
        if (discoveryConfig == null) {
            if (unicastHostsOrdinals == null) {
                discoveryConfig = new ClusterDiscoveryConfiguration.UnicastZen(numberOfNodes, nodeSettings);
            } else {
                discoveryConfig = new ClusterDiscoveryConfiguration.UnicastZen(numberOfNodes, nodeSettings, unicastHostsOrdinals);
            }
        }
    }

    // Reads the cluster state as seen locally by the given node (no master round-trip).
    ClusterState getNodeClusterState(String node) {
        return client(node).admin().cluster().prepareState().setLocal(true).get().getState();
    }

    void assertNoMaster(final String node) throws Exception {
        assertNoMaster(node, null, TimeValue.timeValueSeconds(10));
    }

    void assertNoMaster(final String node, TimeValue maxWaitTime) throws Exception {
        assertNoMaster(node, null, maxWaitTime);
    }

    // Asserts that, within maxWaitTime, the node sees no master and (optionally)
    // that the expected cluster blocks are installed.
    void assertNoMaster(final String node, @Nullable final ClusterBlock expectedBlocks, TimeValue maxWaitTime) throws Exception {
        assertBusy(() -> {
            ClusterState state = getNodeClusterState(node);
            final DiscoveryNodes nodes = state.nodes();
            assertNull("node [" + node + "] still has [" + nodes.getMasterNode() + "] as master", nodes.getMasterNode());
            if (expectedBlocks != null) {
                for (ClusterBlockLevel level : expectedBlocks.levels()) {
                    assertTrue("node [" + node + "] does have level [" + level + "] in it's blocks",
                        state.getBlocks().hasGlobalBlock(level));
                }
            }
        }, maxWaitTime.getMillis(), TimeUnit.MILLISECONDS);
    }

    void assertDifferentMaster(final String node, final String oldMasterNode) throws Exception {
        assertBusy(() -> {
            ClusterState state = getNodeClusterState(node);
            String masterNode = null;
            if (state.nodes().getMasterNode() != null) {
                masterNode = state.nodes().getMasterNode().getName();
            }
            logger.trace("[{}] master is [{}]", node, state.nodes().getMasterNode());
            assertThat("node [" + node + "] still has [" + masterNode + "] as master",
                oldMasterNode, not(equalTo(masterNode)));
        }, 10, TimeUnit.SECONDS);
    }

    // Asserts every listed node agrees on the given master and sees all listed nodes.
    void assertMaster(String masterNode, List<String> nodes) throws Exception {
        assertBusy(() -> {
            for (String node : nodes) {
                ClusterState state = getNodeClusterState(node);
                String failMsgSuffix = "cluster_state:\n" + state;
                assertThat("wrong node count on [" + node + "]. " + failMsgSuffix,
                    state.nodes().getSize(), equalTo(nodes.size()));
                String otherMasterNodeName =
                    state.nodes().getMasterNode() != null ? state.nodes().getMasterNode().getName() : null;
                assertThat("wrong master on node [" + node + "]. " + failMsgSuffix,
                    otherMasterNodeName, equalTo(masterNode));
            }
        });
    }

    // Installs a randomly chosen disruption scheme (partition/bridge links with
    // unresponsive/disconnect/delay behavior, or slow cluster-state processing).
    public ServiceDisruptionScheme addRandomDisruptionScheme() {
        // TODO: add partial partitions
        NetworkDisruption p; // NOTE(review): unused local — looks like leftover code
        final DisruptedLinks disruptedLinks;
        if (randomBoolean()) {
            disruptedLinks = TwoPartitions.random(random(), internalCluster().getNodeNames());
        } else {
            disruptedLinks = Bridge.random(random(), internalCluster().getNodeNames());
        }
        final NetworkLinkDisruptionType disruptionType;
        switch (randomInt(2)) {
            case 0:
                disruptionType = new NetworkDisruption.NetworkUnresponsive();
                break;
            case 1:
                disruptionType = new NetworkDisconnect();
                break;
            case 2:
                disruptionType = NetworkDisruption.NetworkDelay.random(random());
                break;
            default:
                throw new IllegalArgumentException();
        }
        final ServiceDisruptionScheme scheme;
        if (rarely()) {
            scheme = new SlowClusterStateProcessing(random());
        } else {
            scheme = new NetworkDisruption(disruptedLinks, disruptionType);
        }
        setDisruptionScheme(scheme);
        return scheme;
    }

    // Installs a disruption over the given partitions, randomly either
    // unresponsive (drops requests) or a hard disconnect.
    NetworkDisruption addRandomDisruptionType(TwoPartitions partitions) {
        final NetworkLinkDisruptionType disruptionType;
        if (randomBoolean()) {
            disruptionType = new NetworkDisruption.NetworkUnresponsive();
        } else {
            disruptionType = new NetworkDisconnect();
        }
        NetworkDisruption partition = new NetworkDisruption(partitions, disruptionType);
        setDisruptionScheme(partition);
        return partition;
    }

    // Builds a two-sided partition that isolates a single node from everyone else.
    TwoPartitions isolateNode(String isolatedNode) {
        Set<String> side1 = new HashSet<>();
        Set<String> side2 = new HashSet<>(Arrays.asList(internalCluster().getNodeNames()));
        side1.add(isolatedNode);
        side2.remove(isolatedNode);
        return new TwoPartitions(side1, side2);
    }
}
/**
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package io.reactivex.subjects;

import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import org.junit.*;
import org.mockito.*;

import io.reactivex.*;
import io.reactivex.disposables.Disposable;
import io.reactivex.exceptions.TestException;
import io.reactivex.functions.*;
import io.reactivex.observers.*;

/**
 * Tests for {@link PublishSubject}: terminal-event semantics, late subscribers,
 * disposal, and subscribe/terminate races.
 */
public class PublishSubjectTest extends SubjectTest<Integer> {

    @Override
    protected Subject<Integer> create() {
        return PublishSubject.create();
    }

    // After onComplete, further events must be ignored and late subscribers
    // should only see the terminal event.
    @Test
    public void testCompleted() {
        PublishSubject<String> subject = PublishSubject.create();

        Observer<String> observer = TestHelper.mockObserver();
        subject.subscribe(observer);

        subject.onNext("one");
        subject.onNext("two");
        subject.onNext("three");
        subject.onComplete();

        Observer<String> anotherSubscriber = TestHelper.mockObserver();
        subject.subscribe(anotherSubscriber);

        subject.onNext("four");
        subject.onComplete();
        subject.onError(new Throwable());

        assertCompletedSubscriber(observer);
        // todo bug?            assertNeverSubscriber(anotherSubscriber);
    }

    @Test
    public void testCompletedStopsEmittingData() {
        PublishSubject<Object> channel = PublishSubject.create();

        Observer<Object> observerA = TestHelper.mockObserver();
        Observer<Object> observerB = TestHelper.mockObserver();
        Observer<Object> observerC = TestHelper.mockObserver();

        TestObserver<Object> to = new TestObserver<Object>(observerA);

        channel.subscribe(to);
        channel.subscribe(observerB);

        InOrder inOrderA = inOrder(observerA);
        InOrder inOrderB = inOrder(observerB);
        InOrder inOrderC = inOrder(observerC);

        channel.onNext(42);

        inOrderA.verify(observerA).onNext(42);
        inOrderB.verify(observerB).onNext(42);

        // A is disposed; only B should receive further events.
        to.dispose();
        inOrderA.verifyNoMoreInteractions();

        channel.onNext(4711);

        inOrderB.verify(observerB).onNext(4711);

        channel.onComplete();

        inOrderB.verify(observerB).onComplete();

        // C subscribes after completion and should only get onComplete.
        channel.subscribe(observerC);

        inOrderC.verify(observerC).onComplete();

        channel.onNext(13);

        inOrderB.verifyNoMoreInteractions();
        inOrderC.verifyNoMoreInteractions();
    }

    private void assertCompletedSubscriber(Observer<String> observer) {
        verify(observer, times(1)).onNext("one");
        verify(observer, times(1)).onNext("two");
        verify(observer, times(1)).onNext("three");
        verify(observer, Mockito.never()).onError(any(Throwable.class));
        verify(observer, times(1)).onComplete();
    }

    // After onError, further events must be ignored.
    @Test
    public void testError() {
        PublishSubject<String> subject = PublishSubject.create();

        Observer<String> observer = TestHelper.mockObserver();
        subject.subscribe(observer);

        subject.onNext("one");
        subject.onNext("two");
        subject.onNext("three");
        subject.onError(testException);

        Observer<String> anotherSubscriber = TestHelper.mockObserver();
        subject.subscribe(anotherSubscriber);

        subject.onNext("four");
        subject.onError(new Throwable());
        subject.onComplete();

        assertErrorSubscriber(observer);
        // todo bug?            assertNeverSubscriber(anotherSubscriber);
    }

    private void assertErrorSubscriber(Observer<String> observer) {
        verify(observer, times(1)).onNext("one");
        verify(observer, times(1)).onNext("two");
        verify(observer, times(1)).onNext("three");
        verify(observer, times(1)).onError(testException);
        verify(observer, Mockito.never()).onComplete();
    }

    // A subscriber joining mid-stream only receives items emitted after it subscribed.
    @Test
    public void testSubscribeMidSequence() {
        PublishSubject<String> subject = PublishSubject.create();

        Observer<String> observer = TestHelper.mockObserver();
        subject.subscribe(observer);

        subject.onNext("one");
        subject.onNext("two");

        assertObservedUntilTwo(observer);

        Observer<String> anotherSubscriber = TestHelper.mockObserver();
        subject.subscribe(anotherSubscriber);

        subject.onNext("three");
        subject.onComplete();

        assertCompletedSubscriber(observer);
        assertCompletedStartingWithThreeSubscriber(anotherSubscriber);
    }

    private void assertCompletedStartingWithThreeSubscriber(Observer<String> observer) {
        verify(observer, Mockito.never()).onNext("one");
        verify(observer, Mockito.never()).onNext("two");
        verify(observer, times(1)).onNext("three");
        verify(observer, Mockito.never()).onError(any(Throwable.class));
        verify(observer, times(1)).onComplete();
    }

    // Disposing one subscriber must not affect later subscribers.
    @Test
    public void testUnsubscribeFirstSubscriber() {
        PublishSubject<String> subject = PublishSubject.create();

        Observer<String> observer = TestHelper.mockObserver();
        TestObserver<String> to = new TestObserver<String>(observer);
        subject.subscribe(to);

        subject.onNext("one");
        subject.onNext("two");

        to.dispose();
        assertObservedUntilTwo(observer);

        Observer<String> anotherSubscriber = TestHelper.mockObserver();
        subject.subscribe(anotherSubscriber);

        subject.onNext("three");
        subject.onComplete();

        assertObservedUntilTwo(observer);
        assertCompletedStartingWithThreeSubscriber(anotherSubscriber);
    }

    private void assertObservedUntilTwo(Observer<String> observer) {
        verify(observer, times(1)).onNext("one");
        verify(observer, times(1)).onNext("two");
        verify(observer, Mockito.never()).onNext("three");
        verify(observer, Mockito.never()).onError(any(Throwable.class));
        verify(observer, Mockito.never()).onComplete();
    }

    // Subscribing to the same subject from within an active flatMap must work
    // without deadlock; inner subscribers only see subsequent items.
    @Test
    public void testNestedSubscribe() {
        final PublishSubject<Integer> s = PublishSubject.create();

        final AtomicInteger countParent = new AtomicInteger();
        final AtomicInteger countChildren = new AtomicInteger();
        final AtomicInteger countTotal = new AtomicInteger();

        final ArrayList<String> list = new ArrayList<String>();

        s.flatMap(new Function<Integer, Observable<String>>() {
            @Override
            public Observable<String> apply(final Integer v) {
                countParent.incrementAndGet();

                // then subscribe to subject again (it will not receive the previous value)
                return s.map(new Function<Integer, String>() {
                    @Override
                    public String apply(Integer v2) {
                        countChildren.incrementAndGet();
                        return "Parent: " + v + " Child: " + v2;
                    }
                });
            }
        }).subscribe(new Consumer<String>() {
            @Override
            public void accept(String v) {
                countTotal.incrementAndGet();
                list.add(v);
            }
        });

        for (int i = 0; i < 10; i++) {
            s.onNext(i);
        }
        s.onComplete();

        // System.out.println("countParent: " + countParent.get());
        // System.out.println("countChildren: " + countChildren.get());
        // System.out.println("countTotal: " + countTotal.get());

        // 9+8+7+6+5+4+3+2+1+0 == 45
        assertEquals(45, list.size());
    }

    /**
     * Should be able to unsubscribe all Subscribers, have it stop emitting, then subscribe new ones and it start emitting again.
     */
    @Test
    public void testReSubscribe() {
        final PublishSubject<Integer> ps = PublishSubject.create();

        Observer<Integer> o1 = TestHelper.mockObserver();
        TestObserver<Integer> to = new TestObserver<Integer>(o1);
        ps.subscribe(to);

        // emit
        ps.onNext(1);

        // validate we got it
        InOrder inOrder1 = inOrder(o1);
        inOrder1.verify(o1, times(1)).onNext(1);
        inOrder1.verifyNoMoreInteractions();

        // unsubscribe
        to.dispose();

        // emit again but nothing will be there to receive it
        ps.onNext(2);

        Observer<Integer> o2 = TestHelper.mockObserver();
        TestObserver<Integer> to2 = new TestObserver<Integer>(o2);
        ps.subscribe(to2);

        // emit
        ps.onNext(3);

        // validate we got it
        InOrder inOrder2 = inOrder(o2);
        inOrder2.verify(o2, times(1)).onNext(3);
        inOrder2.verifyNoMoreInteractions();

        to2.dispose();
    }

    // Shared terminal error instance used by testError/assertErrorSubscriber
    // so identity-based verification works.
    private final Throwable testException = new Throwable();

    @Test(timeout = 1000)
    public void testUnsubscriptionCase() {
        PublishSubject<String> src = PublishSubject.create();

        for (int i = 0; i < 10; i++) {
            final Observer<Object> o = TestHelper.mockObserver();
            InOrder inOrder = inOrder(o);
            String v = "" + i;
            System.out.printf("Turn: %d%n", i);
            src.firstElement()
                .toObservable()
                .flatMap(new Function<String, Observable<String>>() {
                    @Override
                    public Observable<String> apply(String t1) {
                        return Observable.just(t1 + ", " + t1);
                    }
                })
                .subscribe(new DefaultObserver<String>() {
                    @Override
                    public void onNext(String t) {
                        o.onNext(t);
                    }

                    @Override
                    public void onError(Throwable e) {
                        o.onError(e);
                    }

                    @Override
                    public void onComplete() {
                        o.onComplete();
                    }
                });
            src.onNext(v);

            inOrder.verify(o).onNext(v + ", " + v);
            inOrder.verify(o).onComplete();
            verify(o, never()).onError(any(Throwable.class));
        }
    }

    // FIXME RS subscribers are not allowed to throw
//    @Test
//    public void testOnErrorThrowsDoesntPreventDelivery() {
//        PublishSubject<String> ps = PublishSubject.create();
//
//        ps.subscribe();
//        TestObserver<String> to = new TestObserver<String>();
//        ps.subscribe(to);
//
//        try {
//            ps.onError(new RuntimeException("an exception"));
//            fail("expect OnErrorNotImplementedException");
//        } catch (OnErrorNotImplementedException e) {
//            // ignore
//        }
//        // even though the onError above throws we should still receive it on the other subscriber
//        assertEquals(1, to.getOnErrorEvents().size());
//    }

    // FIXME RS subscribers are not allowed to throw
//    /**
//     * This one has multiple failures so should get a CompositeException
//     */
//    @Test
//    public void testOnErrorThrowsDoesntPreventDelivery2() {
//        PublishSubject<String> ps = PublishSubject.create();
//
//        ps.subscribe();
//        ps.subscribe();
//        TestObserver<String> to = new TestObserver<String>();
//        ps.subscribe(to);
//        ps.subscribe();
//        ps.subscribe();
//        ps.subscribe();
//
//        try {
//            ps.onError(new RuntimeException("an exception"));
//            fail("expect OnErrorNotImplementedException");
//        } catch (CompositeException e) {
//            // we should have 5 of them
//            assertEquals(5, e.getExceptions().size());
//        }
//        // even though the onError above throws we should still receive it on the other subscriber
//        assertEquals(1, to.getOnErrorEvents().size());
//    }

    @Test
    public void testCurrentStateMethodsNormal() {
        PublishSubject<Object> as = PublishSubject.create();

        assertFalse(as.hasThrowable());
        assertFalse(as.hasComplete());
        assertNull(as.getThrowable());

        as.onNext(1);

        assertFalse(as.hasThrowable());
        assertFalse(as.hasComplete());
        assertNull(as.getThrowable());

        as.onComplete();

        assertFalse(as.hasThrowable());
        assertTrue(as.hasComplete());
        assertNull(as.getThrowable());
    }

    @Test
    public void testCurrentStateMethodsEmpty() {
        PublishSubject<Object> as = PublishSubject.create();

        assertFalse(as.hasThrowable());
        assertFalse(as.hasComplete());
        assertNull(as.getThrowable());

        as.onComplete();

        assertFalse(as.hasThrowable());
        assertTrue(as.hasComplete());
        assertNull(as.getThrowable());
    }

    @Test
    public void testCurrentStateMethodsError() {
        PublishSubject<Object> as = PublishSubject.create();

        assertFalse(as.hasThrowable());
        assertFalse(as.hasComplete());
        assertNull(as.getThrowable());

        as.onError(new TestException());

        assertTrue(as.hasThrowable());
        assertFalse(as.hasComplete());
        assertTrue(as.getThrowable() instanceof TestException);
    }

    @Ignore("Observable doesn't do backpressure")
    @Test
    public void requestValidation() {
//        TestHelper.assertBadRequestReported(PublishSubject.create());
    }

    // Disposing a sibling observer from within onNext must stop its delivery.
    @Test
    public void crossCancel() {
        final TestObserver<Integer> to1 = new TestObserver<Integer>();
        TestObserver<Integer> to2 = new TestObserver<Integer>() {
            @Override
            public void onNext(Integer t) {
                super.onNext(t);
                to1.cancel();
            }
        };

        PublishSubject<Integer> ps = PublishSubject.create();

        ps.subscribe(to2);
        ps.subscribe(to1);

        ps.onNext(1);

        to2.assertValue(1);

        to1.assertNoValues();
    }

    @Test
    public void crossCancelOnError() {
        final TestObserver<Integer> to1 = new TestObserver<Integer>();
        TestObserver<Integer> to2 = new TestObserver<Integer>() {
            @Override
            public void onError(Throwable t) {
                super.onError(t);
                to1.cancel();
            }
        };

        PublishSubject<Integer> ps = PublishSubject.create();

        ps.subscribe(to2);
        ps.subscribe(to1);

        ps.onError(new TestException());

        to2.assertError(TestException.class);

        to1.assertNoErrors();
    }

    @Test
    public void crossCancelOnComplete() {
        final TestObserver<Integer> to1 = new TestObserver<Integer>();
        TestObserver<Integer> to2 = new TestObserver<Integer>() {
            @Override
            public void onComplete() {
                super.onComplete();
                to1.cancel();
            }
        };

        PublishSubject<Integer> ps = PublishSubject.create();

        ps.subscribe(to2);
        ps.subscribe(to1);

        ps.onComplete();

        to2.assertComplete();

        to1.assertNotComplete();
    }

    @Test
    @Ignore("Observable doesn't do backpressure")
    public void backpressureOverflow() {
//        PublishSubject<Integer> ps = PublishSubject.create();
//
//        TestObserver<Integer> to = ps.test(0L);
//
//        ps.onNext(1);
//
//        to.assertNoValues()
//        .assertNotComplete()
//        .assertError(MissingBackpressureException.class)
//        ;
    }

    // An observer that disposes inside onSubscribe must be removed immediately.
    @Test
    public void onSubscribeCancelsImmediately() {
        PublishSubject<Integer> ps = PublishSubject.create();

        TestObserver<Integer> to = ps.test();

        ps.subscribe(new Observer<Integer>() {
            @Override
            public void onSubscribe(Disposable d) {
                d.dispose();
                d.dispose();
            }

            @Override
            public void onNext(Integer t) {
            }

            @Override
            public void onError(Throwable t) {
            }

            @Override
            public void onComplete() {
            }
        });

        to.cancel();

        assertFalse(ps.hasObservers());
    }

    // Racing two onComplete calls must still deliver exactly one terminal event.
    @Test
    public void terminateRace() throws Exception {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final PublishSubject<Integer> ps = PublishSubject.create();

            TestObserver<Integer> to = ps.test();

            Runnable task = new Runnable() {
                @Override
                public void run() {
                    ps.onComplete();
                }
            };

            TestHelper.race(task, task);

            to
            .awaitDone(5, TimeUnit.SECONDS)
            .assertResult();
        }
    }

    @Test
    public void addRemoveRance() throws Exception {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final PublishSubject<Integer> ps = PublishSubject.create();

            final TestObserver<Integer> to = ps.test();

            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    ps.subscribe();
                }
            };

            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    to.cancel();
                }
            };

            TestHelper.race(r1, r2);
        }
    }

    @Test
    public void addTerminateRance() throws Exception {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final PublishSubject<Integer> ps = PublishSubject.create();

            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    ps.subscribe();
                }
            };

            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    ps.onComplete();
                }
            };

            TestHelper.race(r1, r2);
        }
    }

    @Test
    public void addCompleteRance() throws Exception {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final PublishSubject<Integer> ps = PublishSubject.create();

            final TestObserver<Integer> to = new TestObserver<Integer>();

            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    ps.subscribe(to);
                }
            };

            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    ps.onComplete();
                }
            };

            TestHelper.race(r1, r2);

            to.awaitDone(5, TimeUnit.SECONDS)
            .assertResult();
        }
    }

    // Subscribing a fresh subject to an already-terminated one must not retain it.
    @Test
    public void subscribeToAfterComplete() {
        PublishSubject<Integer> ps = PublishSubject.create();

        ps.onComplete();

        PublishSubject<Integer> ps2 = PublishSubject.create();

        ps2.subscribe(ps);

        assertFalse(ps2.hasObservers());
    }

    @Test
    public void subscribedTo() {
        PublishSubject<Integer> ps = PublishSubject.create();
        PublishSubject<Integer> ps2 = PublishSubject.create();

        ps.subscribe(ps2);

        TestObserver<Integer> to = ps2.test();

        ps.onNext(1);
        ps.onNext(2);
        ps.onComplete();

        to.assertResult(1, 2);
    }
}
package game.sprites.player.ships; import java.awt.Color; import java.awt.Font; import java.awt.Graphics2D; import java.awt.Image; import java.awt.event.KeyEvent; import java.awt.geom.Rectangle2D; import java.util.ArrayList; import java.util.List; import game.input.InputManager; import game.level.util.Renderer; import game.level.util.WeightedValue; import game.main.Display; import game.sprites.player.weaponry.SideTorpedo; import game.sprites.player.weaponry.Torpedo; /**Player's spaceship. Fighter jet. Upgrades will be possible during game.*/ public class Jet { //Key words for ResourceManager private final String keyWord = "jet"; private final String destroyedKeyWord = "jetDestroyed"; private final String forceFieldKeyWord = "forceField"; private final String sideTorpedoLauncherKeyWord = "sideTorpedoLauncher"; //Display size private static final int DISPLAY_WIDTH = Display.SIZE.getWidth(); private static final int DISPLAY_HEIGHT = Display.SIZE.getHeight(); //Constants for time conversion private static final long TO_MILLIS = 1000000; //Jet properties private final double jet_width = 60; private final double jet_height = 90; private double jetSpeed = 6; private double coordX, coordY; private boolean isVisible, isDestroyed; private int hp = 100; //Hp display private final String healthTextKeyWord = "hpText"; private final int hpTextWidth = 51; private final int hpTextHeight = 40; private final int textCoordX = 0; private final int textCoordY = DISPLAY_HEIGHT-hpTextHeight; private final Font hpDisplayFont = new Font("Halvetica", Font.BOLD, 28); private final Color hpDisplayColor = new Color(255, 255, 255); private final int hpDisplayYOffset = 10; //Temporary torpedo storage private List<Torpedo> torpedosStorage; //Basic torpedos private String basicTorpedoKeyWord = "Basic Torpedo"; private long basicTorpedoReloadTimeInMillis = 400; private long basicTorpedoTimeOfLastShot = System.nanoTime()/TO_MILLIS; //Side torpedos private String sideTorpedosKeyWord = "Side 
Torpedos"; //completes a keyword String literal that begins before this chunk
    private long sideTorpedosReloadTimeInMillis = 175; //minimum millis between side-torpedo volleys
    private long sideTorpedosTimeOfLastShot; //nanoTime/TO_MILLIS timestamp of the last side-torpedo volley
    private double leftSideTorpedoCoordXOffset = 8;
    private double rightSideTorpedoCoordXOffset = 52;
    //Image handle and x, y coordinates reused by the renderer each frame (reset in renderJet's finally block)
    private Image rendererBuffer;
    private double rendererCoordX;
    private double rendererCoordY;
    private String rendererHp;
    //Flags controlling the two power-ups
    private boolean isForceFieldActive;
    private boolean isSideTorpedosEnabled;
    //Offsets added to the jet x, y location to properly draw each upgrade sprite
    private final double forceFieldCoordXOffset = -3, forceFieldCoordYOffset = -4;
    private final double leftSideTorpedoLauncherCoordXOffset = 0, leftSideTorpedoLauncherCoordYOffset = 38;
    private final double rightSideTorpedoLauncherCoordXOffset = 44, rightSideTorpedoLauncherCoordYOffset = 38;
    //Destroyed animation
    private final double destroyedOffsetX = -3, destroyedOffsetY = 12;
    private long timeOfDeath; //System.nanoTime() captured when hp dropped to 0
    private long deathAnimationTimeInMillis = 150; //how long the destroyed sprite stays on screen

    /**Creates the jet centered horizontally, resting on the bottom edge of the display.*/
    public Jet(boolean isVisible){
        this.isVisible = isVisible;
        coordX = (DISPLAY_WIDTH/2)-(jet_width/2);
        coordY = DISPLAY_HEIGHT-jet_height;
    }

    /**Applies damage. An active force field absorbs the hit (and is lost) first,
     * then the side-torpedo upgrade; only with no power-ups left is hp reduced.
     * At hp &lt;= 0 the death animation is started.*/
    public void updateHP(int damage){
        if(isForceFieldActive){
            isForceFieldActive = false;
        }else if(isSideTorpedosEnabled){
            isSideTorpedosEnabled = false;
        }else {
            hp -= damage;
            if(hp <= 0){
                isDestroyed = true;
                timeOfDeath = System.nanoTime();
            }
        }
    }

    public int getHP(){
        return hp;
    }

    public void addHP(int hpPoints){
        hp += hpPoints;
    }

    /**Moves the jet from keyboard input (arrow keys) and fires on SPACE.
     * delta scales movement to frame time; at each screen edge the jet is
     * nudged one pixel back inside instead of moving further out.*/
    public void jetControl(InputManager input, List<WeightedValue<String, List<Torpedo>>> torpedoStorage, double delta){
        if(input.isKeyPressed(KeyEvent.VK_UP)){
            if(coordY <= 0){
                coordY += 1;
            }else {
                coordY -= jetSpeed*delta;
            }
        }
        if(input.isKeyPressed(KeyEvent.VK_DOWN)){
            if(coordY >= DISPLAY_HEIGHT - jet_height){
                coordY -= 1;
            }else {
                coordY += jetSpeed*delta;
            }
        }
        if(input.isKeyPressed(KeyEvent.VK_LEFT)){
            if(coordX <= 0){
                coordX += 1;
            }else {
                coordX -= jetSpeed*delta;
            }
        }
        if(input.isKeyPressed(KeyEvent.VK_RIGHT)){
            if(coordX >= DISPLAY_WIDTH - jet_width){
                coordX -= 1;
            }else {
                coordX += jetSpeed*delta;
            }
        }
        if(input.isKeyPressed(KeyEvent.VK_SPACE)){
            try{
                //Basic torpedo
                launchBasicTorpedo(torpedoStorage);
                //Side torpedos
                launchSideTorpedos(torpedoStorage);
            }finally{
                torpedosStorage = null; //drop the cached per-keyword list reference after firing
            }
        }
    }

    /**Launch side torpedos. Fires one torpedo from each launcher when the
     * upgrade is active and the reload time has elapsed; when the upgrade is
     * inactive, removes the (empty) side-torpedo storage entry and resets the timer.*/
    private void launchSideTorpedos(List<WeightedValue<String, List<Torpedo>>> torpedoStorage){
        if(isSideTorpedosEnabled){
            long now = System.nanoTime()/TO_MILLIS;
            if(now - sideTorpedosTimeOfLastShot > sideTorpedosReloadTimeInMillis){
                torpedosStorage = getTorpedoStorage(sideTorpedosKeyWord ,torpedoStorage);
                //left launcher
                double x = coordX+leftSideTorpedoCoordXOffset;
                double y = coordY+leftSideTorpedoLauncherCoordYOffset;
                torpedosStorage.add(new SideTorpedo(x, y, true));
                //right launcher
                x = coordX+rightSideTorpedoCoordXOffset;
                y = coordY+rightSideTorpedoLauncherCoordYOffset;
                torpedosStorage.add(new SideTorpedo(x, y, true));
                sideTorpedosTimeOfLastShot = now;
            }
        }else if(!isSideTorpedosEnabled){ //NOTE(review): this branch is just "else"; the condition is always true here
            removeTorpedoStorage(sideTorpedosKeyWord, torpedoStorage);
            sideTorpedosTimeOfLastShot = 0;
        }
    }

    /**Launch basic torpedo. Fires one torpedo from the jet's nose whenever the
     * basic reload time has elapsed.*/
    private void launchBasicTorpedo(List<WeightedValue<String, List<Torpedo>>> torpedoStorage){
        long now = System.nanoTime()/TO_MILLIS;
        if(now - basicTorpedoTimeOfLastShot > basicTorpedoReloadTimeInMillis){
            torpedosStorage = getTorpedoStorage(basicTorpedoKeyWord ,torpedoStorage);
            double x = coordX+(jet_width/2);
            double y = coordY;
            torpedosStorage.add(new Torpedo(x, y, true));
            basicTorpedoTimeOfLastShot = now;
        }
    }

    public double getJetSpeed() {
        return jetSpeed;
    }

    public void setJetSpeed(double jetSpeed) {
        this.jetSpeed = jetSpeed;
    }

    public double getCoordX() {
        return coordX;
    }

    public void setCoordX(double coordX) {
        this.coordX = coordX;
    }

    public double getCoordY() {
        return coordY;
    }

    public void setCoordY(double coordY) {
        this.coordY = coordY;
    }

    public boolean isVisible() {
        return isVisible;
    }

    public void setVisible(boolean isVisible) {
        this.isVisible = isVisible;
    }

    public double getJetWidth() {
        return jet_width;
    }

    public double getJetHeight() {
        return jet_height;
    }

    /**Returns the jet's current bounding box, used for collision detection.*/
    public Rectangle2D.Double getJetBounds(){
        return new Rectangle2D.Double(coordX, coordY, jet_width, jet_height);
    }

    public void setForceFieldActive(boolean isForceFieldActive) {
        this.isForceFieldActive = isForceFieldActive;
    }

    public boolean isForceFieldActive() {
        return isForceFieldActive;
    }

    public boolean isSideTorpedosEnabled() {
        return isSideTorpedosEnabled;
    }

    public void setSideTorpedosEnabled(boolean isSideTorpedosEnabled) {
        this.isSideTorpedosEnabled = isSideTorpedosEnabled;
    }

    /**Removes the storage entry for keyWord from torpedoStorage, but only once
     * its torpedo list has emptied (so in-flight torpedos keep updating).*/
    private void removeTorpedoStorage(String keyWord, List<WeightedValue<String, List<Torpedo>>> torpedoStorage){
        for(int i = 0 ; i < torpedoStorage.size() ; i++){
            WeightedValue<String, List<Torpedo>> torpedoList = torpedoStorage.get(i);
            if(keyWord.equals(torpedoList.getWeight())){
                torpedosStorage = torpedoList.getValue();
                if(torpedosStorage.isEmpty()){
                    torpedoStorage.remove(i);
                    torpedoList = null;
                    torpedosStorage = null;
                }
            }
        }
    }

    /**Returns the torpedo list registered under keyWord, creating and
     * registering a new empty list when none exists yet. Also caches the
     * result in the torpedosStorage field as a side effect.*/
    private List<Torpedo> getTorpedoStorage(String keyWord, List<WeightedValue<String, List<Torpedo>>> torpedoStorage){
        if(!torpedoStorage.isEmpty()){
            for(WeightedValue<String, List<Torpedo>> torpedoList : torpedoStorage){
                if(keyWord.equals(torpedoList.getWeight())){
                    torpedosStorage = torpedoList.getValue();
                    return torpedosStorage;
                }
            }
            //keyWord not present yet: register a fresh list
            torpedosStorage = new ArrayList<Torpedo>();
            torpedoStorage.add(new WeightedValue<String, List<Torpedo>>(keyWord, torpedosStorage));
            return torpedosStorage;
        }else if(torpedoStorage.isEmpty()){ //NOTE(review): always true when reached; plain "else" would do
            torpedosStorage = new ArrayList<Torpedo>();
            torpedoStorage.add(new WeightedValue<String, List<Torpedo>>(keyWord, torpedosStorage));
            return torpedosStorage;
        }
        return null; //unreachable: one of the branches above always returns
    }

    /**Draws the jet, its active power-up sprites, the destroyed animation and
     * the HP display. The renderer scratch fields are always cleared afterwards.*/
    public void renderJet(Renderer renderer, Graphics2D g2d){
        try{
            if(isDestroyed){
                rendererBuffer = renderer.getImage(destroyedKeyWord);
                rendererCoordX = coordX+destroyedOffsetX;
                rendererCoordY = coordY+destroyedOffsetY;
                g2d.drawImage(rendererBuffer, (int)rendererCoordX, (int)rendererCoordY, null);
                //once the animation has run its course the jet disappears
                if((System.nanoTime()-timeOfDeath)/TO_MILLIS > deathAnimationTimeInMillis){
                    isDestroyed = false;
                    isVisible = false;
                }
            }else {
                //Force Field
                if(isForceFieldActive){
                    rendererBuffer = renderer.getImage(forceFieldKeyWord);
                    rendererCoordX = coordX+forceFieldCoordXOffset;
                    rendererCoordY = coordY+forceFieldCoordYOffset;
                    g2d.drawImage(rendererBuffer, (int)rendererCoordX, (int)rendererCoordY, null);
                }
                //Jet
                if(isVisible){
                    rendererBuffer = renderer.getImage(keyWord);
                    rendererCoordX = coordX;
                    rendererCoordY = coordY;
                    g2d.drawImage(rendererBuffer, (int)rendererCoordX, (int)rendererCoordY, null);
                }
                //Side Torpedos
                if(isSideTorpedosEnabled){
                    rendererBuffer = renderer.getImage(sideTorpedoLauncherKeyWord);
                    //Left launcher
                    rendererCoordX = coordX+leftSideTorpedoLauncherCoordXOffset;
                    rendererCoordY = coordY+leftSideTorpedoLauncherCoordYOffset;
                    g2d.drawImage(rendererBuffer, (int)rendererCoordX, (int)rendererCoordY, null);
                    //Right launcher
                    rendererCoordX = coordX+rightSideTorpedoLauncherCoordXOffset;
                    rendererCoordY = coordY+rightSideTorpedoLauncherCoordYOffset;
                    g2d.drawImage(rendererBuffer, (int)rendererCoordX, (int)rendererCoordY, null);
                }
            }
            //Hp bar
            renderHP(g2d, renderer);
        }finally{
            rendererCoordX = 0;
            rendererCoordY = 0;
            rendererHp = null;
            rendererBuffer = null;
        }
    }

    /**Draws the "health" label image and the current hp value (clamped to a
     * displayed minimum of 0).*/
    private void renderHP(Graphics2D g2d, Renderer renderer){
        rendererBuffer = renderer.getImage(healthTextKeyWord);
        g2d.setFont(hpDisplayFont);
        g2d.setColor(hpDisplayColor);
        g2d.drawImage(rendererBuffer, textCoordX, textCoordY, null);
        if(hp < 0){
            rendererHp = "0";
        }else {
            rendererHp = Long.toString(hp);
        }
        g2d.drawString(rendererHp, hpTextWidth, (DISPLAY_HEIGHT-hpDisplayYOffset));
    }

    public boolean isDestroyed() {
        return isDestroyed;
    }
}
/*
 * Copyright 2013 MovingBlocks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.logic.players;

import com.google.common.collect.Maps;
import org.lwjgl.opengl.GL11;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.asset.Asset;
import org.terasology.asset.Assets;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.systems.BaseComponentSystem;
import org.terasology.entitySystem.systems.RegisterMode;
import org.terasology.entitySystem.systems.RegisterSystem;
import org.terasology.entitySystem.systems.RenderSystem;
import org.terasology.logic.characters.CharacterComponent;
import org.terasology.logic.characters.CharacterMovementComponent;
import org.terasology.logic.inventory.InventoryUtils;
import org.terasology.logic.inventory.ItemComponent;
import org.terasology.math.geom.Vector2f;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector4f;
import org.terasology.registry.In;
import org.terasology.rendering.assets.material.Material;
import org.terasology.rendering.assets.mesh.Mesh;
import org.terasology.rendering.assets.shader.ShaderProgramFeature;
import org.terasology.rendering.assets.texture.Texture;
import org.terasology.rendering.assets.texture.TextureRegion;
import org.terasology.rendering.iconmesh.IconMeshFactory;
import org.terasology.rendering.primitives.Tessellator;
import org.terasology.rendering.primitives.TessellatorHelper;
import org.terasology.rendering.world.WorldRenderer;
import org.terasology.world.WorldProvider;
import org.terasology.world.block.Block;
import org.terasology.world.block.family.BlockFamily;
import org.terasology.world.block.items.BlockItemComponent;

import java.util.Map;

import static org.lwjgl.opengl.GL11.glBindTexture;
import static org.lwjgl.opengl.GL11.glPopMatrix;
import static org.lwjgl.opengl.GL11.glPushMatrix;
import static org.lwjgl.opengl.GL11.glRotatef;
import static org.lwjgl.opengl.GL11.glScalef;
import static org.lwjgl.opengl.GL11.glTranslatef;

/**
 * Client-side render system that draws the local player's first-person view of
 * the currently held item: a block, an icon mesh, or the empty hand.
 *
 * @author Immortius
 */
@RegisterSystem(RegisterMode.CLIENT)
public class FirstPersonRenderer extends BaseComponentSystem implements RenderSystem {
    private static final Logger logger = LoggerFactory.getLogger(FirstPersonRenderer.class);

    @In
    private WorldProvider worldProvider;
    @In
    private LocalPlayer localPlayer;
    @In
    private WorldRenderer worldRenderer;

    private Mesh handMesh; //cube mesh textured from the character skin, built in initialise()
    private Texture handTex;

    //NOTE(review): not referenced anywhere in this file's visible methods — possibly dead state
    private Map<String, Mesh> iconMeshes = Maps.newHashMap();

    /**
     * Builds the hand mesh from a region of the "engine:char" character texture.
     * The texPos/texWidth magic numbers select that region in normalized UV space
     * (1/64 and 1/32 steps).
     */
    @Override
    public void initialise() {
        Vector2f texPos = new Vector2f(40.0f * 0.015625f, 32.0f * 0.03125f);
        Vector2f texWidth = new Vector2f(4.0f * 0.015625f, -12.0f * 0.03125f);

        Tessellator tessellator = new Tessellator();
        TessellatorHelper.addBlockMesh(tessellator, new Vector4f(1, 1, 1, 1), texPos, texWidth, 1.0f, 1.0f, 0.9f, 0.0f, 0.0f, 0.0f);
        handMesh = tessellator.generateMesh();
        handTex = Assets.getTexture("engine:char");
    }

    @Override
    public void renderOpaque() {
    }

    @Override
    public void renderAlphaBlend() {
    }

    /**
     * Renders whatever the local player holds in the selected inventory slot:
     * a block (block item), an icon mesh (renderWithIcon items), or the bare
     * hand. Bails out early when the character components are not available.
     */
    @Override
    public void renderFirstPerson() {
        CharacterComponent character = localPlayer.getCharacterEntity().getComponent(CharacterComponent.class);
        if (character == null) {
            return;
        }
        CharacterMovementComponent charMoveComp = localPlayer.getCharacterEntity().getComponent(CharacterMovementComponent.class);
        if (charMoveComp == null) {
            return;
        }
        //view bobbing driven by the footstep cycle; hand animation from item use
        float bobOffset = calcBobbingOffset(charMoveComp.footstepDelta, (float) java.lang.Math.PI / 8f, 0.05f);
        float handMovementAnimationOffset = character.handAnimation;

        int invSlotIndex = character.selectedItem;
        EntityRef heldItem = InventoryUtils.getItemAt(localPlayer.getCharacterEntity(), invSlotIndex);
        ItemComponent heldItemComp = heldItem.getComponent(ItemComponent.class);
        BlockItemComponent blockItem = heldItem.getComponent(BlockItemComponent.class);
        if (blockItem != null && blockItem.blockFamily != null) {
            renderBlock(blockItem.blockFamily, bobOffset, handMovementAnimationOffset);
        } else if (heldItemComp != null && heldItemComp.renderWithIcon) {
            renderIcon(heldItemComp.icon, bobOffset, handMovementAnimationOffset);
        } else {
            renderHand(bobOffset, handMovementAnimationOffset);
        }
    }

    @Override
    public void renderShadows() {
    }

    @Override
    public void renderOverlay() {
    }

    /**
     * Draws the empty hand mesh, positioned/rotated by the bobbing and hand
     * animation offsets, lit with the current sun and block light values.
     */
    private void renderHand(float bobOffset, float handMovementAnimationOffset) {
        Material shader = Assets.getMaterial("engine:prog.block");
        shader.activateFeature(ShaderProgramFeature.FEATURE_USE_MATRIX_STACK);
        shader.enable();
        shader.setFloat("sunlight", worldRenderer.getSunlightValue(), true);
        shader.setFloat("blockLight", worldRenderer.getBlockLightValue(), true);
        glBindTexture(GL11.GL_TEXTURE_2D, handTex.getId());

        glPushMatrix();
        glTranslatef(0.8f, -0.8f + bobOffset - handMovementAnimationOffset * 0.5f, -1.0f - handMovementAnimationOffset * 0.5f);
        glRotatef(-45f - handMovementAnimationOffset * 64.0f, 1.0f, 0.0f, 0.0f);
        glRotatef(35f, 0.0f, 1.0f, 0.0f);
        glTranslatef(0f, 0.25f, 0f);
        glScalef(0.3f, 0.6f, 0.3f);

        handMesh.render();

        glPopMatrix();
        shader.deactivateFeature(ShaderProgramFeature.FEATURE_USE_MATRIX_STACK);
    }

    /**
     * Draws a held item as an extruded icon mesh. The icon's texture size
     * (relative to 16px) scales both the distance from the camera and the mesh
     * itself via sqrt(textureScale). No-op for a null icon.
     */
    private void renderIcon(TextureRegion iconTexture, float bobOffset, float handMovementAnimationOffset) {
        if (iconTexture != null) {
            Material shader = Assets.getMaterial("engine:prog.block");
            shader.activateFeature(ShaderProgramFeature.FEATURE_USE_MATRIX_STACK);
            shader.enable();
            shader.setBoolean("textured", false, true);
            shader.setFloat("sunlight", worldRenderer.getSunlightValue(), true);
            shader.setFloat("blockLight", worldRenderer.getBlockLightValue(), true);

            glPushMatrix();

            float textureScale = Math.max(iconTexture.getWidth(), iconTexture.getHeight()) / 16f;

            glTranslatef(1.0f, -0.7f + bobOffset - handMovementAnimationOffset * 0.5f, (-1.5f - handMovementAnimationOffset * 0.5f) * (float) Math.pow(textureScale, 0.5));
            glRotatef(-handMovementAnimationOffset * 64.0f, 1.0f, 0.0f, 0.0f);
            glRotatef(-20f, 1.0f, 0.0f, 0.0f);
            glRotatef(-80f, 0.0f, 1.0f, 0.0f);
            glRotatef(45f, 0.0f, 0.0f, 1.0f);

            float scale = 0.75f * (float) Math.pow(textureScale, 0.5);
            glScalef(scale, scale, scale);

            //only Asset-backed texture regions can be turned into icon meshes
            if (iconTexture instanceof Asset<?>) {
                Mesh itemMesh = IconMeshFactory.getIconMesh(iconTexture);
                itemMesh.render();
            }

            glPopMatrix();
            shader.deactivateFeature(ShaderProgramFeature.FEATURE_USE_MATRIX_STACK);
        }
    }

    /**
     * Draws a held block (the family's archetype block) in the hand, lit with
     * the current light values; luminous blocks ignore block light.
     */
    private void renderBlock(BlockFamily blockFamily, float bobOffset, float handMovementAnimationOffset) {
        Block activeBlock = blockFamily.getArchetypeBlock();
        //NOTE(review): playerPos is never used below; the brightness comment predates a removed computation
        Vector3f playerPos = localPlayer.getPosition();

        // Adjust the brightness of the block according to the current position of the player
        Material shader = Assets.getMaterial("engine:prog.block");
        shader.activateFeature(ShaderProgramFeature.FEATURE_USE_MATRIX_STACK);
        shader.enable();

        glPushMatrix();

        glTranslatef(1.0f, -0.7f + bobOffset - handMovementAnimationOffset * 0.5f, -1.5f - handMovementAnimationOffset * 0.5f);
        glRotatef(-25f - handMovementAnimationOffset * 64.0f, 1.0f, 0.0f, 0.0f);
        glRotatef(35f, 0.0f, 1.0f, 0.0f);
        glTranslatef(0f, 0.1f, 0f);
        glScalef(0.75f, 0.75f, 0.75f);

        float blockLight = worldRenderer.getBlockLightValue();
        float sunlight = worldRenderer.getSunlightValue();

        // Blocks with a luminance > 0.0 shouldn't be affected by block light
        if (blockFamily.getArchetypeBlock().getLuminance() > 0.0) {
            blockLight = 1.0f;
        }

        activeBlock.renderWithLightValue(sunlight, blockLight);

        glPopMatrix();
        shader.deactivateFeature(ShaderProgramFeature.FEATURE_USE_MATRIX_STACK);
    }

    /**
     * Sinusoidal view-bobbing offset: amplitude * sin(2*pi*counter + phaseOffset).
     */
    private float calcBobbingOffset(float counter, float phaseOffset, float amplitude) {
        return (float) java.lang.Math.sin(2 * Math.PI * counter + phaseOffset) * amplitude;
    }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.snowball.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Snowball UpdateCluster operation. NOTE: this class is
 * code-generated; behavioral changes belong in the generator, not here.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/snowball-2016-06-30/UpdateCluster" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateClusterRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The cluster ID of the cluster that you want to update, for example
     * <code>CID123e4567-e89b-12d3-a456-426655440000</code>.
     * </p>
     */
    private String clusterId;
    /**
     * <p>
     * The new role Amazon Resource Name (ARN) that you want to associate with this cluster. To create a role ARN, use
     * the <a href="http://docs.aws.amazon.com/IAM/latest/APIReference/API_CreateRole.html">CreateRole</a> API action in
     * AWS Identity and Access Management (IAM).
     * </p>
     */
    private String roleARN;
    /**
     * <p>
     * The updated description of this cluster.
     * </p>
     */
    private String description;
    /**
     * <p>
     * The updated arrays of <a>JobResource</a> objects that can include updated <a>S3Resource</a> objects or
     * <a>LambdaResource</a> objects.
     * </p>
     */
    private JobResource resources;
    /**
     * <p>
     * The ID of the updated <a>Address</a> object.
     * </p>
     */
    private String addressId;
    /**
     * <p>
     * The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * </p>
     */
    private String shippingOption;
    /**
     * <p>
     * The new or updated <a>Notification</a> object.
     * </p>
     */
    private Notification notification;
    /**
     * <p>
     * The updated ID for the forwarding address for a cluster. This field is not supported in most regions.
     * </p>
     */
    private String forwardingAddressId;

    /**
     * <p>
     * The cluster ID of the cluster that you want to update, for example
     * <code>CID123e4567-e89b-12d3-a456-426655440000</code>.
     * </p>
     *
     * @param clusterId
     *        The cluster ID of the cluster that you want to update, for example
     *        <code>CID123e4567-e89b-12d3-a456-426655440000</code>.
     */
    public void setClusterId(String clusterId) {
        this.clusterId = clusterId;
    }

    /**
     * <p>
     * The cluster ID of the cluster that you want to update, for example
     * <code>CID123e4567-e89b-12d3-a456-426655440000</code>.
     * </p>
     *
     * @return The cluster ID of the cluster that you want to update, for example
     *         <code>CID123e4567-e89b-12d3-a456-426655440000</code>.
     */
    public String getClusterId() {
        return this.clusterId;
    }

    /**
     * <p>
     * The cluster ID of the cluster that you want to update, for example
     * <code>CID123e4567-e89b-12d3-a456-426655440000</code>.
     * </p>
     *
     * @param clusterId
     *        The cluster ID of the cluster that you want to update, for example
     *        <code>CID123e4567-e89b-12d3-a456-426655440000</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateClusterRequest withClusterId(String clusterId) {
        setClusterId(clusterId);
        return this;
    }

    /**
     * <p>
     * The new role Amazon Resource Name (ARN) that you want to associate with this cluster. To create a role ARN, use
     * the <a href="http://docs.aws.amazon.com/IAM/latest/APIReference/API_CreateRole.html">CreateRole</a> API action in
     * AWS Identity and Access Management (IAM).
     * </p>
     *
     * @param roleARN
     *        The new role Amazon Resource Name (ARN) that you want to associate with this cluster. To create a role
     *        ARN, use the <a
     *        href="http://docs.aws.amazon.com/IAM/latest/APIReference/API_CreateRole.html">CreateRole</a> API action in
     *        AWS Identity and Access Management (IAM).
     */
    public void setRoleARN(String roleARN) {
        this.roleARN = roleARN;
    }

    /**
     * <p>
     * The new role Amazon Resource Name (ARN) that you want to associate with this cluster. To create a role ARN, use
     * the <a href="http://docs.aws.amazon.com/IAM/latest/APIReference/API_CreateRole.html">CreateRole</a> API action in
     * AWS Identity and Access Management (IAM).
     * </p>
     *
     * @return The new role Amazon Resource Name (ARN) that you want to associate with this cluster. To create a role
     *         ARN, use the <a
     *         href="http://docs.aws.amazon.com/IAM/latest/APIReference/API_CreateRole.html">CreateRole</a> API action
     *         in AWS Identity and Access Management (IAM).
     */
    public String getRoleARN() {
        return this.roleARN;
    }

    /**
     * <p>
     * The new role Amazon Resource Name (ARN) that you want to associate with this cluster. To create a role ARN, use
     * the <a href="http://docs.aws.amazon.com/IAM/latest/APIReference/API_CreateRole.html">CreateRole</a> API action in
     * AWS Identity and Access Management (IAM).
     * </p>
     *
     * @param roleARN
     *        The new role Amazon Resource Name (ARN) that you want to associate with this cluster. To create a role
     *        ARN, use the <a
     *        href="http://docs.aws.amazon.com/IAM/latest/APIReference/API_CreateRole.html">CreateRole</a> API action in
     *        AWS Identity and Access Management (IAM).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateClusterRequest withRoleARN(String roleARN) {
        setRoleARN(roleARN);
        return this;
    }

    /**
     * <p>
     * The updated description of this cluster.
     * </p>
     *
     * @param description
     *        The updated description of this cluster.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * <p>
     * The updated description of this cluster.
     * </p>
     *
     * @return The updated description of this cluster.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * <p>
     * The updated description of this cluster.
     * </p>
     *
     * @param description
     *        The updated description of this cluster.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateClusterRequest withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * <p>
     * The updated arrays of <a>JobResource</a> objects that can include updated <a>S3Resource</a> objects or
     * <a>LambdaResource</a> objects.
     * </p>
     *
     * @param resources
     *        The updated arrays of <a>JobResource</a> objects that can include updated <a>S3Resource</a> objects or
     *        <a>LambdaResource</a> objects.
     */
    public void setResources(JobResource resources) {
        this.resources = resources;
    }

    /**
     * <p>
     * The updated arrays of <a>JobResource</a> objects that can include updated <a>S3Resource</a> objects or
     * <a>LambdaResource</a> objects.
     * </p>
     *
     * @return The updated arrays of <a>JobResource</a> objects that can include updated <a>S3Resource</a> objects or
     *         <a>LambdaResource</a> objects.
     */
    public JobResource getResources() {
        return this.resources;
    }

    /**
     * <p>
     * The updated arrays of <a>JobResource</a> objects that can include updated <a>S3Resource</a> objects or
     * <a>LambdaResource</a> objects.
     * </p>
     *
     * @param resources
     *        The updated arrays of <a>JobResource</a> objects that can include updated <a>S3Resource</a> objects or
     *        <a>LambdaResource</a> objects.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateClusterRequest withResources(JobResource resources) {
        setResources(resources);
        return this;
    }

    /**
     * <p>
     * The ID of the updated <a>Address</a> object.
     * </p>
     *
     * @param addressId
     *        The ID of the updated <a>Address</a> object.
     */
    public void setAddressId(String addressId) {
        this.addressId = addressId;
    }

    /**
     * <p>
     * The ID of the updated <a>Address</a> object.
     * </p>
     *
     * @return The ID of the updated <a>Address</a> object.
     */
    public String getAddressId() {
        return this.addressId;
    }

    /**
     * <p>
     * The ID of the updated <a>Address</a> object.
     * </p>
     *
     * @param addressId
     *        The ID of the updated <a>Address</a> object.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateClusterRequest withAddressId(String addressId) {
        setAddressId(addressId);
        return this;
    }

    /**
     * <p>
     * The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * </p>
     *
     * @param shippingOption
     *        The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * @see ShippingOption
     */
    public void setShippingOption(String shippingOption) {
        this.shippingOption = shippingOption;
    }

    /**
     * <p>
     * The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * </p>
     *
     * @return The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * @see ShippingOption
     */
    public String getShippingOption() {
        return this.shippingOption;
    }

    /**
     * <p>
     * The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * </p>
     *
     * @param shippingOption
     *        The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ShippingOption
     */
    public UpdateClusterRequest withShippingOption(String shippingOption) {
        setShippingOption(shippingOption);
        return this;
    }

    /**
     * <p>
     * The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * </p>
     *
     * @param shippingOption
     *        The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * @see ShippingOption
     */
    public void setShippingOption(ShippingOption shippingOption) {
        //enum overload stores the enum's wire value; the field itself is a String
        this.shippingOption = shippingOption.toString();
    }

    /**
     * <p>
     * The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * </p>
     *
     * @param shippingOption
     *        The updated shipping option value of this cluster's <a>ShippingDetails</a> object.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ShippingOption
     */
    public UpdateClusterRequest withShippingOption(ShippingOption shippingOption) {
        setShippingOption(shippingOption);
        return this;
    }

    /**
     * <p>
     * The new or updated <a>Notification</a> object.
     * </p>
     *
     * @param notification
     *        The new or updated <a>Notification</a> object.
     */
    public void setNotification(Notification notification) {
        this.notification = notification;
    }

    /**
     * <p>
     * The new or updated <a>Notification</a> object.
     * </p>
     *
     * @return The new or updated <a>Notification</a> object.
     */
    public Notification getNotification() {
        return this.notification;
    }

    /**
     * <p>
     * The new or updated <a>Notification</a> object.
     * </p>
     *
     * @param notification
     *        The new or updated <a>Notification</a> object.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateClusterRequest withNotification(Notification notification) {
        setNotification(notification);
        return this;
    }

    /**
     * <p>
     * The updated ID for the forwarding address for a cluster. This field is not supported in most regions.
     * </p>
     *
     * @param forwardingAddressId
     *        The updated ID for the forwarding address for a cluster. This field is not supported in most regions.
     */
    public void setForwardingAddressId(String forwardingAddressId) {
        this.forwardingAddressId = forwardingAddressId;
    }

    /**
     * <p>
     * The updated ID for the forwarding address for a cluster. This field is not supported in most regions.
     * </p>
     *
     * @return The updated ID for the forwarding address for a cluster. This field is not supported in most regions.
     */
    public String getForwardingAddressId() {
        return this.forwardingAddressId;
    }

    /**
     * <p>
     * The updated ID for the forwarding address for a cluster. This field is not supported in most regions.
     * </p>
     *
     * @param forwardingAddressId
     *        The updated ID for the forwarding address for a cluster. This field is not supported in most regions.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateClusterRequest withForwardingAddressId(String forwardingAddressId) {
        setForwardingAddressId(forwardingAddressId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * Only non-null fields are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getClusterId() != null)
            sb.append("ClusterId: ").append(getClusterId()).append(",");
        if (getRoleARN() != null)
            sb.append("RoleARN: ").append(getRoleARN()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getResources() != null)
            sb.append("Resources: ").append(getResources()).append(",");
        if (getAddressId() != null)
            sb.append("AddressId: ").append(getAddressId()).append(",");
        if (getShippingOption() != null)
            sb.append("ShippingOption: ").append(getShippingOption()).append(",");
        if (getNotification() != null)
            sb.append("Notification: ").append(getNotification()).append(",");
        if (getForwardingAddressId() != null)
            sb.append("ForwardingAddressId: ").append(getForwardingAddressId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof UpdateClusterRequest == false)
            return false;
        UpdateClusterRequest other = (UpdateClusterRequest) obj;
        //generated pattern: "a ^ b" on null-checks is true when exactly one side is null
        if (other.getClusterId() == null ^ this.getClusterId() == null)
            return false;
        if (other.getClusterId() != null && other.getClusterId().equals(this.getClusterId()) == false)
            return false;
        if (other.getRoleARN() == null ^ this.getRoleARN() == null)
            return false;
        if (other.getRoleARN() != null && other.getRoleARN().equals(this.getRoleARN()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getResources() == null ^ this.getResources() == null)
            return false;
        if (other.getResources() != null && other.getResources().equals(this.getResources()) == false)
            return false;
        if (other.getAddressId() == null ^ this.getAddressId() == null)
            return false;
        if (other.getAddressId() != null && other.getAddressId().equals(this.getAddressId()) == false)
            return false;
        if (other.getShippingOption() == null ^ this.getShippingOption() == null)
            return false;
        if (other.getShippingOption() != null && other.getShippingOption().equals(this.getShippingOption()) == false)
            return false;
        if (other.getNotification() == null ^ this.getNotification() == null)
            return false;
        if (other.getNotification() != null && other.getNotification().equals(this.getNotification()) == false)
            return false;
        if (other.getForwardingAddressId() == null ^ this.getForwardingAddressId() == null)
            return false;
        if (other.getForwardingAddressId() != null && other.getForwardingAddressId().equals(this.getForwardingAddressId()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getClusterId() == null) ? 0 : getClusterId().hashCode());
        hashCode = prime * hashCode + ((getRoleARN() == null) ? 0 : getRoleARN().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getResources() == null) ? 0 : getResources().hashCode());
        hashCode = prime * hashCode + ((getAddressId() == null) ? 0 : getAddressId().hashCode());
        hashCode = prime * hashCode + ((getShippingOption() == null) ? 0 : getShippingOption().hashCode());
        hashCode = prime * hashCode + ((getNotification() == null) ? 0 : getNotification().hashCode());
        hashCode = prime * hashCode + ((getForwardingAddressId() == null) ? 0 : getForwardingAddressId().hashCode());
        return hashCode;
    }

    @Override
    public UpdateClusterRequest clone() {
        return (UpdateClusterRequest) super.clone();
    }

}
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.googlesource.gerrit.plugins.websession.flatfile;

import static com.google.common.truth.Truth.assertThat;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.gerrit.httpd.WebSessionManager.Val;
import com.googlesource.gerrit.plugins.websession.flatfile.FlatFileWebSessionCache.TimeMachine;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * Unit tests for {@code FlatFileWebSessionCache}, which stores one web session
 * per file (file name = session key) under a websession directory.
 *
 * <p>Fixture keys: EXISTING_KEY has a serialized {@code Val} resource on the
 * classpath, EMPTY_KEY maps to a zero-length file, INVALID_KEY maps to a file
 * whose content cannot be deserialized, and NEW_KEY is never pre-loaded.
 */
public class FlatFileWebSessionCacheTest {

  private static final String EXISTING_KEY = "aSceprtBc02YaMY573T5jfW64ZudJfPbDq";
  private static final String EMPTY_KEY = "aOc2prqlZRpSO3LpauGO5efCLs1L9r9KkG";
  private static final String INVALID_KEY = "aOFdpHriBM6dN055M13PjDdTZagl5r5aSG";
  private static final String NEW_KEY = "abcde12345";

  // Fresh temp dir per test; JUnit deletes it afterwards.
  @Rule public TemporaryFolder tempFolder = new TemporaryFolder();

  private FlatFileWebSessionCache cache;
  private Path websessionDir;

  /** Creates an empty websession directory and a cache rooted at it. */
  @Before
  public void createFlatFileWebSessionCache() throws Exception {
    websessionDir = tempFolder.newFolder("websessions").toPath();
    cache = new FlatFileWebSessionCache(websessionDir);
  }

  /** asMap() must skip empty and unreadable session files and expose valid ones. */
  @Test
  public void asMapTest() throws Exception {
    loadKeyToCacheDir(EMPTY_KEY);
    assertThat(cache.asMap()).isEmpty();
    loadKeyToCacheDir(INVALID_KEY);
    assertThat(cache.asMap()).isEmpty();
    loadKeyToCacheDir(EXISTING_KEY);
    assertThat(cache.asMap()).containsKey(EXISTING_KEY);
  }

  /** The constructor re-creates the websession directory if it is missing. */
  @Test
  public void constructorCreateDir() throws IOException {
    assertThat(websessionDir.toFile().delete()).isTrue();
    cache = new FlatFileWebSessionCache(websessionDir);
    assertThat(websessionDir.toFile().exists()).isTrue();
  }

  /**
   * cleanUp() must keep sessions that have not yet expired and delete expired
   * ones. The clock is pinned via TimeMachine relative to the session's own
   * expiry time, and restored in the finally block so other tests see the
   * system clock again.
   */
  @Test
  public void cleanUpTest() throws Exception {
    loadKeyToCacheDir(EXISTING_KEY);
    try {
      long existingKeyExpireAt = cache.getIfPresent(EXISTING_KEY).getExpiresAt();
      TimeMachine.useFixedClockAt(
          Instant.ofEpochMilli(existingKeyExpireAt).minus(1, ChronoUnit.HOURS));
      cache.cleanUp();
      assertThat(isDirEmpty(websessionDir)).isFalse();

      TimeMachine.useFixedClockAt(
          Instant.ofEpochMilli(existingKeyExpireAt).plus(1, ChronoUnit.HOURS));
      cache.cleanUp();
      assertThat(isDirEmpty(websessionDir)).isTrue();
    } finally {
      TimeMachine.useSystemDefaultZoneClock();
    }
  }

  /** cleanUp() must not throw when the websession directory itself is gone. */
  @Test
  public void cleanUpWithErrorsWhileListingFilesTest() throws Exception {
    tempFolder.delete();
    cache.cleanUp();
    assertThat(cache.size()).isEqualTo(0);
  }

  /**
   * cleanUp() must tolerate files it cannot delete and leave them in place.
   * NOTE(review): setWritable(false) on a directory is a no-op on some
   * platforms (e.g. Windows), so this test may be vacuous there — confirm.
   */
  @Test
  public void cleanUpWithErrorsWhileDeleteFileTest() throws Exception {
    loadKeyToCacheDir(EXISTING_KEY);
    try {
      websessionDir.toFile().setWritable(false);
      cache.cleanUp();
      assertThat(cache.size()).isEqualTo(1);
    } finally {
      websessionDir.toFile().setWritable(true);
    }
  }

  /** A key whose file exists but is empty yields null. */
  @Test
  public void getIfPresentEmptyKeyTest() throws Exception {
    assertThat(cache.getIfPresent(EMPTY_KEY)).isNull();
  }

  /** Non-String keys are rejected (cache keys are session-id strings). */
  @Test
  public void getIfPresentObjectNonStringTest() throws Exception {
    assertThat(cache.getIfPresent(new Object())).isNull();
  }

  /**
   * A full path passed as key must not resolve to a session: only bare key
   * names are valid lookups.
   */
  @Test
  public void getIfPresentInvalidKeyTest() throws Exception {
    loadKeyToCacheDir(INVALID_KEY);
    Path path = websessionDir.resolve(INVALID_KEY);
    assertThat(cache.getIfPresent(path.toString())).isNull();
  }

  @Test
  public void getIfPresentTest() throws Exception {
    loadKeyToCacheDir(EXISTING_KEY);
    assertThat(cache.getIfPresent(EXISTING_KEY)).isNotNull();
  }

  /** getAllPresent() returns only the keys that deserialize to a valid Val. */
  @Test
  public void getAllPresentTest() throws Exception {
    loadKeyToCacheDir(EMPTY_KEY);
    loadKeyToCacheDir(INVALID_KEY);
    loadKeyToCacheDir(EXISTING_KEY);
    List<String> keys = ImmutableList.of(EMPTY_KEY, EXISTING_KEY);
    assertThat(cache.getAllPresent(keys).size()).isEqualTo(1);
    assertThat(cache.getAllPresent(keys)).containsKey(EXISTING_KEY);
  }

  /**
   * get(key, loader): the loader's (null) result is returned when the key is
   * absent; once the session file exists it is read from disk instead.
   */
  @Test
  public void getTest() throws Exception {
    class ValueLoader implements Callable<Val> {
      @Override
      public Val call() throws Exception {
        return null;
      }
    }
    assertThat(cache.get(EXISTING_KEY, new ValueLoader())).isNull();
    loadKeyToCacheDir(EXISTING_KEY);
    assertThat(cache.get(EXISTING_KEY, new ValueLoader())).isNotNull();
  }

  /** A throwing loader must surface as ExecutionException, per Cache.get contract. */
  @Test(expected = ExecutionException.class)
  public void getTestCallableThrowsException() throws Exception {
    class ValueLoader implements Callable<Val> {
      @Override
      public Val call() throws Exception {
        throw new Exception();
      }
    }
    assertThat(cache.get(EXISTING_KEY, new ValueLoader())).isNull();
  }

  /** invalidateAll(keys) removes exactly the given sessions' files. */
  @Test
  public void invalidateAllCollectionTest() throws Exception {
    int numberOfKeys = 15;
    List<String> keys = loadKeysToCacheDir(numberOfKeys);
    assertThat(cache.size()).isEqualTo(numberOfKeys);
    assertThat(isDirEmpty(websessionDir)).isFalse();
    cache.invalidateAll(keys);
    assertThat(cache.size()).isEqualTo(0);
    assertThat(isDirEmpty(websessionDir)).isTrue();
  }

  /** invalidateAll() with no arguments empties the whole directory. */
  @Test
  public void invalidateAllTest() throws Exception {
    int numberOfKeys = 5;
    loadKeysToCacheDir(numberOfKeys);
    assertThat(cache.size()).isEqualTo(numberOfKeys);
    assertThat(isDirEmpty(websessionDir)).isFalse();
    cache.invalidateAll();
    assertThat(cache.size()).isEqualTo(0);
    assertThat(isDirEmpty(websessionDir)).isTrue();
  }

  /** invalidate(key) deletes the backing file. */
  @Test
  public void invalidateTest() throws Exception {
    Path fileToDelete = Files.createFile(websessionDir.resolve(EXISTING_KEY));
    assertThat(Files.exists(fileToDelete)).isTrue();
    cache.invalidate(EXISTING_KEY);
    assertThat(Files.exists(fileToDelete)).isFalse();
  }

  /** invalidate() with a non-String key is a no-op. */
  @Test
  public void invalidateTestObjectNotString() throws Exception {
    loadKeyToCacheDir(EXISTING_KEY);
    assertThat(cache.size()).isEqualTo(1);
    cache.invalidate(new Object());
    assertThat(cache.size()).isEqualTo(1);
  }

  /** put() round-trips a Val read from one key under a new key. */
  @Test
  public void putTest() throws Exception {
    loadKeyToCacheDir(EXISTING_KEY);
    Val val = cache.getIfPresent(EXISTING_KEY);
    cache.put(NEW_KEY, val);
    assertThat(cache.getIfPresent(NEW_KEY)).isNotNull();
  }

  @Test
  public void putAllTest() throws Exception {
    loadKeyToCacheDir(EXISTING_KEY);
    Val val = cache.getIfPresent(EXISTING_KEY);
    Map<String, Val> sessions = ImmutableMap.of(NEW_KEY, val);
    cache.putAll(sessions);
    assertThat(cache.asMap()).containsKey(NEW_KEY);
  }

  /** put() into a deleted directory must fail silently (session simply absent). */
  @Test
  public void putWithErrorsTest() throws Exception {
    loadKeyToCacheDir(EXISTING_KEY);
    Val val = cache.getIfPresent(EXISTING_KEY);
    tempFolder.delete();
    cache.put(NEW_KEY, val);
    assertThat(cache.getIfPresent(NEW_KEY)).isNull();
  }

  /** size() counts files in the websession directory, valid or not. */
  @Test
  public void sizeTest() throws Exception {
    int numberOfKeys = 10;
    loadKeysToCacheDir(numberOfKeys);
    assertThat(cache.size()).isEqualTo(numberOfKeys);
  }

  /** stats() is unsupported by this cache implementation and returns null. */
  @Test
  public void statTest() throws Exception {
    assertThat(cache.stats()).isNull();
  }

  /** Creates {@code number} empty session files and returns their generated key names. */
  private List<String> loadKeysToCacheDir(int number) throws IOException {
    List<String> keys = new ArrayList<>();
    for (int i = 0; i < number; i++) {
      Path tmp = Files.createTempFile(websessionDir, "cache", null);
      keys.add(tmp.getFileName().toString());
    }
    return keys;
  }

  /** Returns true if {@code dir} contains no entries at all. */
  private boolean isDirEmpty(final Path dir) throws IOException {
    try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(dir)) {
      return !dirStream.iterator().hasNext();
    }
  }

  /**
   * Installs the fixture for {@code key} into the websession directory:
   * EMPTY_KEY becomes a zero-length file, every other key is copied from the
   * classpath resource of the same name.
   */
  private Path loadKeyToCacheDir(String key) throws IOException {
    if (key.equals(EMPTY_KEY)) {
      return Files.createFile(websessionDir.resolve(EMPTY_KEY));
    }
    try (InputStream in = loadFile(key)) {
      Path target = websessionDir.resolve(key);
      Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
      return target;
    }
  }

  /** Opens the classpath resource "/{file}" backing a fixture key. */
  private InputStream loadFile(String file) {
    return this.getClass().getResourceAsStream("/" + file);
  }
}
/*******************************************************************************
 * ALMA - Atacama Large Millimeter Array
 * Copyright (c) ESO - European Southern Observatory, 2011
 * (in the framework of the ALMA collaboration).
 * All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *******************************************************************************/
package alma.acs.service;

import gov.sandia.CosNotification.NotificationServiceMonitorControl;
import gov.sandia.CosNotification.NotificationServiceMonitorControlPackage.InvalidName;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.List;

//import java.util.logging.Logger;

import alma.acs.exceptions.AcsJException;
//import alma.acs.logging.AcsLogLevel;
import alma.acs.nsstatistics.ChannelData;

/**
 * Accumulates statistics for one notification channel across successive
 * monitoring iterations: min/max/avg aggregates for numeric statistics,
 * current and all-time string lists (consumers, suppliers, admins, slowest
 * consumers) and per-queue size/element-count aggregates.
 *
 * <p>Not thread-safe: {@link #setData(ChannelData)} and the getters are
 * expected to be driven by a single monitoring thread.
 */
class ChannelStats {
    // Keys into 'aggregateData' for scalar statistics.
    static final String N_CONSUMERS = "n. consumers";
    static final String N_SUPPLIERS = "n. suppliers";
    static final String N_SLOWEST_CONSUMERS = "n. slowest consumers";
    static final String N_SUPPLIERS_ADMIN = "n. suppliers admin";
    static final String N_CONSUMERS_ADMIN = "n. consumers admin";
    // Keys into 'currStringLists' (values of the most recent iteration).
    static final String CURR_SLOWEST_CONSUMERS = "Current slowest consumers";
    static final String CURR_CONSUMERS = "Current consumers";
    static final String CURR_SUPPLIERS = "Current suppliers";
    static final String CURR_CONSUMERS_ADMIN = "Current consumers admin";
    // BUG FIX: was "Current consumers admin", identical to CURR_CONSUMERS_ADMIN,
    // so supplier-admin and consumer-admin lists overwrote each other in the map.
    static final String CURR_SUPPLIERS_ADMIN = "Current suppliers admin";
    // Keys into 'allStringLists' (union over all iterations).
    static final String ALL_SLOWEST_CONSUMERS = "All slowest consumers";
    static final String ALL_CONSUMERS = "All consumers";
    static final String ALL_SUPPLIERS = "All suppliers";
    static final String ALL_CONSUMERS_ADMIN = "All consumers admin";
    // BUG FIX: was "All consumers admin", identical to ALL_CONSUMERS_ADMIN (same
    // key-collision problem as above).
    static final String ALL_SUPPLIERS_ADMIN = "All suppliers admin";

    /**
     * Running min/max/avg/sum aggregate of a long-valued statistic.
     * The iteration counter is maintained by the enclosing class.
     */
    class AggregateData {
        public long max;
        public long min;
        public float avg;
        public long sum;

        public AggregateData() {
            max = 0;
            min = 0;
            avg = 0;
            sum = 0;
        }

        /** Seeds the aggregate from its first sample. */
        public AggregateData(long value) {
            max = value;
            min = value;
            avg = (float)value;
            sum = value;
        }

        /**
         * Folds in a new sample. {@code counter} is the number of samples
         * already accumulated; {@code counter == 0} resets min/max.
         */
        public void set(long value, long counter) {
            if (max < value || counter == 0) {
                max = value;
            }
            if (min > value || counter == 0) {
                min = value;
            }
            sum += value;
            // BUG FIX: 'sum / (counter + 1)' performed long integer division and
            // truncated the average before the float assignment.
            avg = (float) sum / (counter + 1);
        }

        public String toString() {
            return "MIN=" + String.valueOf(min) + ", MAX=" + String.valueOf(max)
                    + ", AVG=" + String.valueOf(avg);
        }
    }

    private HashMap<String, AggregateData> aggregateData;
    private HashMap<String, ArrayList<String>> currStringLists; // Current slowest consumers, consumers, suppliers, consumers admin, suppliers admin
    private HashMap<String, ArrayList<String>> allStringLists;  // All slowest consumers, consumers, suppliers, consumers admin, suppliers admin
    private HashMap<String, AggregateData> queuesElementCount;  // key: Queue name
    private HashMap<String, AggregateData> queuesSize;          // key: Queue name
    private HashMap<String, Double> queuesSizeAvg;              // key: Queue name, running sum of per-iteration averages
    private HashMap<String, Long> queuesSizeMax;                // key: Queue name
    private long oldestEvent;                                   // last "OldestEvent" sample, -1 until seen
    public long counter;                                        // completed iterations

    ChannelStats() {
        aggregateData = new HashMap<String, AggregateData>();
        currStringLists = new HashMap<String, ArrayList<String>>();
        allStringLists = new HashMap<String, ArrayList<String>>();
        queuesElementCount = new HashMap<String, AggregateData>();
        queuesSize = new HashMap<String, AggregateData>();
        queuesSizeAvg = new HashMap<String, Double>();
        queuesSizeMax = new HashMap<String, Long>();
        oldestEvent = -1;
        counter = 0;
    }

    /** Discards all accumulated statistics and restarts the iteration counter. */
    public void reset() {
        counter = 0;
        aggregateData.clear();
        currStringLists.clear();
        allStringLists.clear();
        queuesElementCount.clear();
        queuesSize.clear();
        queuesSizeAvg.clear();
        queuesSizeMax.clear();
        oldestEvent = -1;
    }

    /**
     * Pulls one snapshot of statistics from the channel's MonitorControl and
     * folds it into the aggregates, then advances the iteration counter.
     * Statistic names are matched by substring, mirroring the TAO MC naming.
     */
    public void setData(ChannelData channel) {
        NotificationServiceMonitorControl nsm = channel.getParent().getMc();
        String[] statsNames = nsm.get_statistic_names();
        String channelName = "Channel " + channel.getName() + " - ";
        // FIX: removed an unused local 'oldestEvent' that shadowed the field.
        for (int i = 0; i < statsNames.length; ++i) {
            try {
                if (statsNames[i].contains("ConsumerCount")) {
                    Monitor.Numeric n = nsm.get_statistic(statsNames[i]).data_union.num();
                    setValue(N_CONSUMERS, (long)n.last);
                } else if (statsNames[i].contains("SupplierCount")) {
                    Monitor.Numeric n = nsm.get_statistic(statsNames[i]).data_union.num();
                    setValue(N_SUPPLIERS, (long)n.last);
                } else if (statsNames[i].contains("QueueSize")) {
                    // n.count, n.maximum, n.last, n.average, n.dlist[0].value, n.dlist[0].timestamp
                    Monitor.Numeric n = nsm.get_statistic(statsNames[i]).data_union.num();
                    setQueueSize(statsNames[i], (long)n.maximum, n.average);
                } else if (statsNames[i].contains("QueueElementCount")) {
                    Monitor.Numeric n = nsm.get_statistic(statsNames[i]).data_union.num();
                    setQueueElementCount(statsNames[i], (long)n.maximum);
                } else if (statsNames[i].contains("OldestEvent")) {
                    Monitor.Numeric n = nsm.get_statistic(statsNames[i]).data_union.num();
                    this.oldestEvent = (long)n.last;
                } else if (statsNames[i].contains("SlowestConsumers")) {
                    String[] slowestConsumers = nsm.get_statistic(statsNames[i]).data_union.list();
                    setArrayValues(CURR_SLOWEST_CONSUMERS, currStringLists, slowestConsumers);
                    addArrayValues(ALL_SLOWEST_CONSUMERS, allStringLists, slowestConsumers);
                    setValue(N_SLOWEST_CONSUMERS, slowestConsumers.length);
                } else if (statsNames[i].contains("SupplierNames")) {
                    String[] supplierNames = nsm.get_statistic(statsNames[i]).data_union.list();
                    setArrayValues(CURR_SUPPLIERS, currStringLists, supplierNames);
                    addArrayValues(ALL_SUPPLIERS, allStringLists, supplierNames);
                } else if (statsNames[i].contains("ConsumerNames")) {
                    String[] consumerNames = nsm.get_statistic(statsNames[i]).data_union.list();
                    setArrayValues(CURR_CONSUMERS, currStringLists, consumerNames);
                    addArrayValues(ALL_CONSUMERS, allStringLists, consumerNames);
                } else if (statsNames[i].contains("SupplierAdminNames")) {
                    String[] supplierAdminNames = nsm.get_statistic(statsNames[i]).data_union.list();
                    setArrayValues(CURR_SUPPLIERS_ADMIN, currStringLists, supplierAdminNames);
                    addArrayValues(ALL_SUPPLIERS_ADMIN, allStringLists, supplierAdminNames);
                    setValue(N_SUPPLIERS_ADMIN, supplierAdminNames.length);
                } else if (statsNames[i].contains("ConsumerAdminNames")) {
                    String[] consumerAdminNames = nsm.get_statistic(statsNames[i]).data_union.list();
                    setArrayValues(CURR_CONSUMERS_ADMIN, currStringLists, consumerAdminNames);
                    addArrayValues(ALL_CONSUMERS_ADMIN, allStringLists, consumerAdminNames);
                    setValue(N_CONSUMERS_ADMIN, consumerAdminNames.length);
                }
            } catch (InvalidName ex) {
                //logger.log(AcsLogLevel.ERROR, "Invalid name in ncStatisticsService::logMCStats", ex);
                // TODO
            }
        }
        iterationDone();
    }

    /** Replaces the list stored under {@code key} with a copy of {@code values}. */
    protected void setArrayValues(String key, HashMap<String, ArrayList<String>> data, String[] values) {
        ArrayList<String> array = new ArrayList<String>();
        for (int i = 0; i < values.length; ++i) {
            array.add(values[i]);
        }
        data.put(key, array);
    }

    /** Folds a QueueElementCount sample into the aggregate for its queue. */
    protected void setQueueElementCount(String name, long value) {
        // The queue name is the statistic-name prefix before "QueueElementCount".
        int i = name.indexOf("QueueElementCount");
        String queueName = name.substring(0, i);
        AggregateData data = queuesElementCount.get(queueName);
        if (null == data) {
            data = new AggregateData(value);
        } else {
            data.set(value, counter);
        }
        queuesElementCount.put(queueName, data);
    }

    /**
     * Folds a QueueSize sample into the aggregates for its queue: min/max/avg
     * of the maximum, a running sum of per-iteration averages (divided by
     * {@link #counter} on read-out) and the all-time maximum.
     */
    protected void setQueueSize(String name, long value, double average) {
        int i = name.indexOf("QueueSize");
        String queueName = name.substring(0, i);
        AggregateData data = queuesSize.get(queueName);
        if (null == data) {
            data = new AggregateData(value);
        } else {
            data.set(value, counter);
        }
        queuesSize.put(queueName, data);
        Double lastAvg = queuesSizeAvg.get(queueName);
        if (lastAvg == null) {
            queuesSizeAvg.put(queueName, Double.valueOf(average));
        } else {
            queuesSizeAvg.put(queueName, lastAvg + average);
        }
        Long lastMax = queuesSizeMax.get(queueName);
        if (lastMax == null || lastMax < value) {
            queuesSizeMax.put(queueName, Long.valueOf(value));
        }
    }

    /** Merges {@code values} into the list under {@code key}, skipping duplicates. */
    protected void addArrayValues(String key, HashMap<String, ArrayList<String>> data, String[] values) {
        ArrayList<String> array = null;
        if (false == data.containsKey(key)) {
            array = new ArrayList<String>();
        } else {
            array = data.get(key);
        }
        for (int i = 0; i < values.length; ++i) {
            if (false == array.contains(values[i])) {
                array.add(values[i]);
            }
        }
        data.put(key, array);
    }

    /** Folds one scalar sample into the aggregate stored under {@code key}. */
    public void setValue(String key, long value) {
        AggregateData data = null;
        if (0 == counter) {
            // First iteration: start a fresh aggregate regardless of prior state.
            data = new AggregateData(value);
        } else {
            data = aggregateData.get(key);
            if (null == data) {
                data = new AggregateData(value);
            } else {
                data.set(value, counter);
            }
        }
        aggregateData.put(key, data);
    }

    /** Maximum seen for {@code key}, or null before the first completed iteration. */
    public Long getMaxValue(String key) {
        if (0 == counter) {
            return null;
        }
        AggregateData data = aggregateData.get(key);
        if (null == data) {
            return null;
        }
        return data.max;
    }

    /** Minimum seen for {@code key}, or null before the first completed iteration. */
    public Long getMinValue(String key) {
        if (0 == counter) {
            return null;
        }
        AggregateData data = aggregateData.get(key);
        if (null == data) {
            return null;
        }
        return data.min;
    }

    /** Average for {@code key}, or null before the first completed iteration. */
    public Float getAvgValue(String key) {
        if (0 == counter) {
            return null;
        }
        AggregateData data = aggregateData.get(key);
        if (null == data) {
            return null;
        }
        return data.avg;
    }

    /** Marks one monitoring iteration as complete; returns the new count. */
    public long iterationDone() {
        counter++;
        return counter;
    }

    /**
     * Full debug dump of every aggregate and list.
     * NOTE(review): assumes setData() has populated every key — a missing
     * statistic would make the unguarded map lookups throw NPE; verify against
     * the caller before relying on this with partial data.
     */
    public Map<String, String> getDbgParams() {
        Map<String, String> dbgParams = new HashMap<String, String>();
        AggregateData data = null;
        ArrayList<String> strList = null;

        data = aggregateData.get(N_CONSUMERS);
        dbgParams.put("Num consumers", data.toString());
        data = aggregateData.get(N_SUPPLIERS);
        dbgParams.put("Num suppliers", data.toString());
        data = aggregateData.get(N_SLOWEST_CONSUMERS);
        dbgParams.put("Num slowest consumers", data.toString());
        data = aggregateData.get(N_CONSUMERS_ADMIN);
        dbgParams.put("Num consumers admin", data.toString());
        data = aggregateData.get(N_SUPPLIERS_ADMIN);
        dbgParams.put("Num suppliers admin", data.toString());

        strList = currStringLists.get(CURR_CONSUMERS);
        dbgParams.put("Current consumers", array2str(strList));
        strList = currStringLists.get(CURR_SUPPLIERS);
        dbgParams.put("Current suppliers", array2str(strList));
        strList = currStringLists.get(CURR_SLOWEST_CONSUMERS);
        dbgParams.put("Current slowest consumers", array2str(strList));
        strList = currStringLists.get(CURR_CONSUMERS_ADMIN);
        dbgParams.put("Current consumers admin", array2str(strList));
        strList = currStringLists.get(CURR_SUPPLIERS_ADMIN);
        dbgParams.put("Current suppliers admin", array2str(strList));

        strList = allStringLists.get(ALL_CONSUMERS);
        dbgParams.put("All consumers", array2str(strList));
        strList = allStringLists.get(ALL_SUPPLIERS);
        dbgParams.put("All suppliers", array2str(strList));
        strList = allStringLists.get(ALL_SLOWEST_CONSUMERS);
        dbgParams.put("All slowest consumers", array2str(strList));
        strList = allStringLists.get(ALL_CONSUMERS_ADMIN);
        dbgParams.put("All consumers admin", array2str(strList));
        strList = allStringLists.get(ALL_SUPPLIERS_ADMIN);
        dbgParams.put("All suppliers admin", array2str(strList));
        return dbgParams;
    }

    /**
     * Condensed report for the statistics log; threshold parameters decide
     * which optional sections (oldest event, queue sizes) are included.
     */
    public Map<String, String> getInfoParams(ServiceParameters params) {
        Map<String, String> infoParams = new HashMap<String, String>();
        String str = "", strMax = "", strAvg = "";
        AggregateData data = null;
        ArrayList<String> strList = null;
        long lMax = 0;

        data = aggregateData.get(N_CONSUMERS);
        infoParams.put("Num consumers", data.toString());
        data = aggregateData.get(N_SUPPLIERS);
        infoParams.put("Num suppliers", data.toString());

        strMax = "";
        strAvg = "";
        lMax = 0;
        for (Map.Entry<String, AggregateData> entry : queuesElementCount.entrySet()) {
            strMax += String.valueOf(entry.getValue().max) + ", ";
            strAvg += String.valueOf(entry.getValue().avg) + ", ";
            if (entry.getValue().max > lMax) {
                lMax = entry.getValue().max;
            }
        }
        infoParams.put("Max events in queues", strMax);
        infoParams.put("Avg events in queues", strAvg);
        if (lMax >= params.getThOldestEvent()) {
            infoParams.put("Oldest event", String.valueOf(oldestEvent));
        }

        str = "";
        lMax = 0;
        for (Map.Entry<String, Long> entry : queuesSizeMax.entrySet()) {
            str += entry.getValue().toString() + ", ";
            if (entry.getValue() > lMax) {
                lMax = entry.getValue().longValue();
            }
        }
        if (lMax >= params.getThQueueSize()) {
            infoParams.put("Max size of queues [bytes]", str);
        }

        str = "";
        for (Map.Entry<String, Double> entry : queuesSizeAvg.entrySet()) {
            // Stored value is the sum of per-iteration averages; divide by the
            // iteration count to recover the overall average.
            double avg = entry.getValue().doubleValue();
            if (counter == 0) {
                avg = 0;
            } else {
                avg = avg / counter;
            }
            str += String.valueOf(avg) + ", ";
        }
        if (lMax >= params.getThQueueSize()) {
            infoParams.put("Avg size of queues [bytes]", str);
        }

        strList = currStringLists.get(CURR_SLOWEST_CONSUMERS);
        if (strList.size() > 0) {
            infoParams.put("Current slowest consumers", array2str(strList));
        }
        strList = allStringLists.get(ALL_SLOWEST_CONSUMERS);
        if (strList.size() > 0) {
            infoParams.put("All slowest consumers", array2str(strList));
        }
        return infoParams;
    }

    /** Joins the list with commas; empty string for an empty list. */
    protected String array2str(List<String> list) {
        if (list.size() <= 0) {
            return "";
        }
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < list.size() - 1; ++i) {
            sb.append(list.get(i)).append(",");
        }
        return sb.append(list.get(list.size() - 1)).toString();
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.vcs.log.graph.linearBek;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Condition;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcs.log.graph.actions.GraphAction;
import com.intellij.vcs.log.graph.api.EdgeFilter;
import com.intellij.vcs.log.graph.api.GraphLayout;
import com.intellij.vcs.log.graph.api.LinearGraph;
import com.intellij.vcs.log.graph.api.elements.GraphEdge;
import com.intellij.vcs.log.graph.api.elements.GraphEdgeType;
import com.intellij.vcs.log.graph.api.elements.GraphElement;
import com.intellij.vcs.log.graph.api.elements.GraphNode;
import com.intellij.vcs.log.graph.api.permanent.PermanentGraphInfo;
import com.intellij.vcs.log.graph.impl.facade.BekBaseController;
import com.intellij.vcs.log.graph.impl.facade.CascadeController;
import com.intellij.vcs.log.graph.impl.facade.GraphChangesUtil;
import com.intellij.vcs.log.graph.impl.facade.bek.BekIntMap;
import com.intellij.vcs.log.graph.utils.LinearGraphUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Cascade controller implementing the "linear bek" view of a VCS log graph:
 * merge fragments are collapsed into dotted edges on top of a bek-ordered
 * delegate graph. Handles mouse clicks (collapse node / expand dotted edge),
 * hovers (highlight what would collapse/expand) and the global
 * collapse-all / expand-all buttons.
 */
public class LinearBekController extends CascadeController {
  private static final Logger LOG = Logger.getInstance(LinearBekController.class);
  // The compiled (collapsed) graph presented to the UI; wraps the delegate graph.
  @NotNull private final LinearBekGraph myCompiledGraph;
  private final LinearBekGraphBuilder myLinearBekGraphBuilder;
  private final BekGraphLayout myBekGraphLayout;

  public LinearBekController(@NotNull BekBaseController controller, @NotNull PermanentGraphInfo permanentGraphInfo) {
    super(controller, permanentGraphInfo);
    myCompiledGraph = new LinearBekGraph(getDelegateGraph());
    // Layout indices must be remapped from bek order back to the permanent graph order.
    myBekGraphLayout = new BekGraphLayout(permanentGraphInfo.getPermanentGraphLayout(), controller.getBekIntMap());
    myLinearBekGraphBuilder = new LinearBekGraphBuilder(myCompiledGraph, myBekGraphLayout);

    long start = System.currentTimeMillis();
    myLinearBekGraphBuilder.collapseAll();
    LOG.info("Linear bek took " + (System.currentTimeMillis() - start) / 1000.0 + " sec");
  }

  /** Delegate changes pass through unmodified; collapse state is kept as-is. */
  @NotNull
  @Override
  protected LinearGraphAnswer delegateGraphChanged(@NotNull LinearGraphAnswer delegateAnswer) {
    return delegateAnswer;
  }

  /**
   * Routes a UI action: click on a node tries to collapse it, otherwise tries
   * to expand an adjacent dotted edge; click on a dotted edge expands it;
   * hover highlights what the corresponding click would do; the two buttons
   * collapse/expand everything. Returns null when the action has no effect.
   */
  @Nullable
  @Override
  protected LinearGraphAnswer performAction(@NotNull LinearGraphAction action) {
    if (action.getAffectedElement() != null) {
      if (action.getType() == GraphAction.Type.MOUSE_CLICK) {
        GraphElement graphElement = action.getAffectedElement().getGraphElement();
        if (graphElement instanceof GraphNode) {
          LinearGraphAnswer answer = collapseNode((GraphNode)graphElement);
          if (answer != null) return answer;
          // Nothing to collapse: clicking a node also expands dotted edges touching it.
          for (GraphEdge dottedEdge : getAllAdjacentDottedEdges((GraphNode)graphElement)) {
            LinearGraphAnswer expandedAnswer = expandEdge(dottedEdge);
            if (expandedAnswer != null) return expandedAnswer;
          }
        }
        else if (graphElement instanceof GraphEdge) {
          return expandEdge((GraphEdge)graphElement);
        }
      }
      else if (action.getType() == GraphAction.Type.MOUSE_OVER) {
        GraphElement graphElement = action.getAffectedElement().getGraphElement();
        if (graphElement instanceof GraphNode) {
          LinearGraphAnswer answer = highlightNode((GraphNode)graphElement);
          if (answer != null) return answer;
          for (GraphEdge dottedEdge : getAllAdjacentDottedEdges((GraphNode)graphElement)) {
            LinearGraphAnswer highlightAnswer = highlightEdge(dottedEdge);
            if (highlightAnswer != null) return highlightAnswer;
          }
        }
        else if (graphElement instanceof GraphEdge) {
          return highlightEdge((GraphEdge)graphElement);
        }
      }
    }
    else if (action.getType() == GraphAction.Type.BUTTON_COLLAPSE) {
      return collapseAll();
    }
    else if (action.getType() == GraphAction.Type.BUTTON_EXPAND) {
      return expandAll();
    }
    return null;
  }

  /** All dotted (collapsed-fragment) edges touching the given node. */
  @NotNull
  private List<GraphEdge> getAllAdjacentDottedEdges(GraphNode graphElement) {
    return ContainerUtil.filter(myCompiledGraph.getAdjacentEdges(graphElement.getNodeIndex(), EdgeFilter.ALL), new Condition<GraphEdge>() {
      @Override
      public boolean value(GraphEdge graphEdge) {
        return graphEdge.getType() == GraphEdgeType.DOTTED;
      }
    });
  }

  /** Expands everything by clearing both edge-overlay sets (applied via updater). */
  @NotNull
  private LinearGraphAnswer expandAll() {
    return new LinearGraphAnswer(GraphChangesUtil.SOME_CHANGES) {
      @Nullable
      @Override
      public Runnable getGraphUpdater() {
        return new Runnable() {
          @Override
          public void run() {
            myCompiledGraph.myDottedEdges.removeAll();
            myCompiledGraph.myHiddenEdges.removeAll();
          }
        };
      }
    };
  }

  /**
   * Collapses everything on a working copy first so the answer can report the
   * exact edge diff; the actual graph is mutated later by the updater.
   */
  @NotNull
  private LinearGraphAnswer collapseAll() {
    final LinearBekGraph.WorkingLinearBekGraph workingGraph = new LinearBekGraph.WorkingLinearBekGraph(myCompiledGraph);
    new LinearBekGraphBuilder(workingGraph, myBekGraphLayout).collapseAll();
    return new LinearGraphAnswer(
      GraphChangesUtil.edgesReplaced(workingGraph.getRemovedEdges(), workingGraph.getAddedEdges(), getDelegateGraph())) {
      @Nullable
      @Override
      public Runnable getGraphUpdater() {
        return new Runnable() {
          @Override
          public void run() {
            workingGraph.applyChanges();
          }
        };
      }
    };
  }

  /** Highlights all nodes that collapsing at this node would swallow, or null. */
  @Nullable
  private LinearGraphAnswer highlightNode(GraphNode node) {
    Set<LinearBekGraphBuilder.MergeFragment> toCollapse = collectFragmentsToCollapse(node);
    if (toCollapse.isEmpty()) return null;

    Set<Integer> toHighlight = ContainerUtil.newHashSet();
    for (LinearBekGraphBuilder.MergeFragment fragment : toCollapse) {
      toHighlight.addAll(fragment.getAllNodes());
    }

    return LinearGraphUtils.createSelectedAnswer(myCompiledGraph, toHighlight);
  }

  /** Highlights the endpoints of a dotted edge; null for any other edge type. */
  @Nullable
  private LinearGraphAnswer highlightEdge(GraphEdge edge) {
    if (edge.getType() == GraphEdgeType.DOTTED) {
      return LinearGraphUtils.createSelectedAnswer(myCompiledGraph, ContainerUtil.set(edge.getUpNodeIndex(), edge.getDownNodeIndex()));
    }
    return null;
  }

  /** Collapses every fragment reachable from the node, or null if none found. */
  @Nullable
  private LinearGraphAnswer collapseNode(GraphNode node) {
    SortedSet<Integer> toCollapse = collectNodesToCollapse(node);

    if (toCollapse.isEmpty()) return null;

    for (Integer i : toCollapse) {
      myLinearBekGraphBuilder.collapseFragment(i);
    }

    return new LinearGraphAnswer(GraphChangesUtil.SOME_CHANGES);
  }

  /**
   * Collects the fragment-head indices to collapse, in descending order —
   * collapsing from the bottom up keeps earlier indices valid.
   */
  private SortedSet<Integer> collectNodesToCollapse(GraphNode node) {
    SortedSet<Integer> toCollapse = new TreeSet<Integer>(new Comparator<Integer>() {
      @Override
      public int compare(Integer o1, Integer o2) {
        return o2.compareTo(o1);
      }
    });

    for (LinearBekGraphBuilder.MergeFragment f : collectFragmentsToCollapse(node)) {
      toCollapse.add(f.getParent());
      toCollapse.addAll(f.getTailsAndBody());
    }

    return toCollapse;
  }

  /**
   * Breadth-first walk from the node, gathering collapsible merge fragments;
   * capped at 10 merges to bound the work done per click/hover.
   */
  @NotNull
  private Set<LinearBekGraphBuilder.MergeFragment> collectFragmentsToCollapse(GraphNode node) {
    Set<LinearBekGraphBuilder.MergeFragment> result = ContainerUtil.newHashSet();

    int mergesCount = 0;
    LinkedHashSet<Integer> toProcess = ContainerUtil.newLinkedHashSet();
    toProcess.add(node.getNodeIndex());
    while (!toProcess.isEmpty()) {
      Integer i = ContainerUtil.getFirstItem(toProcess);
      toProcess.remove(i);

      LinearBekGraphBuilder.MergeFragment fragment = myLinearBekGraphBuilder.getFragment(i);
      if (fragment == null) continue;

      result.add(fragment);
      toProcess.addAll(fragment.getTailsAndBody());

      mergesCount++;
      if (mergesCount > 10) break;
    }
    return result;
  }

  /** Expands one dotted edge back into the edges it replaced; null otherwise. */
  @Nullable
  private LinearGraphAnswer expandEdge(GraphEdge edge) {
    if (edge.getType() == GraphEdgeType.DOTTED) {
      return new LinearGraphAnswer(
        GraphChangesUtil.edgesReplaced(Collections.singleton(edge), myCompiledGraph.expandEdge(edge), getDelegateGraph()));
    }
    return null;
  }

  @NotNull
  private LinearGraph getDelegateGraph() {
    return getDelegateController().getCompiledGraph();
  }

  @NotNull
  @Override
  public LinearGraph getCompiledGraph() {
    return myCompiledGraph;
  }

  /**
   * GraphLayout view in bek index space: translates node indices through the
   * bek mapping before consulting the permanent layout, and maps results back.
   */
  private static class BekGraphLayout implements GraphLayout {
    private final GraphLayout myGraphLayout;
    private final BekIntMap myBekIntMap;

    public BekGraphLayout(GraphLayout graphLayout, BekIntMap bekIntMap) {
      myGraphLayout = graphLayout;
      myBekIntMap = bekIntMap;
    }

    @Override
    public int getLayoutIndex(int nodeIndex) {
      return myGraphLayout.getLayoutIndex(myBekIntMap.getUsualIndex(nodeIndex));
    }

    @Override
    public int getOneOfHeadNodeIndex(int nodeIndex) {
      int usualIndex = myGraphLayout.getOneOfHeadNodeIndex(myBekIntMap.getUsualIndex(nodeIndex));
      return myBekIntMap.getBekIndex(usualIndex);
    }

    @NotNull
    @Override
    public List<Integer> getHeadNodeIndex() {
      List<Integer> bekIndexes = new ArrayList<Integer>();
      for (int head : myGraphLayout.getHeadNodeIndex()) {
        bekIndexes.add(myBekIntMap.getBekIndex(head));
      }
      return bekIndexes;
    }
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive.parquet; import com.facebook.presto.hive.HiveColumnHandle; import com.facebook.presto.hive.HivePartitionKey; import com.facebook.presto.hive.HiveUtil; import com.facebook.presto.hive.parquet.reader.ParquetReader; import com.facebook.presto.spi.ConnectorPageSource; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilder; import com.facebook.presto.spi.block.BlockBuilderStatus; import com.facebook.presto.spi.block.LazyBlock; import com.facebook.presto.spi.block.LazyBlockLoader; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.DecimalType; import com.facebook.presto.spi.type.FixedWidthType; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeManager; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import io.airlift.slice.Slice; import io.airlift.units.DataSize; import org.apache.hadoop.fs.Path; import org.joda.time.DateTimeZone; import parquet.column.ColumnDescriptor; import parquet.schema.MessageType; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Properties; import static com.facebook.presto.hive.HiveErrorCode.HIVE_CURSOR_ERROR; import static com.facebook.presto.hive.HiveUtil.bigintPartitionKey; import static 
com.facebook.presto.hive.HiveUtil.booleanPartitionKey; import static com.facebook.presto.hive.HiveUtil.charPartitionKey; import static com.facebook.presto.hive.HiveUtil.datePartitionKey; import static com.facebook.presto.hive.HiveUtil.doublePartitionKey; import static com.facebook.presto.hive.HiveUtil.getPrefilledColumnValue; import static com.facebook.presto.hive.HiveUtil.integerPartitionKey; import static com.facebook.presto.hive.HiveUtil.longDecimalPartitionKey; import static com.facebook.presto.hive.HiveUtil.shortDecimalPartitionKey; import static com.facebook.presto.hive.HiveUtil.smallintPartitionKey; import static com.facebook.presto.hive.HiveUtil.timestampPartitionKey; import static com.facebook.presto.hive.HiveUtil.tinyintPartitionKey; import static com.facebook.presto.hive.HiveUtil.varcharPartitionKey; import static com.facebook.presto.hive.parquet.ParquetTypeUtils.getParquetType; import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.Chars.isCharType; import static com.facebook.presto.spi.type.DateType.DATE; import static com.facebook.presto.spi.type.Decimals.isLongDecimal; import static com.facebook.presto.spi.type.Decimals.isShortDecimal; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.IntegerType.INTEGER; import static com.facebook.presto.spi.type.SmallintType.SMALLINT; import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP; import static com.facebook.presto.spi.type.TinyintType.TINYINT; import static com.facebook.presto.spi.type.Varchars.isVarcharType; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Maps.uniqueIndex; import static java.lang.String.format; import static 
java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Objects.requireNonNull;

/**
 * A {@link ConnectorPageSource} backed by a {@link ParquetReader}.
 * <p>
 * Regular columns are produced as {@link LazyBlock}s that materialize on first
 * access; partition-key columns, hidden columns, and columns absent from the
 * Parquet file are pre-built once as constant blocks and sliced per batch.
 */
class ParquetPageSource
        implements ConnectorPageSource
{
    private static final int MAX_VECTOR_LENGTH = 1024;
    private static final long GUESSED_MEMORY_USAGE = new DataSize(16, DataSize.Unit.MEGABYTE).toBytes();

    private final ParquetReader parquetReader;
    private final ParquetDataSource dataSource;
    private final MessageType requestedSchema;
    // for debugging heap dump
    private final List<String> columnNames;
    private final List<Type> types;

    // constantBlocks[i] is non-null iff column i is prefilled (partition key,
    // hidden, or missing from the file); such columns never touch the reader.
    private final Block[] constantBlocks;
    private final int[] hiveColumnIndexes;

    private final long totalBytes;

    private int batchId;
    private boolean closed;
    private long readTimeNanos;

    public ParquetPageSource(
            ParquetReader parquetReader,
            ParquetDataSource dataSource,
            MessageType fileSchema,
            MessageType requestedSchema,
            long totalBytes,
            Properties splitSchema,
            List<HiveColumnHandle> columns,
            List<HivePartitionKey> partitionKeys,
            TupleDomain<HiveColumnHandle> effectivePredicate,
            DateTimeZone hiveStorageTimeZone,
            TypeManager typeManager,
            boolean useParquetColumnNames,
            Path path)
    {
        checkArgument(totalBytes >= 0, "totalBytes is negative");
        requireNonNull(splitSchema, "splitSchema is null");
        requireNonNull(columns, "columns is null");
        requireNonNull(partitionKeys, "partitionKeys is null");
        requireNonNull(effectivePredicate, "effectivePredicate is null");

        this.parquetReader = parquetReader;
        this.dataSource = dataSource;
        this.requestedSchema = requestedSchema;
        this.totalBytes = totalBytes;

        Map<String, HivePartitionKey> partitionKeysByName = uniqueIndex(partitionKeys, HivePartitionKey::getName);

        int size = columns.size();
        this.constantBlocks = new Block[size];
        this.hiveColumnIndexes = new int[size];

        ImmutableList.Builder<String> namesBuilder = ImmutableList.builder();
        ImmutableList.Builder<Type> typesBuilder = ImmutableList.builder();
        for (int columnIndex = 0; columnIndex < size; columnIndex++) {
            HiveColumnHandle column = columns.get(columnIndex);
            String name = column.getName();
            Type type = typeManager.getType(column.getTypeSignature());

            namesBuilder.add(name);
            typesBuilder.add(type);

            hiveColumnIndexes[columnIndex] = column.getHiveColumnIndex();

            if (column.isPartitionKey() || column.isHidden()) {
                HivePartitionKey partitionKey = partitionKeysByName.get(name);
                String columnValue = getPrefilledColumnValue(column, partitionKey, path);
                constantBlocks[columnIndex] = buildPrefilledBlock(type, name, columnValue, hiveStorageTimeZone);
            }
            else if (getParquetType(column, fileSchema, useParquetColumnNames) == null) {
                // The column does not exist in this Parquet file: every row is null.
                constantBlocks[columnIndex] = buildNullBlock(type);
            }
        }
        types = typesBuilder.build();
        columnNames = namesBuilder.build();
    }

    /**
     * Builds a block of {@code MAX_VECTOR_LENGTH} nulls for a column that is
     * absent from the Parquet file.
     */
    private static Block buildNullBlock(Type type)
    {
        BlockBuilder blockBuilder = type.createBlockBuilder(new BlockBuilderStatus(), MAX_VECTOR_LENGTH);
        for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
            blockBuilder.appendNull();
        }
        return blockBuilder.build();
    }

    /**
     * Builds a block holding {@code MAX_VECTOR_LENGTH} copies of a prefilled
     * (partition-key or hidden) column value, parsed according to {@code type}.
     *
     * @param type the Presto type of the column
     * @param name the column name (used in error messages and key parsing)
     * @param columnValue the textual value to parse and repeat
     * @param hiveStorageTimeZone time zone used to interpret timestamp values
     * @throws PrestoException with {@code NOT_SUPPORTED} if {@code type} cannot be prefilled
     */
    private static Block buildPrefilledBlock(Type type, String name, String columnValue, DateTimeZone hiveStorageTimeZone)
    {
        byte[] bytes = columnValue.getBytes(UTF_8);

        BlockBuilder blockBuilder;
        if (type instanceof FixedWidthType) {
            blockBuilder = type.createBlockBuilder(new BlockBuilderStatus(), MAX_VECTOR_LENGTH);
        }
        else {
            // variable-width types get a per-entry size hint
            blockBuilder = type.createBlockBuilder(new BlockBuilderStatus(), MAX_VECTOR_LENGTH, bytes.length);
        }

        if (HiveUtil.isHiveNull(bytes)) {
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                blockBuilder.appendNull();
            }
        }
        else if (type.equals(BOOLEAN)) {
            boolean value = booleanPartitionKey(columnValue, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                BOOLEAN.writeBoolean(blockBuilder, value);
            }
        }
        else if (type.equals(TINYINT)) {
            long value = tinyintPartitionKey(columnValue, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                TINYINT.writeLong(blockBuilder, value);
            }
        }
        else if (type.equals(SMALLINT)) {
            long value = smallintPartitionKey(columnValue, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                SMALLINT.writeLong(blockBuilder, value);
            }
        }
        else if (type.equals(INTEGER)) {
            long value = integerPartitionKey(columnValue, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                INTEGER.writeLong(blockBuilder, value);
            }
        }
        else if (type.equals(BIGINT)) {
            long value = bigintPartitionKey(columnValue, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                BIGINT.writeLong(blockBuilder, value);
            }
        }
        else if (type.equals(DOUBLE)) {
            double value = doublePartitionKey(columnValue, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                DOUBLE.writeDouble(blockBuilder, value);
            }
        }
        else if (isVarcharType(type)) {
            Slice value = varcharPartitionKey(columnValue, name, type);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                type.writeSlice(blockBuilder, value);
            }
        }
        else if (isCharType(type)) {
            Slice value = charPartitionKey(columnValue, name, type);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                type.writeSlice(blockBuilder, value);
            }
        }
        else if (type.equals(TIMESTAMP)) {
            long value = timestampPartitionKey(columnValue, hiveStorageTimeZone, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                TIMESTAMP.writeLong(blockBuilder, value);
            }
        }
        else if (type.equals(DATE)) {
            long value = datePartitionKey(columnValue, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                DATE.writeLong(blockBuilder, value);
            }
        }
        else if (isShortDecimal(type)) {
            long value = shortDecimalPartitionKey(columnValue, (DecimalType) type, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                type.writeLong(blockBuilder, value);
            }
        }
        else if (isLongDecimal(type)) {
            Slice value = longDecimalPartitionKey(columnValue, (DecimalType) type, name);
            for (int i = 0; i < MAX_VECTOR_LENGTH; i++) {
                type.writeSlice(blockBuilder, value);
            }
        }
        else {
            throw new PrestoException(NOT_SUPPORTED, format("Unsupported column type %s for prefilled column: %s", type.getDisplayName(), name));
        }
        return blockBuilder.build();
    }

    @Override
    public long getTotalBytes()
    {
        return totalBytes;
    }

    @Override
    public long getCompletedBytes()
    {
        return dataSource.getReadBytes();
    }

    @Override
    public long getReadTimeNanos()
    {
        return readTimeNanos;
    }

    @Override
    public boolean isFinished()
    {
        return closed;
    }

    @Override
    public long getSystemMemoryUsage()
    {
        return GUESSED_MEMORY_USAGE;
    }

    /**
     * Advances the reader one batch and returns the corresponding page, or
     * {@code null} when the source is exhausted (the source is closed first).
     * Constant columns are sliced from their pre-built block; the rest are lazy.
     */
    @Override
    public Page getNextPage()
    {
        try {
            batchId++;
            long start = System.nanoTime();

            int batchSize = parquetReader.nextBatch();

            readTimeNanos += System.nanoTime() - start;

            if (closed || batchSize <= 0) {
                close();
                return null;
            }

            Block[] blocks = new Block[hiveColumnIndexes.length];
            for (int fieldId = 0; fieldId < blocks.length; fieldId++) {
                Type type = types.get(fieldId);
                if (constantBlocks[fieldId] != null) {
                    blocks[fieldId] = constantBlocks[fieldId].getRegion(0, batchSize);
                }
                else {
                    int fieldIndex = requestedSchema.getFieldIndex(columnNames.get(fieldId));
                    ColumnDescriptor columnDescriptor = requestedSchema.getColumns().get(fieldIndex);
                    blocks[fieldId] = new LazyBlock(batchSize, new ParquetBlockLoader(columnDescriptor, type));
                }
            }
            return new Page(batchSize, blocks);
        }
        catch (PrestoException e) {
            closeWithSuppression(e);
            throw e;
        }
        catch (IOException | RuntimeException | InterruptedException e) {
            if (e instanceof InterruptedException) {
                // restore the thread's interrupt status before translating
                Thread.currentThread().interrupt();
            }
            closeWithSuppression(e);
            throw new PrestoException(HIVE_CURSOR_ERROR, e);
        }
    }

    /**
     * Closes the source, attaching any secondary failure from {@code close()}
     * as a suppressed exception of {@code throwable}.
     */
    protected void closeWithSuppression(Throwable throwable)
    {
        requireNonNull(throwable, "throwable is null");
        try {
            close();
        }
        catch (RuntimeException e) {
            // Self-suppression not permitted
            if (e != throwable) {
                throwable.addSuppressed(e);
            }
        }
    }

    @Override
    public void close()
    {
        if (closed) {
            return;
        }
        closed = true;

        try {
            parquetReader.close();
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Loads a column block on first access; valid only while the batch that
     * created it is current (enforced by the {@code batchId} check).
     */
    private final class ParquetBlockLoader
            implements LazyBlockLoader<LazyBlock>
    {
        private final int expectedBatchId = batchId;
        private final ColumnDescriptor columnDescriptor;
        private final Type type;
        private boolean loaded;

        public ParquetBlockLoader(ColumnDescriptor columnDescriptor, Type type)
        {
            this.columnDescriptor = columnDescriptor;
            this.type = requireNonNull(type, "type is null");
        }

        @Override
        public final void load(LazyBlock lazyBlock)
        {
            if (loaded) {
                return;
            }

            checkState(batchId == expectedBatchId);

            try {
                Block block = parquetReader.readBlock(columnDescriptor, type);
                lazyBlock.setBlock(block);
            }
            catch (IOException e) {
                throw new PrestoException(HIVE_CURSOR_ERROR, e);
            }

            loaded = true;
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.kubernetes.cluster.utils; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.Set; import java.util.TreeSet; import java.util.stream.Collectors; import com.fasterxml.jackson.databind.ObjectMapper; import io.fabric8.kubernetes.api.model.ConfigMap; import io.fabric8.kubernetes.api.model.PodBuilder; import io.fabric8.kubernetes.api.model.PodListBuilder; import io.fabric8.kubernetes.client.server.mock.KubernetesMockServer; import io.fabric8.mockwebserver.utils.ResponseProvider; import okhttp3.Headers; import okhttp3.mockwebserver.RecordedRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A Test server to interact with Kubernetes for locking on a ConfigMap. 
*/ public class LockTestServer extends KubernetesMockServer { private static final Logger LOG = LoggerFactory.getLogger(LockTestServer.class); private boolean refuseRequests; private Long delayRequests; private Set<String> pods; public LockTestServer(ConfigMapLockSimulator lockSimulator) { this(lockSimulator, Collections.emptySet()); } public LockTestServer(ConfigMapLockSimulator lockSimulator, Collection<String> initialPods) { this.pods = new TreeSet<>(initialPods); expect().get().withPath("/api/v1/namespaces/test/configmaps/" + lockSimulator.getConfigMapName()).andReply(new ResponseProvider<Object>() { ThreadLocal<Integer> responseCode = new ThreadLocal<>(); private Headers headers = new Headers.Builder().build(); @Override public int getStatusCode() { return responseCode.get(); } @Override public Object getBody(RecordedRequest recordedRequest) { delayIfNecessary(); if (refuseRequests) { responseCode.set(500); return ""; } ConfigMap map = lockSimulator.getConfigMap(); if (map != null) { responseCode.set(200); return map; } else { responseCode.set(404); return ""; } } @Override public Headers getHeaders() { return headers; } @Override public void setHeaders(Headers headers) { this.headers = headers; } }).always(); expect().post().withPath("/api/v1/namespaces/test/configmaps").andReply(new ResponseProvider<Object>() { ThreadLocal<Integer> responseCode = new ThreadLocal<>(); private Headers headers = new Headers.Builder().build(); @Override public int getStatusCode() { return responseCode.get(); } @Override public Object getBody(RecordedRequest recordedRequest) { delayIfNecessary(); if (refuseRequests) { responseCode.set(500); return ""; } ConfigMap map = convert(recordedRequest); if (map == null || map.getMetadata() == null || !lockSimulator.getConfigMapName().equals(map.getMetadata().getName())) { throw new IllegalArgumentException("Illegal configMap received"); } boolean done = lockSimulator.setConfigMap(map, true); if (done) { responseCode.set(201); return 
lockSimulator.getConfigMap(); } else { responseCode.set(500); return ""; } } @Override public Headers getHeaders() { return headers; } @Override public void setHeaders(Headers headers) { this.headers = headers; } }).always(); expect().put().withPath("/api/v1/namespaces/test/configmaps/" + lockSimulator.getConfigMapName()).andReply(new ResponseProvider<Object>() { ThreadLocal<Integer> responseCode = new ThreadLocal<>(); private Headers headers = new Headers.Builder().build(); @Override public int getStatusCode() { return responseCode.get(); } @Override public Object getBody(RecordedRequest recordedRequest) { delayIfNecessary(); if (refuseRequests) { responseCode.set(500); return ""; } ConfigMap map = convert(recordedRequest); boolean done = lockSimulator.setConfigMap(map, false); if (done) { responseCode.set(200); return lockSimulator.getConfigMap(); } else { responseCode.set(409); return ""; } } @Override public Headers getHeaders() { return headers; } @Override public void setHeaders(Headers headers) { this.headers = headers; } }).always(); // Other resources expect().get().withPath("/api/v1/namespaces/test/pods") .andReply(200, request -> new PodListBuilder().withNewMetadata().withResourceVersion("1").and().withItems(getCurrentPods() .stream().map(name -> new PodBuilder().withNewMetadata().withName(name).and().build()).collect(Collectors.toList())).build()) .always(); } public boolean isRefuseRequests() { return refuseRequests; } public void setRefuseRequests(boolean refuseRequests) { this.refuseRequests = refuseRequests; } public synchronized Collection<String> getCurrentPods() { return new TreeSet<>(this.pods); } public synchronized void removePod(String pod) { this.pods.remove(pod); } public synchronized void addPod(String pod) { this.pods.add(pod); } public Long getDelayRequests() { return delayRequests; } public void setDelayRequests(Long delayRequests) { this.delayRequests = delayRequests; } private void delayIfNecessary() { if (delayRequests != null) { try 
{ Thread.sleep(delayRequests); } catch (InterruptedException e) { throw new RuntimeException(e); } } } private ConfigMap convert(RecordedRequest request) { try { ObjectMapper mapper = new ObjectMapper(); return mapper.readValue(request.getBody().readByteArray(), ConfigMap.class); } catch (IOException e) { throw new IllegalArgumentException("Erroneous data", e); } } }
package com.utilis.game.obj;

import java.awt.*;
import java.awt.image.BufferedImage;

import javax.swing.ImageIcon;

/**
 * A class representing any kind of custom GUI element.
 * <p>
 * An element has a normal image plus optional rollover and click images, and a
 * position/size tracked both as individual fields and as a {@link Rectangle}.
 *
 * @author Cin316
 * @see com.utilis.game.gui.Canvas
 */
public class HUDElement {

    /** the <code>Image</code> shown when this element is neither pressed nor rolled over */
    protected Image normalImage;

    /** whether this <code>HUDElement</code> has a <code>rolloverImage</code> */
    protected boolean hasRolloverImage;

    /** the <code>Image</code> shown when this element is rolled over by the mouse */
    protected Image rolloverImage;

    /** whether this <code>HUDElement</code> has a <code>clickImage</code> */
    protected boolean hasClickImage;

    /** the <code>Image</code> shown when this element is clicked on by the mouse */
    protected Image clickImage;

    /** the <code>Image</code> representing the current state of this element */
    protected Image currentImage;

    /** the current width of this element, in pixels */
    protected int width;

    /** the current height of this element, in pixels */
    protected int height;

    /** the current x position of this element, in pixels */
    protected int x;

    /** the current y position of this element, in pixels */
    protected int y;

    /** a <code>Rectangle</code> representing the area of this element; kept in sync by the setters */
    protected Rectangle area;

    /**
     * the rollover <code>imageType</code>, used in {@link #HUDElement(Image, Image, int)}
     */
    public static final int ROLLOVER_IMAGE = 1;

    /**
     * the clicked <code>imageType</code>, used in {@link #HUDElement(Image, Image, int)}
     */
    public static final int CLICK_IMAGE = 2;

    /**
     * Creates a blank <code>HUDElement</code> of the specified width and height at (0, 0).
     *
     * @param width the width in pixels
     * @param height the height in pixels
     */
    public HUDElement(int width, int height) {
        this(0, 0, width, height);
    }

    /**
     * Creates a blank <code>HUDElement</code> of the specified position and size.
     * The normal image is an empty ARGB buffer of the given dimensions.
     *
     * @param x the x position in pixels
     * @param y the y position in pixels
     * @param width the width in pixels
     * @param height the height in pixels
     */
    public HUDElement(int x, int y, int width, int height) {
        this.x = x;
        this.y = y;
        this.width = width;
        this.height = height;
        area = new Rectangle(x, y, width, height);

        normalImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        rolloverImage = null;
        clickImage = null;
        hasClickImage = false;
        hasRolloverImage = false;
        currentImage = normalImage;
    }

    /**
     * Creates a blank <code>HUDElement</code> with values from the specified
     * <code>Rectangle</code>. The rectangle itself is retained as this
     * element's {@link #area}, so later mutations are shared.
     *
     * @param r <code>Rectangle</code> supplying position and size
     */
    public HUDElement(Rectangle r) {
        this((int) r.getX(), (int) r.getY(), (int) r.getWidth(), (int) r.getHeight());
        // preserve historical behavior: area aliases the caller's rectangle
        area = r;
    }

    /**
     * Creates a <code>HUDElement</code> sized from the given <code>Image</code>,
     * using it as the <code>normalImage</code>.
     *
     * @param i <code>Image</code> used for dimensions and as the normal image
     */
    public HUDElement(Image i) {
        initSizeFrom(i);
        normalImage = i;
        rolloverImage = null;
        clickImage = null;
        hasClickImage = false;
        hasRolloverImage = false;
        currentImage = normalImage;
    }

    /**
     * Creates a <code>HUDElement</code> with a normal image plus one secondary
     * image whose role is selected by <code>imageType</code>.
     *
     * @param i <code>Image</code> used for dimensions and as the normal image
     * @param secondaryImage the additional image
     * @param imageType {@link #ROLLOVER_IMAGE} or {@link #CLICK_IMAGE}
     * @throws IllegalArgumentException if <code>imageType</code> is neither constant
     * @see #CLICK_IMAGE
     * @see #ROLLOVER_IMAGE
     */
    public HUDElement(Image i, Image secondaryImage, int imageType) {
        initSizeFrom(i);
        normalImage = i;
        if (imageType == ROLLOVER_IMAGE) {
            rolloverImage = secondaryImage;
            hasRolloverImage = true;
            clickImage = null;
            hasClickImage = false;
        } else if (imageType == CLICK_IMAGE) {
            clickImage = secondaryImage;
            hasClickImage = true;
            rolloverImage = null;
            hasRolloverImage = false;
        } else {
            // was a silent no-op ("//Throw error here."); fail fast instead
            throw new IllegalArgumentException(
                    "imageType must be ROLLOVER_IMAGE or CLICK_IMAGE, got: " + imageType);
        }
        currentImage = normalImage;
    }

    /**
     * Creates a <code>HUDElement</code> with all three images.
     *
     * @param i <code>Image</code> used for dimensions and as the normal image
     * @param rImage <code>Image</code> used as the <code>rolloverImage</code>
     * @param cImage <code>Image</code> used as the <code>clickImage</code>
     */
    public HUDElement(Image i, Image rImage, Image cImage) {
        initSizeFrom(i);
        normalImage = i;
        rolloverImage = rImage;
        clickImage = cImage;
        hasClickImage = true;
        hasRolloverImage = true;
        currentImage = normalImage;
    }

    /**
     * Sets position to (0, 0) and derives width/height and {@link #area} from
     * the given image. <code>ImageIcon</code> is used because it loads the
     * image before reporting dimensions.
     */
    private void initSizeFrom(Image i) {
        x = 0;
        y = 0;
        ImageIcon icon = new ImageIcon(i);
        width = icon.getIconWidth();
        height = icon.getIconHeight();
        area = new Rectangle(x, y, width, height);
    }

    /**
     * gets the <code>currentImage</code>
     * @return the current <code>currentImage</code>
     */
    public Image getCurrentImage() {
        return currentImage;
    }

    /**
     * gets the <code>normalImage</code>
     * @return the current <code>normalImage</code>
     */
    public Image getNormalImage() {
        return normalImage;
    }

    /**
     * gets the value of <code>hasRolloverImage</code>
     * @return the current <code>hasRolloverImage</code>
     */
    public boolean hasRolloverImage() {
        return hasRolloverImage;
    }

    /**
     * gets the <code>rolloverImage</code>
     * @return the current <code>rolloverImage</code>
     */
    public Image getRolloverImage() {
        return rolloverImage;
    }

    /**
     * gets the value of <code>hasClickImage</code>
     * @return the current <code>hasClickImage</code>
     */
    public boolean hasClickImage() {
        return hasClickImage;
    }

    /**
     * gets the <code>clickImage</code>
     * @return the current <code>clickImage</code>
     */
    public Image getClickImage() {
        return clickImage;
    }

    /**
     * gets the <code>width</code> of this <code>HUDElement</code>
     * @return the current <code>width</code> in pixels
     */
    public int getWidth() {
        return width;
    }

    /**
     * gets the <code>height</code> of this <code>HUDElement</code>
     * @return the current <code>height</code> in pixels
     */
    public int getHeight() {
        return height;
    }

    /**
     * gets the x position of this <code>HUDElement</code>
     * @return the current x position in pixels
     */
    public int getX() {
        return x;
    }

    /**
     * gets the y position of this <code>HUDElement</code>
     * @return the current y position in pixels
     */
    public int getY() {
        return y;
    }

    /**
     * gets the <code>Rectangle</code> representing the area of this <code>HUDElement</code>
     * @return the current area <code>Rectangle</code>
     */
    public Rectangle getArea() {
        return area;
    }

    /**
     * sets <code>normalImage</code> to the specified <code>Image</code>
     * @param normalImage the <code>Image</code> to set <code>normalImage</code> to
     */
    public void setNormalImage(Image normalImage) {
        this.normalImage = normalImage;
    }

    /**
     * sets <code>rolloverImage</code> and marks it present
     * @param rolloverImage the <code>Image</code> to set <code>rolloverImage</code> to
     */
    public void setRolloverImage(Image rolloverImage) {
        this.rolloverImage = rolloverImage;
        hasRolloverImage = true;
    }

    /**
     * sets <code>clickImage</code> and marks it present
     * @param clickImage the <code>Image</code> to set <code>clickImage</code> to
     */
    public void setClickImage(Image clickImage) {
        this.clickImage = clickImage;
        hasClickImage = true;
    }

    /**
     * sets <code>width</code>, keeping {@link #area} in sync
     * @param width the new width in pixels
     */
    public void setWidth(int width) {
        this.width = width;
        area.setSize(width, area.height);
    }

    /**
     * sets <code>height</code>, keeping {@link #area} in sync
     * @param height the new height in pixels
     */
    public void setHeight(int height) {
        this.height = height;
        area.setSize(area.width, height);
    }

    /**
     * sets the x position, keeping {@link #area} in sync
     * @param x the new x position in pixels
     */
    public void setX(int x) {
        this.x = x;
        area.setLocation(x, area.y);
    }

    /**
     * sets the y position, keeping {@link #area} in sync
     * @param y the new y position in pixels
     */
    public void setY(int y) {
        this.y = y;
        area.setLocation(area.x, y);
    }

    /**
     * sets <code>normalImage</code> as the <code>currentImage</code>
     */
    public void setCurrentImageToNormalImage() {
        currentImage = normalImage;
    }

    /**
     * sets <code>rolloverImage</code> as the <code>currentImage</code>
     */
    public void setCurrentImageToRolloverImage() {
        currentImage = rolloverImage;
    }

    /**
     * sets <code>clickImage</code> as the <code>currentImage</code>
     */
    public void setCurrentImageToClickImage() {
        currentImage = clickImage;
    }
}
/*
 * Copyright 2002-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.web.reactive.function.client;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Predicate;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

import org.springframework.core.NamedThreadLocal;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.codec.ClientCodecConfigurer;
import org.springframework.web.reactive.function.BodyExtractors;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.when;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.verifyNoMoreInteractions;

/**
 * Unit tests for {@link DefaultWebClient}.
 *
 * <p>All tests run against a mocked {@link ExchangeFunction}; no network I/O
 * is performed. The {@link ArgumentCaptor} records every {@link ClientRequest}
 * handed to the exchange function so tests can assert on the request that the
 * client actually built.
 *
 * @author Rossen Stoyanchev
 * @author Brian Clozel
 */
@MockitoSettings(strictness = Strictness.LENIENT)
public class DefaultWebClientTests {

	// Mocked transport layer; every request the client builds ends up here.
	@Mock
	private ExchangeFunction exchangeFunction;

	// Captures the ClientRequest passed to exchangeFunction.exchange(..).
	@Captor
	private ArgumentCaptor<ClientRequest> captor;

	private WebClient.Builder builder;


	@BeforeEach
	public void setup() {
		// Stub a response whose Void body completes empty, so retrieve()/bodyToMono(Void.class) succeeds.
		ClientResponse mockResponse = mock(ClientResponse.class);
		when(mockResponse.bodyToMono(Void.class)).thenReturn(Mono.empty());
		given(this.exchangeFunction.exchange(this.captor.capture())).willReturn(Mono.just(mockResponse));
		this.builder = WebClient.builder().baseUrl("/base").exchangeFunction(this.exchangeFunction);
	}


	@Test
	public void basic() {
		this.builder.build().get().uri("/path")
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.url().toString()).isEqualTo("/base/path");
		assertThat(request.headers()).isEqualTo(new HttpHeaders());
		assertThat(request.cookies()).isEqualTo(Collections.emptyMap());
	}

	@Test
	public void uriBuilder() {
		this.builder.build().get()
				.uri(builder -> builder.path("/path").queryParam("q", "12").build())
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.url().toString()).isEqualTo("/base/path?q=12");
	}

	@Test // gh-22705
	public void uriBuilderWithUriTemplate() {
		this.builder.build().get()
				.uri("/path/{id}", builder -> builder.queryParam("q", "12").build("identifier"))
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.url().toString()).isEqualTo("/base/path/identifier?q=12");
		// The original (unexpanded) template is stored as a request attribute.
		assertThat(request.attribute(WebClient.class.getName() + ".uriTemplate").get()).isEqualTo("/path/{id}");
	}

	@Test
	public void uriBuilderWithPathOverride() {
		this.builder.build().get()
				.uri(builder -> builder.replacePath("/path").build())
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		// replacePath discards the configured "/base" prefix entirely.
		ClientRequest request = verifyAndGetRequest();
		assertThat(request.url().toString()).isEqualTo("/path");
	}

	@Test
	public void requestHeaderAndCookie() {
		this.builder.build().get().uri("/path").accept(MediaType.APPLICATION_JSON)
				.cookies(cookies -> cookies.add("id", "123")) // SPR-16178
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.headers().getFirst("Accept")).isEqualTo("application/json");
		assertThat(request.cookies().getFirst("id")).isEqualTo("123");
	}

	@Test
	@SuppressWarnings("deprecation")
	public void contextFromThreadLocal() {
		// Verifies that a ThreadLocal value captured at subscription time via
		// context(..) is visible to downstream filters even after a thread hop.
		WebClient client = this.builder
				.filter((request, next) ->
						// Async, continue on different thread
						Mono.delay(Duration.ofMillis(10)).then(next.exchange(request)))
				.filter((request, next) ->
						Mono.deferContextual(contextView -> {
							String fooValue = contextView.get("foo");
							return next.exchange(ClientRequest.from(request).header("foo", fooValue).build());
						}))
				.build();

		ThreadLocal<String> fooHolder = new ThreadLocal<>();
		fooHolder.set("bar");
		try {
			client.get().uri("/path")
					.context(context -> context.put("foo", fooHolder.get()))
					.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));
		}
		finally {
			fooHolder.remove();
		}

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.headers().getFirst("foo")).isEqualTo("bar");
	}

	@Test
	public void httpRequest() {
		this.builder.build().get().uri("/path")
				.httpRequest(httpRequest -> {})
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.httpRequest()).isNotNull();
	}

	@Test
	public void defaultHeaderAndCookie() {
		WebClient client = this.builder
				.defaultHeader("Accept", "application/json")
				.defaultCookie("id", "123")
				.build();

		client.get().uri("/path")
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.headers().getFirst("Accept")).isEqualTo("application/json");
		assertThat(request.cookies().getFirst("id")).isEqualTo("123");
	}

	@Test
	public void defaultHeaderAndCookieOverrides() {
		// Per-request values replace builder-level defaults.
		WebClient client = this.builder
				.defaultHeader("Accept", "application/json")
				.defaultCookie("id", "123")
				.build();

		client.get().uri("/path")
				.header("Accept", "application/xml")
				.cookie("id", "456")
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.headers().getFirst("Accept")).isEqualTo("application/xml");
		assertThat(request.cookies().getFirst("id")).isEqualTo("456");
	}

	@Test
	public void defaultHeaderAndCookieCopies() {
		// Each build() snapshots the builder state: client1 must not observe
		// the changes made to the shared builder before client2 was built.
		WebClient client1 = this.builder
				.defaultHeader("Accept", "application/json")
				.defaultCookie("id", "123")
				.build();
		WebClient client2 = this.builder
				.defaultHeader("Accept", "application/xml")
				.defaultCookies(cookies -> cookies.set("id", "456"))
				.build();

		client1.get().uri("/path")
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.headers().getFirst("Accept")).isEqualTo("application/json");
		assertThat(request.cookies().getFirst("id")).isEqualTo("123");

		client2.get().uri("/path")
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		request = verifyAndGetRequest();
		assertThat(request.headers().getFirst("Accept")).isEqualTo("application/xml");
		assertThat(request.cookies().getFirst("id")).isEqualTo("456");
	}

	@Test
	public void defaultRequest() {
		ThreadLocal<String> context = new NamedThreadLocal<>("foo");

		Map<String, Object> actual = new HashMap<>();
		ExchangeFilterFunction filter = (request, next) -> {
			actual.putAll(request.attributes());
			return next.exchange(request);
		};

		WebClient client = this.builder
				.defaultRequest(spec -> spec.attribute("foo", context.get()))
				.filter(filter)
				.build();

		try {
			context.set("bar");
			client.get().uri("/path").attribute("foo", "bar")
					.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));
		}
		finally {
			context.remove();
		}

		assertThat(actual.get("foo")).isEqualTo("bar");
	}

	@Test
	public void bodyObjectPublisher() {
		// Passing a Publisher to bodyValue(..) is rejected: a Publisher must go
		// through body(..) so the element type can be declared.
		Mono<Void> mono = Mono.empty();
		WebClient client = this.builder.build();

		assertThatIllegalArgumentException().isThrownBy(() ->
				client.post().uri("https://example.com").bodyValue(mono));
	}

	@Test
	public void mutateDoesCopy() {
		// First, build the clients

		WebClient.Builder builder = WebClient.builder()
				.filter((request, next) -> next.exchange(request))
				.defaultHeader("foo", "bar")
				.defaultCookie("foo", "bar");

		WebClient client1 = builder.build();

		WebClient client2 = builder.filter((request, next) -> next.exchange(request))
				.defaultHeader("baz", "qux")
				.defaultCookie("baz", "qux")
				.build();

		WebClient client1a = client1.mutate()
				.filter((request, next) -> next.exchange(request))
				.defaultHeader("baz", "qux")
				.defaultCookie("baz", "qux")
				.build();

		// Now, verify what each client has..

		WebClient.Builder builder1 = client1.mutate();
		builder1.filters(filters -> assertThat(filters.size()).isEqualTo(1));
		builder1.defaultHeaders(headers -> assertThat(headers.size()).isEqualTo(1));
		builder1.defaultCookies(cookies -> assertThat(cookies.size()).isEqualTo(1));

		WebClient.Builder builder2 = client2.mutate();
		builder2.filters(filters -> assertThat(filters.size()).isEqualTo(2));
		builder2.defaultHeaders(headers -> assertThat(headers.size()).isEqualTo(2));
		builder2.defaultCookies(cookies -> assertThat(cookies.size()).isEqualTo(2));

		WebClient.Builder builder1a = client1a.mutate();
		builder1a.filters(filters -> assertThat(filters.size()).isEqualTo(2));
		builder1a.defaultHeaders(headers -> assertThat(headers.size()).isEqualTo(2));
		builder1a.defaultCookies(cookies -> assertThat(cookies.size()).isEqualTo(2));
	}

	@Test
	void cloneBuilder() {
		Consumer<ClientCodecConfigurer> codecsConfig = c -> {};
		ExchangeFunction exchangeFunction = request -> Mono.empty();
		WebClient.Builder builder = WebClient.builder().baseUrl("https://example.org")
				.exchangeFunction(exchangeFunction)
				.filter((request, next) -> Mono.empty())
				.codecs(codecsConfig);

		WebClient.Builder clonedBuilder = builder.clone();

		assertThat(clonedBuilder).extracting("baseUrl").isEqualTo("https://example.org");
		assertThat(clonedBuilder).extracting("filters").isNotNull();
		assertThat(clonedBuilder).extracting("strategiesConfigurers").isNotNull();
		assertThat(clonedBuilder).extracting("exchangeFunction").isEqualTo(exchangeFunction);
	}

	@Test
	public void withStringAttribute() {
		Map<String, Object> actual = new HashMap<>();
		ExchangeFilterFunction filter = (request, next) -> {
			actual.putAll(request.attributes());
			return next.exchange(request);
		};

		this.builder.filter(filter).build()
				.get().uri("/path")
				.attribute("foo", "bar")
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		assertThat(actual.get("foo")).isEqualTo("bar");

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.attribute("foo").get()).isEqualTo("bar");
	}

	@Test
	public void withNullAttribute() {
		// A null attribute value removes the attribute rather than storing null.
		Map<String, Object> actual = new HashMap<>();
		ExchangeFilterFunction filter = (request, next) -> {
			actual.putAll(request.attributes());
			return next.exchange(request);
		};

		this.builder.filter(filter).build()
				.get().uri("/path")
				.attribute("foo", null)
				.retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		assertThat(actual.get("foo")).isNull();

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.attribute("foo").isPresent()).isFalse();
	}

	@Test
	public void apply() {
		WebClient client = this.builder
				.apply(builder -> builder
						.defaultHeader("Accept", "application/json")
						.defaultCookie("id", "123"))
				.build();

		client.get().uri("/path").retrieve().bodyToMono(Void.class).block(Duration.ofSeconds(10));

		ClientRequest request = verifyAndGetRequest();
		assertThat(request.headers().getFirst("Accept")).isEqualTo("application/json");
		assertThat(request.cookies().getFirst("id")).isEqualTo("123");
	}

	@Test
	public void switchToErrorOnEmptyClientResponseMono() {
		// An exchange function that completes empty is surfaced as an error,
		// not as a silent completion.
		ExchangeFunction exchangeFunction = mock(ExchangeFunction.class);
		given(exchangeFunction.exchange(any())).willReturn(Mono.empty());
		WebClient client = WebClient.builder().baseUrl("/base").exchangeFunction(exchangeFunction).build();
		StepVerifier.create(client.get().uri("/path").retrieve().bodyToMono(Void.class))
				.expectErrorMessage("The underlying HTTP client completed without emitting a response.")
				.verify(Duration.ofSeconds(5));
	}

	@Test
	public void shouldApplyFiltersAtSubscription() {
		// Filters must not run at assembly time; only when the Mono is subscribed.
		WebClient client = this.builder
				.filter((request, next) -> next.exchange(ClientRequest
						.from(request)
						.header("Custom", "value")
						.build())
				)
				.build();
		Mono<Void> result = client.get().uri("/path").retrieve().bodyToMono(Void.class);
		verifyNoInteractions(this.exchangeFunction);
		result.block(Duration.ofSeconds(10));
		ClientRequest request = verifyAndGetRequest();
		assertThat(request.headers().getFirst("Custom")).isEqualTo("value");
	}

	@Test // gh-23880
	public void onStatusHandlersOrderIsPreserved() {
		// The first registered matching handler wins.
		ClientResponse response = ClientResponse.create(HttpStatus.BAD_REQUEST).build();
		given(exchangeFunction.exchange(any())).willReturn(Mono.just(response));

		Mono<Void> result = this.builder.build().get()
				.uri("/path")
				.retrieve()
				.onStatus(HttpStatus::is4xxClientError, resp -> Mono.error(new IllegalStateException("1")))
				.onStatus(HttpStatus::is4xxClientError, resp -> Mono.error(new IllegalStateException("2")))
				.bodyToMono(Void.class);

		StepVerifier.create(result).expectErrorMessage("1").verify();
	}

	@Test // gh-23880
	@SuppressWarnings("unchecked")
	public void onStatusHandlersDefaultHandlerIsLast() {
		// When no custom handler matches, the built-in handler still raises
		// WebClientResponseException — and every custom predicate was consulted.
		ClientResponse response = ClientResponse.create(HttpStatus.BAD_REQUEST).build();
		given(exchangeFunction.exchange(any())).willReturn(Mono.just(response));

		Predicate<HttpStatus> predicate1 = mock(Predicate.class);
		Predicate<HttpStatus> predicate2 = mock(Predicate.class);

		given(predicate1.test(HttpStatus.BAD_REQUEST)).willReturn(false);
		given(predicate2.test(HttpStatus.BAD_REQUEST)).willReturn(false);

		Mono<Void> result = this.builder.build().get()
				.uri("/path")
				.retrieve()
				.onStatus(predicate1, resp -> Mono.error(new IllegalStateException()))
				.onStatus(predicate2, resp -> Mono.error(new IllegalStateException()))
				.bodyToMono(Void.class);

		StepVerifier.create(result).expectError(WebClientResponseException.class).verify();

		verify(predicate1).test(HttpStatus.BAD_REQUEST);
		verify(predicate2).test(HttpStatus.BAD_REQUEST);
	}

	@Test // gh-26069
	public void onStatusHandlersApplyForToEntityMethods() {
		ClientResponse response = ClientResponse.create(HttpStatus.BAD_REQUEST).build();
		given(exchangeFunction.exchange(any())).willReturn(Mono.just(response));

		WebClient.ResponseSpec spec = this.builder.build().get().uri("/path").retrieve();

		testStatusHandlerForToEntity(spec.toEntity(String.class));
		testStatusHandlerForToEntity(spec.toEntity(new ParameterizedTypeReference<String>() {}));
		testStatusHandlerForToEntity(spec.toEntityList(String.class));
		testStatusHandlerForToEntity(spec.toEntityList(new ParameterizedTypeReference<String>() {}));
		testStatusHandlerForToEntity(spec.toEntityFlux(String.class));
		testStatusHandlerForToEntity(spec.toEntityFlux(new ParameterizedTypeReference<String>() {}));
		testStatusHandlerForToEntity(spec.toEntityFlux(BodyExtractors.toFlux(String.class)));
	}

	// Asserts that the given response publisher fails with WebClientResponseException.
	private void testStatusHandlerForToEntity(Publisher<?> responsePublisher) {
		StepVerifier.create(responsePublisher).expectError(WebClientResponseException.class).verify();
	}

	/**
	 * Returns the single captured {@link ClientRequest} and verifies that the
	 * exchange function was invoked exactly once with it.
	 */
	private ClientRequest verifyAndGetRequest() {
		ClientRequest request = this.captor.getValue();
		verify(this.exchangeFunction).exchange(request);
		verifyNoMoreInteractions(this.exchangeFunction);
		return request;
	}

}
/*************************GO-LICENSE-START*********************************
 * Copyright 2014 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *************************GO-LICENSE-END***********************************/

package com.thoughtworks.go.presentation.pipelinehistory;

import java.util.Date;
import java.util.Objects;

import com.thoughtworks.go.domain.StageState;
import com.thoughtworks.go.domain.StageResult;
import com.thoughtworks.go.domain.StageIdentifier;
import com.thoughtworks.go.server.presentation.models.StageConfigurationModel;

import static com.thoughtworks.go.util.GoConstants.APPROVAL_SUCCESS;

/**
 * Presentation model describing one run (instance) of a pipeline stage:
 * its jobs, approval details, permissions, and relationship to the
 * previous stage in the pipeline.
 *
 * <p>Equality is based on {@code name} only. Instances may also be created
 * reflectively by ibatis via the no-arg constructor, in which case fields
 * (including {@code name}) can be null; equals/hashCode are null-safe for
 * that reason.
 */
public class StageInstanceModel implements StageConfigurationModel {
    private String name;
    private long id;
    private JobHistory jobHistory;
    private boolean canRun;
    // true if this stage history really happened
    private boolean scheduled = true;
    private String approvalType;
    private String approvedBy;
    private String counter;
    private boolean operatePermission;
    private StageInstanceModel previousStage;
    private StageResult result;
    private StageIdentifier identifier;
    // when non-null, this instance is a re-run of the stage run with this counter
    private Integer rerunOfCounter;
    private boolean selected;

    public boolean hasOperatePermission() {
        return operatePermission;
    }

    public void setOperatePermission(boolean operatePermission) {
        this.operatePermission = operatePermission;
    }

    public boolean isSelected() {
        return selected;
    }

    // for test
    public StageInstanceModel(String name, String counter, JobHistory jobHistory) {
        this.name = name;
        this.jobHistory = jobHistory;
        this.counter = counter;
    }

    // for test
    public StageInstanceModel(String name, String counter, JobHistory jobHistory, StageIdentifier identifier) {
        this(name, counter, jobHistory);
        this.identifier = identifier;
    }

    public StageInstanceModel(String name, String counter, StageResult result, StageIdentifier identifier) {
        this(name, counter, new JobHistory(), identifier);
        this.result = result;
    }

    // for ibatis
    public StageInstanceModel() {
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public JobHistory getBuildHistory() {
        return jobHistory;
    }

    public void setBuildHistory(JobHistory jobHistory) {
        this.jobHistory = jobHistory;
    }

    /** Aggregate state derived from the states of this stage's jobs. */
    public StageState getState() {
        return StageState.findByBuilds(jobHistory);
    }

    public String getApprovedBy() {
        return approvedBy;
    }

    public void setApprovedBy(String approvedBy) {
        this.approvedBy = approvedBy;
    }

    /** Human-readable approval status for display. */
    public String getApprovalDescription() {
        if (approvedBy == null) {
            return "Awaiting Approval";
        }
        return "Approved by " + approvedBy;
    }

    public String getApprovalType() {
        return approvalType;
    }

    /** "auto" for success-triggered stages, "manual" otherwise. */
    public String getApprovalTypeDescription() {
        return isAutoApproved() ? "auto" : "manual";
    }

    public boolean needsApproval() {
        return approvedBy == null && getState().completed();
    }

    public boolean isAutoApproved() {
        return APPROVAL_SUCCESS.equals(approvalType);
    }

    public Date getScheduledDate() {
        return jobHistory.getScheduledDate();
    }

    public boolean getCanRun() {
        return this.canRun;
    }

    public boolean getCanReRun() {
        // intentionally the same permission as getCanRun()
        return getCanRun();
    }

    public boolean getCanCancel() {
        return operatePermission && getState().isActive();
    }

    public void setCanRun(boolean canRun) {
        this.canRun = canRun;
    }

    public boolean isScheduled() {
        return scheduled;
    }

    public void setScheduled(boolean value) {
        this.scheduled = value;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        StageInstanceModel that = (StageInstanceModel) o;
        // Null-safe: the ibatis no-arg constructor can leave name unset.
        return Objects.equals(name, that.name);
    }

    @Override
    public int hashCode() {
        // Consistent with equals; Objects.hashCode(null) == 0 avoids an NPE
        // for instances created via the no-arg constructor.
        return Objects.hashCode(name);
    }

    public void setApprovalType(String approvalType) {
        this.approvalType = approvalType;
    }

    public String getCounter() {
        return counter;
    }

    public void setCounter(String counter) {
        this.counter = counter;
    }

    public void setSelected(boolean selected) {
        this.selected = selected;
    }

    public boolean getCanApprove() {
        return (getCanRun() && !isScheduled());
    }

    /** True if any job in this stage completed without success. */
    public boolean hasUnsuccessfullyCompleted() {
        for (JobHistoryItem jobHistoryItem : jobHistory) {
            if (jobHistoryItem.hasUnsuccessfullyCompleted()) {
                return true;
            }
        }
        return false;
    }

    /** True only if every job in this stage passed. */
    public boolean hasPassed() {
        for (JobHistoryItem jobHistoryItem : jobHistory) {
            if (!jobHistoryItem.hasPassed()) {
                return false;
            }
        }
        return true;
    }

    /** True if any job in this stage is still running. */
    public boolean isRunning() {
        for (JobHistoryItem jobHistoryItem : jobHistory) {
            if (jobHistoryItem.isRunning()) {
                return true;
            }
        }
        return false;
    }

    public boolean hasPreviousStage() {
        return this.previousStage != null;
    }

    public void setPreviousStage(StageInstanceModel previousStage) {
        this.previousStage = previousStage;
    }

    public StageInstanceModel getPreviousStage() {
        return previousStage;
    }

    public StageResult getResult() {
        return result;
    }

    public StageIdentifier getIdentifier() {
        return identifier;
    }

    public String locator() {
        return identifier.getStageLocator();
    }

    public boolean isRerunJobs() {
        return rerunOfCounter != null;
    }

    public Integer getRerunOfCounter() {
        return rerunOfCounter;
    }

    public void setRerunOfCounter(Integer rerunOfCounter) {
        this.rerunOfCounter = rerunOfCounter;
    }
}
/*
 * Copyright 2016 JBoss, by Red Hat, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.dashbuilder.client.navigation.widget;

import java.util.List;
import java.util.Stack;
import javax.enterprise.context.Dependent;
import javax.enterprise.event.Observes;
import javax.inject.Inject;

import com.google.gwt.user.client.ui.IsWidget;
import org.dashbuilder.client.navigation.NavigationManager;
import org.dashbuilder.client.navigation.plugin.PerspectivePluginManager;
import org.dashbuilder.navigation.NavGroup;
import org.dashbuilder.navigation.NavItem;
import org.dashbuilder.navigation.layout.LayoutRecursionIssue;
import org.dashbuilder.navigation.layout.LayoutRecursionIssueI18n;
import org.dashbuilder.navigation.layout.LayoutTemplateContext;
import org.dashbuilder.navigation.workbench.NavWorkbenchCtx;
import org.jboss.errai.common.client.api.IsElement;
import org.jboss.errai.ioc.client.container.SyncBeanManager;
import org.uberfire.client.mvp.PlaceManager;
import org.uberfire.ext.plugin.event.PluginSaved;
import org.uberfire.mvp.Command;
import org.uberfire.workbench.model.ActivityResourceType;

/**
 * A navigation widget that displays a set of navigation items using a navigable tile based approach where
 * {@link NavGroup} instances are displayed as folders and {@link NavItem} are shown as links to a specific
 * target asset (f.i: a perspective).
 */
@Dependent
public class NavTilesWidget extends BaseNavWidget {

    /** View contract: renders tiles, inline perspective content and the breadcrumb trail. */
    public interface View extends NavWidgetView<NavTilesWidget>, LayoutRecursionIssueI18n {

        void addTileWidget(IsElement tileWidget);

        void showTileContent(IsWidget tileContent);

        void clearBreadcrumb();

        void addBreadcrumbItem(String navItemName);

        void addBreadcrumbItem(String navItemName, Command onClicked);

        void infiniteRecursionError(String cause);
    }

    View view;
    PerspectivePluginManager perspectivePluginManager;
    PlaceManager placeManager;
    SyncBeanManager beanManager;
    // The nav item whose runtime perspective is currently shown inline, if any.
    NavItem currentPerspectiveNavItem = null;
    // Drill-down history backing the breadcrumb; top of the stack is the current item.
    Stack<NavItem> navItemStack = new Stack<>();

    @Inject
    public NavTilesWidget(View view,
                          NavigationManager navigationManager,
                          PerspectivePluginManager perspectivePluginManager,
                          PlaceManager placeManager,
                          SyncBeanManager beanManager) {
        super(view, navigationManager);
        this.view = view;
        this.perspectivePluginManager = perspectivePluginManager;
        this.placeManager = placeManager;
        this.beanManager = beanManager;
    }

    public Stack<NavItem> getNavItemStack() {
        return navItemStack;
    }

    @Override
    public void show(NavGroup navGroup) {
        this.show(navGroup, true);
    }

    @Override
    public void show(List<NavItem> itemList) {
        currentPerspectiveNavItem = null;
        super.show(itemList);
    }

    /**
     * Shows the given group's children as tiles.
     * A detached clone is shown so the original tree is never mutated.
     */
    public void show(NavGroup navGroup, boolean clearBreadcrumb) {
        if (navGroup == null) {
            view.errorNavGroupNotFound();
        } else {
            NavGroup clone = (NavGroup) navGroup.cloneItem();
            clone.setParent(null);
            if (clearBreadcrumb) {
                navItemStack.clear();
                updateBreadcrumb();
            }
            currentPerspectiveNavItem = null;
            super.show(clone);
        }
    }

    @Override
    protected void showItem(NavItem navItem) {
        // Every item is rendered as a clickable tile.
        NavItemTileWidget tileWidget = beanManager.lookupBean(NavItemTileWidget.class).getInstance();
        tileWidget.setOnClick(() -> this.openItem(navItem));
        tileWidget.show(navItem);
        view.addTileWidget(tileWidget);
    }

    @Override
    protected void showGroup(NavGroup navGroup) {
        // Groups are shown exactly like plain items (a folder tile).
        showItem(navGroup);
    }

    /**
     * Opens the given item: groups drill down in place, runtime perspectives are
     * displayed inline, and classic UF perspectives navigate away.
     */
    public void openItem(NavItem navItem) {
        NavItem parent = navItem.getParent();
        if (navItemStack.isEmpty()) {
            // Seed the breadcrumb with the parent so the trail has a root entry.
            if (parent != null) {
                navItemStack.add(parent);
                navItemStack.add(navItem);
            }
        } else {
            navItemStack.add(navItem);
        }
        this.updateBreadcrumb();

        if (navItem instanceof NavGroup) {
            this.show((NavGroup) navItem, false);
        } else {
            NavWorkbenchCtx navCtx = NavWorkbenchCtx.get(navItem);
            String resourceId = navCtx.getResourceId();
            if (resourceId != null && ActivityResourceType.PERSPECTIVE.equals(navCtx.getResourceType())) {

                // Runtime perspectives are displayed inline
                if (perspectivePluginManager.isRuntimePerspective(resourceId)) {
                    openPerspective(navItem);
                }
                // Classic UF perspectives take over the entire window
                else {
                    placeManager.goTo(resourceId);
                }
            }
        }
    }

    /** Builds and shows a runtime perspective inline, tracking it for live refresh. */
    protected void openPerspective(NavItem perspectiveItem) {
        NavWorkbenchCtx navCtx = NavWorkbenchCtx.get(perspectiveItem);
        String perspectiveId = navCtx.getResourceId();
        String navRootId = navCtx.getNavGroupId();
        currentPerspectiveNavItem = perspectiveItem;
        LayoutTemplateContext layoutCtx = new LayoutTemplateContext(navRootId);
        perspectivePluginManager.buildPerspectiveWidget(perspectiveId, layoutCtx, view::showTileContent, this::onInfiniteRecursion);
    }

    /** Called when the perspective layout references itself (directly or indirectly). */
    public void onInfiniteRecursion(LayoutRecursionIssue issue) {
        String cause = issue.printReport(navigationManager.getNavTree(), view);
        view.infiniteRecursionError(cause);
    }

    // Re-renders the breadcrumb from the stack; only the last entry is non-clickable.
    protected void updateBreadcrumb() {
        view.clearBreadcrumb();
        for (int i=0; i<navItemStack.size(); i++) {
            final NavItem navItem = navItemStack.get(i);
            if (i == navItemStack.size()-1) {
                view.addBreadcrumbItem(navItem.getName());
            } else {
                view.addBreadcrumbItem(navItem.getName(), () -> gotoBreadcrumbItem(navItem));
            }
        }
    }

    /** Pops the stack back to the clicked item and re-opens it. */
    public void gotoBreadcrumbItem(NavItem navItem) {
        while (navItemStack.peek() != navItem) {
            navItemStack.pop();
        }
        // Re-open the item
        if (!navItemStack.isEmpty()) {
            navItemStack.pop();
        }
        openItem(navItem);
    }

    // Catch changes on runtime perspectives so as to display the most up to date changes
    private void onPerspectiveChanged(@Observes PluginSaved event) {
        if (currentPerspectiveNavItem != null) {
            NavWorkbenchCtx navCtx = NavWorkbenchCtx.get(currentPerspectiveNavItem);
            String perspectiveId = navCtx.getResourceId();
            if (event.getPlugin().getName().equals(perspectiveId)) {
                openPerspective(currentPerspectiveNavItem);
            }
        }
    }
}
/**
 * Copyright 2015-2016 Red Hat, Inc, and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wildfly.swarm.arquillian.resolver;

import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

import org.apache.maven.settings.Settings;
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.RepositoryListener;
import org.eclipse.aether.repository.RepositoryPolicy;
import org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
import org.jboss.shrinkwrap.resolver.api.maven.MavenResolvedArtifact;
import org.jboss.shrinkwrap.resolver.api.maven.coordinate.MavenCoordinate;
import org.jboss.shrinkwrap.resolver.api.maven.repository.MavenChecksumPolicy;
import org.jboss.shrinkwrap.resolver.api.maven.repository.MavenRemoteRepositories;
import org.jboss.shrinkwrap.resolver.api.maven.repository.MavenRemoteRepository;
import org.jboss.shrinkwrap.resolver.api.maven.repository.MavenUpdatePolicy;
import org.jboss.shrinkwrap.resolver.api.maven.strategy.MavenResolutionStrategy;
import org.jboss.shrinkwrap.resolver.api.maven.strategy.NonTransitiveStrategy;
import org.jboss.shrinkwrap.resolver.api.maven.strategy.TransitiveStrategy;
import org.jboss.shrinkwrap.resolver.impl.maven.ConfigurableMavenWorkingSessionImpl;
import org.jboss.shrinkwrap.resolver.impl.maven.MavenWorkingSessionContainer;
import org.wildfly.swarm.spi.api.internal.SwarmInternalProperties;
import org.wildfly.swarm.tools.ArtifactResolvingHelper;
import org.wildfly.swarm.tools.ArtifactSpec;

/**
 * Resolves Maven artifacts via the ShrinkWrap resolver, checking the local
 * repository first and falling back to the configured remote repositories.
 *
 * <p>NOTE(review): the underlying Aether session is obtained by reflection on
 * ShrinkWrap internals (see {@link #invokeWorkingSessionMethod}), so this class
 * is tightly coupled to the ShrinkWrap resolver implementation version.
 *
 * @author Bob McWhirter
 * @author Ken Finnigan
 */
public class ShrinkwrapArtifactResolvingHelper implements ArtifactResolvingHelper {

    /** Lazily-created shared instance; see {@link #defaultInstance()}. */
    private static AtomicReference<ShrinkwrapArtifactResolvingHelper> INSTANCE = new AtomicReference<>();

    /**
     * Returns the shared helper, creating it on first use with Maven Central,
     * the JBoss public repository, the Gradle tools repository, and any extra
     * repositories listed in the swarm build-repos system property.
     * Offline mode is controlled by -Dswarm.resolver.offline.
     */
    public static ShrinkwrapArtifactResolvingHelper defaultInstance() {
        return INSTANCE.updateAndGet(e -> {
            if (e != null) {
                return e;
            }
            MavenRemoteRepository jbossPublic =
                    MavenRemoteRepositories.createRemoteRepository("jboss-public-repository-group",
                                                                   "http://repository.jboss.org/nexus/content/groups/public/",
                                                                   "default");
            jbossPublic.setChecksumPolicy(MavenChecksumPolicy.CHECKSUM_POLICY_IGNORE);
            jbossPublic.setUpdatePolicy(MavenUpdatePolicy.UPDATE_POLICY_NEVER);

            MavenRemoteRepository gradleTools =
                    MavenRemoteRepositories.createRemoteRepository("gradle",
                                                                   "http://repo.gradle.org/gradle/libs-releases-local",
                                                                   "default");
            gradleTools.setChecksumPolicy(MavenChecksumPolicy.CHECKSUM_POLICY_IGNORE);
            gradleTools.setUpdatePolicy(MavenUpdatePolicy.UPDATE_POLICY_NEVER);

            Boolean offline = Boolean.valueOf(System.getProperty("swarm.resolver.offline", "false"));

            final ConfigurableMavenResolverSystem resolver = Maven.configureResolver()
                    .withMavenCentralRepo(true)
                    .withRemoteRepo(jbossPublic)
                    .withRemoteRepo(gradleTools)
                    .workOffline(offline);

            // Comma-separated extra repositories, each used as both id and URL.
            final String additionalRepos = System.getProperty(SwarmInternalProperties.BUILD_REPOS);
            if (additionalRepos != null) {
                Arrays.asList(additionalRepos.split(","))
                        .forEach(r -> {
                            MavenRemoteRepository repo =
                                    MavenRemoteRepositories.createRemoteRepository(r, r, "default");
                            repo.setChecksumPolicy(MavenChecksumPolicy.CHECKSUM_POLICY_IGNORE);
                            repo.setUpdatePolicy(MavenUpdatePolicy.UPDATE_POLICY_NEVER);
                            resolver.withRemoteRepo(repo);
                        });
            }

            ShrinkwrapArtifactResolvingHelper helper = new ShrinkwrapArtifactResolvingHelper(resolver);
            helper.session().setCache(new SimpleRepositoryCache());
            helper.session().setUpdatePolicy(RepositoryPolicy.UPDATE_POLICY_DAILY);
            helper.session().setChecksumPolicy(RepositoryPolicy.CHECKSUM_POLICY_IGNORE);
            return helper;
        });
    }

    public ConfigurableMavenResolverSystem getResolver() {
        return resolver;
    }

    public ShrinkwrapArtifactResolvingHelper(ConfigurableMavenResolverSystem resolver) {
        this.resolver = resolver;
        transferListener(new FailureReportingTransferListener());
    }

    /**
     * Resolves a single artifact without transitivity. The local repository is
     * consulted first; remote resolution is attempted only on a local miss.
     * Returns the spec with its file set, or null if resolution failed.
     */
    @Override
    public ArtifactSpec resolve(ArtifactSpec spec) {
        if (spec.file == null) {
            final File localFile = new File(settings().getLocalRepository(), spec.jarRepoPath());
            if (localFile.exists()) {
                spec.file = localFile;
            } else {
                resetListeners();
                try {
                    final File file = this.resolver.resolve(spec.mavenGav()).withoutTransitivity().asSingleFile();
                    if (file != null) {
                        spec.file = file;
                    }
                } finally {
                    // Always flush the transfer listener, even on failure.
                    resolutionComplete();
                }
            }
        }

        return spec.file != null ? spec : null;
    }

    /**
     * Resolves all specs in one pass, optionally following transitive
     * dependencies. Every resolved artifact is reported with "compile" scope.
     */
    @Override
    public Set<ArtifactSpec> resolveAll(final Set<ArtifactSpec> specs, boolean transitive, boolean defaultExcludes) {
        if (specs.isEmpty()) {
            return specs;
        }

        MavenResolutionStrategy transitivityStrategy = (transitive ? TransitiveStrategy.INSTANCE : NonTransitiveStrategy.INSTANCE);

        resetListeners();
        final MavenResolvedArtifact[] artifacts =
                withResolver(r -> r.resolve(specs.stream().map(ArtifactSpec::mavenGav).collect(Collectors.toList()))
                        .using(transitivityStrategy)
                        .as(MavenResolvedArtifact.class));

        return Arrays.stream(artifacts).map(artifact -> {
            final MavenCoordinate coord = artifact.getCoordinate();
            return new ArtifactSpec("compile",
                                    coord.getGroupId(),
                                    coord.getArtifactId(),
                                    coord.getVersion(),
                                    coord.getPackaging().getId(),
                                    coord.getClassifier(),
                                    artifact.asFile());
        }).collect(Collectors.toSet());
    }

    public ShrinkwrapArtifactResolvingHelper repositoryListener(final RepositoryListener l) {
        this.repositoryListener = l;

        return this;
    }

    public ShrinkwrapArtifactResolvingHelper transferListener(final CompletableTransferListener l) {
        this.transferListener = l;

        return this;
    }

    /** Runs the action with listeners attached, completing them afterwards. */
    public MavenResolvedArtifact[] withResolver(ResolverAction action) {
        resetListeners();
        try {
            return action.resolve(this.resolver);
        } finally {
            resolutionComplete();
        }
    }

    // (Re)attach the configured listeners to the live Aether session.
    private void resetListeners() {
        final DefaultRepositorySystemSession session = session();
        session.setRepositoryListener(this.repositoryListener);
        session.setTransferListener(this.transferListener);
    }

    // Signal the transfer listener that the current resolution is finished.
    private void resolutionComplete() {
        if (this.transferListener != null) {
            this.transferListener.complete();
        }
    }

    private DefaultRepositorySystemSession session() {
        return (DefaultRepositorySystemSession) invokeWorkingSessionMethod("getSession");
    }

    private Settings settings() {
        return (Settings) invokeWorkingSessionMethod("getSettings");
    }

    /**
     * Invokes a no-arg method on the ShrinkWrap working session via reflection;
     * needed because the session/settings accessors are not public API.
     */
    private Object invokeWorkingSessionMethod(final String methodName) {
        try {
            final Method method = ConfigurableMavenWorkingSessionImpl.class.getDeclaredMethod(methodName);
            method.setAccessible(true);

            return method.invoke(((MavenWorkingSessionContainer) this.resolver).getMavenWorkingSession());
        } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
            throw new RuntimeException("Failed to invoke " + methodName, e);
        }
    }

    private final ConfigurableMavenResolverSystem resolver;

    private CompletableTransferListener transferListener;

    private RepositoryListener repositoryListener;

    /** Callback that performs a resolution against the configured resolver. */
    public interface ResolverAction {
        MavenResolvedArtifact[] resolve(ConfigurableMavenResolverSystem resolver);
    }
}
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.alexfalappa.nbspringboot.projects.initializr;

import java.util.ArrayList;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Tests for {@link ArtifactVersion}
 *
 * @author Alessandro Falappa
 */
public class ArtifactVersionTest {

    @Test
    public void testEquals1() {
        final ArtifactVersion a1 = new ArtifactVersion(1, 4, 2);
        final ArtifactVersion a2 = new ArtifactVersion(1, 4, 2);
        System.out.format("%s equals %s\n", a1, a2);
        assertEquals(a1, a2);
    }

    @Test
    public void testEquals2() {
        final ArtifactVersion a1 = new ArtifactVersion(1, 1, 3, "SNAPSHOT");
        final ArtifactVersion a2 = new ArtifactVersion(1, 1, 3, "SNAPSHOT");
        System.out.format("%s equals %s\n", a1, a2);
        assertEquals(a1, a2);
    }

    @Test
    public void testEquals3() {
        // Qualifier comparison is case-insensitive.
        final ArtifactVersion a1 = new ArtifactVersion(1, 1, 3, "SNAPSHOT");
        final ArtifactVersion a2 = new ArtifactVersion(1, 1, 3, "snapshot");
        System.out.format("%s equals %s\n", a1, a2);
        assertEquals(a1, a2);
    }

    @Test
    public void testNotEquals1() {
        final ArtifactVersion a1 = new ArtifactVersion(1, 4, 2);
        final ArtifactVersion a2 = new ArtifactVersion(1, 4, 2, "M1");
        System.out.format("%s not equals %s\n", a1, a2);
        assertNotEquals(a1, a2);
    }

    @Test
    public void testNotEquals2() {
        final ArtifactVersion a1 = new ArtifactVersion(1, 4, 2, "M2");
        final ArtifactVersion a2 = new ArtifactVersion(1, 4, 2, "M1");
        System.out.format("%s not equals %s\n", a1, a2);
        assertNotEquals(a1, a2);
    }

    @Test
    public void testOf1() {
        String versionString = "1.4.2";
        System.out.println("of " + versionString);
        assertEquals(new ArtifactVersion(1, 4, 2), ArtifactVersion.of(versionString));
    }

    @Test
    public void testOf2() {
        // Dash-separated qualifier.
        String versionString = "1.2.3-RC";
        System.out.println("of " + versionString);
        assertEquals(new ArtifactVersion(1, 2, 3, "RC"), ArtifactVersion.of(versionString));
    }

    @Test
    public void testOf3() {
        // Dot-separated qualifier.
        String versionString = "3.2.1.M2";
        System.out.println("of " + versionString);
        assertEquals(new ArtifactVersion(3, 2, 1, "M2"), ArtifactVersion.of(versionString));
    }

    @Test
    public void testOf4() {
        // Lowercase qualifiers are normalized to uppercase.
        String versionString = "1.2.3-m4";
        System.out.println("of " + versionString);
        assertEquals(new ArtifactVersion(1, 2, 3, "M4"), ArtifactVersion.of(versionString));
    }

    @Test
    public void testOf9() {
        String versionString = "1.2.3.build-snapshot";
        System.out.println("of " + versionString);
        assertEquals(new ArtifactVersion(1, 2, 3, "BUILD-SNAPSHOT"), ArtifactVersion.of(versionString));
    }

    // Malformed inputs must be rejected with IllegalArgumentException.

    @Test(expected = IllegalArgumentException.class)
    public void testOf5() {
        String versionString = "1.2.3_INVALID";
        System.out.println("of " + versionString);
        ArtifactVersion.of(versionString);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testOf6() {
        String versionString = "1.a.3";
        System.out.println("of " + versionString);
        ArtifactVersion.of(versionString);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testOf7() {
        String versionString = "-1.2.3";
        System.out.println("of " + versionString);
        ArtifactVersion.of(versionString);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testOf8() {
        String versionString = "1.-2.3";
        System.out.println("of " + versionString);
        ArtifactVersion.of(versionString);
    }

    @Test
    public void testCompareTo1() {
        ArtifactVersion one = new ArtifactVersion(1, 1, 2);
        ArtifactVersion two = new ArtifactVersion(1, 1, 2);
        System.out.format("compare %s = %s\n", one, two);
        assertEquals(0, one.compareTo(two));
    }

    @Test
    public void testCompareTo2() {
        ArtifactVersion one = new ArtifactVersion(1, 1, 2, "M1");
        ArtifactVersion two = new ArtifactVersion(1, 1, 2, "M1");
        System.out.format("compare %s = %s\n", one, two);
        assertEquals(0, one.compareTo(two));
    }

    @Test
    public void testCompareTo3() {
        ArtifactVersion one = new ArtifactVersion(1, 1, 2);
        ArtifactVersion two = new ArtifactVersion(2, 0, 1);
        System.out.format("compare %s < %s\n", one, two);
        assertEquals(-1, one.compareTo(two));
    }

    @Test
    public void testCompareTo4() {
        ArtifactVersion one = new ArtifactVersion(1, 0, 13);
        ArtifactVersion two = new ArtifactVersion(1, 1, 1);
        System.out.format("compare %s < %s\n", one, two);
        assertEquals(-1, one.compareTo(two));
    }

    @Test
    public void testCompareTo5() {
        // Qualifier ordering: M* < RC* < SNAPSHOT < (no qualifier).
        ArtifactVersion one = new ArtifactVersion(1, 0, 1, "M4");
        ArtifactVersion two = new ArtifactVersion(1, 0, 1, "RC1");
        System.out.format("compare %s < %s\n", one, two);
        assertTrue(one.compareTo(two) < 0);
    }

    @Test
    public void testCompareTo6() {
        ArtifactVersion one = new ArtifactVersion(1, 0, 1, "RC1");
        ArtifactVersion two = new ArtifactVersion(1, 0, 1, "RC2");
        System.out.format("compare %s < %s\n", one, two);
        assertTrue(one.compareTo(two) < 0);
    }

    @Test
    public void testCompareTo7() {
        ArtifactVersion one = new ArtifactVersion(1, 0, 1, "RC2");
        ArtifactVersion two = new ArtifactVersion(1, 0, 1, "SNAPSHOT");
        System.out.format("compare %s < %s\n", one, two);
        assertTrue(one.compareTo(two) < 0);
    }

    @Test
    public void testCompareTo8() {
        // A release (no qualifier) sorts after its own SNAPSHOT.
        ArtifactVersion one = new ArtifactVersion(1, 0, 1, "SNAPSHOT");
        ArtifactVersion two = new ArtifactVersion(1, 0, 1);
        System.out.format("compare %s < %s\n", one, two);
        assertTrue(one.compareTo(two) < 0);
    }

    @Test
    public void testCollectionSort() {
        // Shuffled list must sort into full milestone -> RC -> snapshot -> release progression.
        ArrayList<ArtifactVersion> versions = new ArrayList<>();
        versions.add(ArtifactVersion.of("2.4.0-M1"));
        versions.add(ArtifactVersion.of("2.3.0-M1"));
        versions.add(ArtifactVersion.of("2.3.0-RC2"));
        versions.add(ArtifactVersion.of("2.3.0-M2"));
        versions.add(ArtifactVersion.of("2.3.1"));
        versions.add(ArtifactVersion.of("2.4.0"));
        versions.add(ArtifactVersion.of("2.3.0-SNAPSHOT"));
        versions.add(ArtifactVersion.of("2.4.0-RC1"));
        versions.add(ArtifactVersion.of("2.3.1-SNAPSHOT"));
        versions.add(ArtifactVersion.of("2.4.0-M2"));
        versions.add(ArtifactVersion.of("2.3.0-RC1"));
        versions.add(ArtifactVersion.of("2.4.0-SNAPSHOT"));
        versions.add(ArtifactVersion.of("2.3.0"));
        System.out.format("sort %s\n", versions);
        versions.sort(null);
        ArrayList<ArtifactVersion> progression = new ArrayList<>();
        progression.add(new ArtifactVersion(2, 3, 0, "M1"));
        progression.add(new ArtifactVersion(2, 3, 0, "M2"));
        progression.add(new ArtifactVersion(2, 3, 0, "RC1"));
        progression.add(new ArtifactVersion(2, 3, 0, "RC2"));
        progression.add(new ArtifactVersion(2, 3, 0, "SNAPSHOT"));
        progression.add(new ArtifactVersion(2, 3, 0));
        progression.add(new ArtifactVersion(2, 3, 1, "SNAPSHOT"));
        progression.add(new ArtifactVersion(2, 3, 1));
        progression.add(new ArtifactVersion(2, 4, 0, "M1"));
        progression.add(new ArtifactVersion(2, 4, 0, "M2"));
        progression.add(new ArtifactVersion(2, 4, 0, "RC1"));
        progression.add(new ArtifactVersion(2, 4, 0, "SNAPSHOT"));
        progression.add(new ArtifactVersion(2, 4, 0));
        assertEquals(versions, progression);
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v9/enums/asset_group_status.proto

// NOTE(review): this is protoc-generated code. To change anything here, edit
// the .proto file above and regenerate; manual edits will be overwritten.
package com.google.ads.googleads.v9.enums;

/**
 * <pre>
 * Container for enum describing possible statuses of an asset group.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v9.enums.AssetGroupStatusEnum}
 */
public final class AssetGroupStatusEnum extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.enums.AssetGroupStatusEnum)
    AssetGroupStatusEnumOrBuilder {
private static final long serialVersionUID = 0L;
  // Use AssetGroupStatusEnum.newBuilder() to construct.
  private AssetGroupStatusEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private AssetGroupStatusEnum() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new AssetGroupStatusEnum();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor invoked by PARSER; the message declares no
  // fields, so every tag is routed to the unknown-field set.
  private AssetGroupStatusEnum(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v9.enums.AssetGroupStatusProto.internal_static_google_ads_googleads_v9_enums_AssetGroupStatusEnum_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v9.enums.AssetGroupStatusProto.internal_static_google_ads_googleads_v9_enums_AssetGroupStatusEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v9.enums.AssetGroupStatusEnum.class, com.google.ads.googleads.v9.enums.AssetGroupStatusEnum.Builder.class);
  }

  /**
   * <pre>
   * The possible statuses of an asset group.
   * </pre>
   *
   * Protobuf enum {@code google.ads.googleads.v9.enums.AssetGroupStatusEnum.AssetGroupStatus}
   */
  public enum AssetGroupStatus
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <pre>
     * The status has not been specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    UNSPECIFIED(0),
    /**
     * <pre>
     * The received value is not known in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    UNKNOWN(1),
    /**
     * <pre>
     * The asset group is enabled.
     * </pre>
     *
     * <code>ENABLED = 2;</code>
     */
    ENABLED(2),
    /**
     * <pre>
     * The asset group is paused.
     * </pre>
     *
     * <code>PAUSED = 3;</code>
     */
    PAUSED(3),
    /**
     * <pre>
     * The asset group is removed.
     * </pre>
     *
     * <code>REMOVED = 4;</code>
     */
    REMOVED(4),
    // Sentinel for wire values not present in this generated version; it has
    // no descriptor and no number (getNumber()/getValueDescriptor() throw).
    UNRECOGNIZED(-1),
    ;

    /**
     * <pre>
     * The status has not been specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    public static final int UNSPECIFIED_VALUE = 0;
    /**
     * <pre>
     * The received value is not known in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    public static final int UNKNOWN_VALUE = 1;
    /**
     * <pre>
     * The asset group is enabled.
     * </pre>
     *
     * <code>ENABLED = 2;</code>
     */
    public static final int ENABLED_VALUE = 2;
    /**
     * <pre>
     * The asset group is paused.
     * </pre>
     *
     * <code>PAUSED = 3;</code>
     */
    public static final int PAUSED_VALUE = 3;
    /**
     * <pre>
     * The asset group is removed.
     * </pre>
     *
     * <code>REMOVED = 4;</code>
     */
    public static final int REMOVED_VALUE = 4;


    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static AssetGroupStatus valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static AssetGroupStatus forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return ENABLED;
        case 3: return PAUSED;
        case 4: return REMOVED;
        // null (not UNRECOGNIZED) for numbers absent from this version.
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<AssetGroupStatus>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        AssetGroupStatus> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<AssetGroupStatus>() {
            public AssetGroupStatus findValueByNumber(int number) {
              return AssetGroupStatus.forNumber(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.enums.AssetGroupStatusEnum.getDescriptor().getEnumTypes().get(0);
    }

    private static final AssetGroupStatus[] VALUES = values();

    public static AssetGroupStatus valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private AssetGroupStatus(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.ads.googleads.v9.enums.AssetGroupStatusEnum.AssetGroupStatus)
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v9.enums.AssetGroupStatusEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v9.enums.AssetGroupStatusEnum other = (com.google.ads.googleads.v9.enums.AssetGroupStatusEnum) obj;

    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v9.enums.AssetGroupStatusEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing possible statuses of an asset group.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v9.enums.AssetGroupStatusEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.enums.AssetGroupStatusEnum)
      com.google.ads.googleads.v9.enums.AssetGroupStatusEnumOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.enums.AssetGroupStatusProto.internal_static_google_ads_googleads_v9_enums_AssetGroupStatusEnum_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v9.enums.AssetGroupStatusProto.internal_static_google_ads_googleads_v9_enums_AssetGroupStatusEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v9.enums.AssetGroupStatusEnum.class, com.google.ads.googleads.v9.enums.AssetGroupStatusEnum.Builder.class);
    }

    // Construct using com.google.ads.googleads.v9.enums.AssetGroupStatusEnum.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v9.enums.AssetGroupStatusProto.internal_static_google_ads_googleads_v9_enums_AssetGroupStatusEnum_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.AssetGroupStatusEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v9.enums.AssetGroupStatusEnum.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.AssetGroupStatusEnum build() {
      com.google.ads.googleads.v9.enums.AssetGroupStatusEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.AssetGroupStatusEnum buildPartial() {
      com.google.ads.googleads.v9.enums.AssetGroupStatusEnum result = new com.google.ads.googleads.v9.enums.AssetGroupStatusEnum(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v9.enums.AssetGroupStatusEnum) {
        return mergeFrom((com.google.ads.googleads.v9.enums.AssetGroupStatusEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v9.enums.AssetGroupStatusEnum other) {
      if (other == com.google.ads.googleads.v9.enums.AssetGroupStatusEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v9.enums.AssetGroupStatusEnum parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v9.enums.AssetGroupStatusEnum) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.enums.AssetGroupStatusEnum)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.enums.AssetGroupStatusEnum)
  private static final com.google.ads.googleads.v9.enums.AssetGroupStatusEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v9.enums.AssetGroupStatusEnum();
  }

  public static com.google.ads.googleads.v9.enums.AssetGroupStatusEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<AssetGroupStatusEnum>
      PARSER = new com.google.protobuf.AbstractParser<AssetGroupStatusEnum>() {
    @java.lang.Override
    public AssetGroupStatusEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new AssetGroupStatusEnum(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<AssetGroupStatusEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<AssetGroupStatusEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v9.enums.AssetGroupStatusEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.documentsui.model;

import static com.android.documentsui.model.DocumentInfo.getCursorInt;
import static com.android.documentsui.model.DocumentInfo.getCursorLong;
import static com.android.documentsui.model.DocumentInfo.getCursorString;

import android.content.Context;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.os.Parcel;
import android.os.Parcelable;
import android.provider.DocumentsContract.Root;
import android.text.TextUtils;

import com.android.documentsui.IconUtils;
import com.android.documentsui.R;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.ProtocolException;
import java.util.Objects;

/**
 * Representation of a {@link Root}.
 */
public class RootInfo implements Durable, Parcelable {
    // Stream-format version numbers for read()/write(). VERSION_INIT streams
    // are no longer readable: read() only accepts VERSION_DROP_TYPE and throws
    // ProtocolException for anything else.
    private static final int VERSION_INIT = 1;
    private static final int VERSION_DROP_TYPE = 2;

    // Persisted fields; read()/write() serialize them in exactly this order,
    // so field order must stay in sync with both methods.
    public String authority;
    public String rootId;
    public int flags;
    public int icon;
    public String title;
    public String summary;
    public String documentId;
    public long availableBytes;
    public String mimeTypes;

    /** Derived fields that aren't persisted */
    public String[] derivedMimeTypes;  // mimeTypes split on '\n'; null when mimeTypes is null
    public int derivedIcon;            // special-case icon resource, 0 when none applies

    public RootInfo() {
        reset();
    }

    /** Resets every field to its "empty" value (nulls, zeros, -1 bytes). */
    @Override
    public void reset() {
        authority = null;
        rootId = null;
        flags = 0;
        icon = 0;
        title = null;
        summary = null;
        documentId = null;
        availableBytes = -1;
        mimeTypes = null;

        derivedMimeTypes = null;
        derivedIcon = 0;
    }

    /**
     * Deserializes this root from a stream previously produced by
     * {@link #write(DataOutputStream)}.
     *
     * @throws ProtocolException if the stream's version is not
     *         {@code VERSION_DROP_TYPE} (older VERSION_INIT data is rejected).
     */
    @Override
    public void read(DataInputStream in) throws IOException {
        final int version = in.readInt();
        switch (version) {
            case VERSION_DROP_TYPE:
                authority = DurableUtils.readNullableString(in);
                rootId = DurableUtils.readNullableString(in);
                flags = in.readInt();
                icon = in.readInt();
                title = DurableUtils.readNullableString(in);
                summary = DurableUtils.readNullableString(in);
                documentId = DurableUtils.readNullableString(in);
                availableBytes = in.readLong();
                mimeTypes = DurableUtils.readNullableString(in);
                // Derived fields are not persisted; recompute them after load.
                deriveFields();
                break;
            default:
                throw new ProtocolException("Unknown version " + version);
        }
    }

    /**
     * Serializes this root; the field order here must mirror
     * {@link #read(DataInputStream)} exactly.
     */
    @Override
    public void write(DataOutputStream out) throws IOException {
        out.writeInt(VERSION_DROP_TYPE);
        DurableUtils.writeNullableString(out, authority);
        DurableUtils.writeNullableString(out, rootId);
        out.writeInt(flags);
        out.writeInt(icon);
        DurableUtils.writeNullableString(out, title);
        DurableUtils.writeNullableString(out, summary);
        DurableUtils.writeNullableString(out, documentId);
        out.writeLong(availableBytes);
        DurableUtils.writeNullableString(out, mimeTypes);
    }

    @Override
    public int describeContents() {
        return 0;
    }

    // Parcel transport delegates to the Durable read()/write() round-trip.
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        DurableUtils.writeToParcel(dest, this);
    }

    public static final Creator<RootInfo> CREATOR = new Creator<RootInfo>() {
        @Override
        public RootInfo createFromParcel(Parcel in) {
            final RootInfo root = new RootInfo();
            DurableUtils.readFromParcel(in, root);
            return root;
        }

        @Override
        public RootInfo[] newArray(int size) {
            return new RootInfo[size];
        }
    };

    /**
     * Builds a RootInfo from one row of a roots cursor (see
     * {@link Root} column constants) for the given provider authority.
     */
    public static RootInfo fromRootsCursor(String authority, Cursor cursor) {
        final RootInfo root = new RootInfo();
        root.authority = authority;
        root.rootId = getCursorString(cursor, Root.COLUMN_ROOT_ID);
        root.flags = getCursorInt(cursor, Root.COLUMN_FLAGS);
        root.icon = getCursorInt(cursor, Root.COLUMN_ICON);
        root.title = getCursorString(cursor, Root.COLUMN_TITLE);
        root.summary = getCursorString(cursor, Root.COLUMN_SUMMARY);
        root.documentId = getCursorString(cursor, Root.COLUMN_DOCUMENT_ID);
        root.availableBytes = getCursorLong(cursor, Root.COLUMN_AVAILABLE_BYTES);
        root.mimeTypes = getCursorString(cursor, Root.COLUMN_MIME_TYPES);
        root.deriveFields();
        return root;
    }

    // Recomputes the non-persisted convenience fields from the stored ones.
    private void deriveFields() {
        derivedMimeTypes = (mimeTypes != null) ? mimeTypes.split("\n") : null;

        // TODO: remove these special case icons
        if (isExternalStorage()) {
            derivedIcon = R.drawable.ic_root_sdcard;
        } else if (isDownloads()) {
            derivedIcon = R.drawable.ic_root_download;
        } else if (isImages()) {
            derivedIcon = R.drawable.ic_doc_image;
        } else if (isVideos()) {
            derivedIcon = R.drawable.ic_doc_video;
        } else if (isAudio()) {
            derivedIcon = R.drawable.ic_doc_audio;
        }
    }

    // The synthetic "Recents" root is the only one with no authority/rootId.
    public boolean isRecents() {
        return authority == null && rootId == null;
    }

    public boolean isExternalStorage() {
        return "com.android.externalstorage.documents".equals(authority);
    }

    public boolean isDownloads() {
        return "com.android.providers.downloads.documents".equals(authority);
    }

    public boolean isImages() {
        return "com.android.providers.media.documents".equals(authority)
                && "images_root".equals(rootId);
    }

    public boolean isVideos() {
        return "com.android.providers.media.documents".equals(authority)
                && "videos_root".equals(rootId);
    }

    public boolean isAudio() {
        return "com.android.providers.media.documents".equals(authority)
                && "audio_root".equals(rootId);
    }

    @Override
    public String toString() {
        return "Root{authority=" + authority + ", rootId=" + rootId + ", title=" + title + "}";
    }

    // Icon loaders: prefer the special-case derived icon; otherwise resolve
    // the provider-supplied icon resource from the owning package.
    public Drawable loadIcon(Context context) {
        if (derivedIcon != 0) {
            return context.getDrawable(derivedIcon);
        } else {
            return IconUtils.loadPackageIcon(context, authority, icon);
        }
    }

    public Drawable loadDrawerIcon(Context context) {
        if (derivedIcon != 0) {
            return IconUtils.applyTintColor(context, derivedIcon, R.color.item_root_icon);
        } else {
            return IconUtils.loadPackageIcon(context, authority, icon);
        }
    }

    public Drawable loadGridIcon(Context context) {
        if (derivedIcon != 0) {
            return IconUtils.applyTintAttr(context, derivedIcon,
                    android.R.attr.textColorPrimaryInverse);
        } else {
            return IconUtils.loadPackageIcon(context, authority, icon);
        }
    }

    public Drawable loadToolbarIcon(Context context) {
        if (derivedIcon != 0) {
            return IconUtils.applyTintAttr(context, derivedIcon,
                    android.R.attr.colorControlNormal);
        } else {
            return IconUtils.loadPackageIcon(context, authority, icon);
        }
    }

    // Identity is (authority, rootId) only; the other fields are display data.
    @Override
    public boolean equals(Object o) {
        if (o instanceof RootInfo) {
            final RootInfo root = (RootInfo) o;
            return Objects.equals(authority, root.authority)
                    && Objects.equals(rootId, root.rootId);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return Objects.hash(authority, rootId);
    }

    /** Returns the summary when present, otherwise falls back to the title. */
    public String getDirectoryString() {
        return !TextUtils.isEmpty(summary) ? summary : title;
    }
}
/*
 * Copyright 2011 gitblit.com.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gitblit.wicket.pages;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.wicket.PageParameters;
import org.apache.wicket.behavior.SimpleAttributeModifier;
import org.apache.wicket.extensions.markup.html.form.palette.Palette;
import org.apache.wicket.markup.html.form.Button;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.util.CollectionModel;
import org.apache.wicket.model.util.ListModel;

import com.gitblit.Constants.AccessRestrictionType;
import com.gitblit.GitBlit;
import com.gitblit.GitBlitException;
import com.gitblit.models.RepositoryModel;
import com.gitblit.models.TeamModel;
import com.gitblit.utils.StringUtils;
import com.gitblit.wicket.RequiresAdminRole;
import com.gitblit.wicket.StringChoiceRenderer;
import com.gitblit.wicket.WicketUtils;
import com.gitblit.wicket.panels.BulletListPanel;

/**
 * Admin-only Wicket page for creating a new team or editing an existing one:
 * team name, member users, accessible repositories, mailing lists and
 * pre-/post-receive scripts.
 */
@RequiresAdminRole
public class EditTeamPage extends RootSubPage {

    // true when this page instance was opened to create a team,
    // false when editing an existing one.
    private final boolean isCreate;

    // Backing model for the space/comma-separated mailing-lists text field;
    // read back in onSubmit() and split into teamModel.mailingLists.
    private IModel<String> mailingLists;

    public EditTeamPage() {
        // create constructor
        super();
        isCreate = true;
        setupPage(new TeamModel(""));
    }

    public EditTeamPage(PageParameters params) {
        // edit constructor
        super(params);
        isCreate = false;
        String name = WicketUtils.getTeamname(params);
        TeamModel model = GitBlit.self().getTeamModel(name);
        setupPage(model);
    }

    /**
     * Builds the edit form for the given team. Component construction order
     * matters: the palettes are created first so the anonymous Form's
     * onSubmit() can read their selections.
     */
    protected void setupPage(final TeamModel teamModel) {
        if (isCreate) {
            super.setupPage(getString("gb.newTeam"), "");
        } else {
            super.setupPage(getString("gb.edit"), teamModel.name);
        }

        CompoundPropertyModel<TeamModel> model = new CompoundPropertyModel<TeamModel>(teamModel);

        // Only repositories with some access restriction are assignable to teams.
        List<String> repos = new ArrayList<String>();
        for (String repo : GitBlit.self().getRepositoryList()) {
            RepositoryModel repositoryModel = GitBlit.self().getRepositoryModel(repo);
            if (repositoryModel.accessRestriction.exceeds(AccessRestrictionType.NONE)) {
                repos.add(repo);
            }
        }
        StringUtils.sortRepositorynames(repos);

        List<String> teamUsers = new ArrayList<String>(teamModel.users);
        Collections.sort(teamUsers);

        List<String> preReceiveScripts = new ArrayList<String>();
        List<String> postReceiveScripts = new ArrayList<String>();

        // Remember the original name so a rename can be applied on save.
        final String oldName = teamModel.name;

        // repositories palette
        final Palette<String> repositories = new Palette<String>("repositories",
                new ListModel<String>(new ArrayList<String>(teamModel.repositories)),
                new CollectionModel<String>(repos), new StringChoiceRenderer(), 10, false);

        // users palette
        final Palette<String> users = new Palette<String>("users", new ListModel<String>(
                new ArrayList<String>(teamUsers)), new CollectionModel<String>(GitBlit.self()
                .getAllUsernames()), new StringChoiceRenderer(), 10, false);

        // pre-receive palette
        if (teamModel.preReceiveScripts != null) {
            preReceiveScripts.addAll(teamModel.preReceiveScripts);
        }
        final Palette<String> preReceivePalette = new Palette<String>("preReceiveScripts",
                new ListModel<String>(preReceiveScripts), new CollectionModel<String>(GitBlit
                        .self().getPreReceiveScriptsUnused(null)), new StringChoiceRenderer(),
                12, true);

        // post-receive palette
        if (teamModel.postReceiveScripts != null) {
            postReceiveScripts.addAll(teamModel.postReceiveScripts);
        }
        final Palette<String> postReceivePalette = new Palette<String>("postReceiveScripts",
                new ListModel<String>(postReceiveScripts), new CollectionModel<String>(GitBlit
                        .self().getPostReceiveScriptsUnused(null)), new StringChoiceRenderer(),
                12, true);

        Form<TeamModel> form = new Form<TeamModel>("editForm", model) {

            private static final long serialVersionUID = 1L;

            /*
             * (non-Javadoc)
             *
             * @see org.apache.wicket.markup.html.form.Form#onSubmit()
             */
            @Override
            protected void onSubmit() {
                String teamname = teamModel.name;
                if (StringUtils.isEmpty(teamname)) {
                    error(getString("gb.pleaseSetTeamName"));
                    return;
                }
                if (isCreate) {
                    // Reject a name that already belongs to another team.
                    TeamModel model = GitBlit.self().getTeamModel(teamname);
                    if (model != null) {
                        error(MessageFormat.format(getString("gb.teamNameUnavailable"), teamname));
                        return;
                    }
                }
                // Repository names are stored lower-case.
                Iterator<String> selectedRepositories = repositories.getSelectedChoices();
                List<String> repos = new ArrayList<String>();
                while (selectedRepositories.hasNext()) {
                    repos.add(selectedRepositories.next().toLowerCase());
                }
                if (repos.size() == 0) {
                    error(getString("gb.teamMustSpecifyRepository"));
                    return;
                }
                teamModel.repositories.clear();
                teamModel.repositories.addAll(repos);

                // Usernames are stored lower-case.
                Iterator<String> selectedUsers = users.getSelectedChoices();
                List<String> members = new ArrayList<String>();
                while (selectedUsers.hasNext()) {
                    members.add(selectedUsers.next().toLowerCase());
                }
                teamModel.users.clear();
                teamModel.users.addAll(members);

                // set mailing lists
                String ml = mailingLists.getObject();
                if (!StringUtils.isEmpty(ml)) {
                    // Addresses may be separated by commas and/or whitespace;
                    // duplicates are collapsed via the set.
                    Set<String> list = new HashSet<String>();
                    for (String address : ml.split("(,|\\s)")) {
                        if (StringUtils.isEmpty(address)) {
                            continue;
                        }
                        list.add(address.toLowerCase());
                    }
                    teamModel.mailingLists.clear();
                    teamModel.mailingLists.addAll(list);
                }

                // pre-receive scripts
                List<String> preReceiveScripts = new ArrayList<String>();
                Iterator<String> pres = preReceivePalette.getSelectedChoices();
                while (pres.hasNext()) {
                    preReceiveScripts.add(pres.next());
                }
                teamModel.preReceiveScripts.clear();
                teamModel.preReceiveScripts.addAll(preReceiveScripts);

                // post-receive scripts
                List<String> postReceiveScripts = new ArrayList<String>();
                Iterator<String> post = postReceivePalette.getSelectedChoices();
                while (post.hasNext()) {
                    postReceiveScripts.add(post.next());
                }
                teamModel.postReceiveScripts.clear();
                teamModel.postReceiveScripts.addAll(postReceiveScripts);

                try {
                    // oldName lets the backend treat a changed name as a rename.
                    GitBlit.self().updateTeamModel(oldName, teamModel, isCreate);
                } catch (GitBlitException e) {
                    error(e.getMessage());
                    return;
                }
                setRedirect(false);
                if (isCreate) {
                    // create another team
                    info(MessageFormat.format(getString("gb.teamCreated"),
                            teamModel.name));
                }
                // back to users page
                setResponsePage(UsersPage.class);
            }
        };

        // do not let the browser pre-populate these fields
        form.add(new SimpleAttributeModifier("autocomplete", "off"));

        // not all user services support manipulating team memberships
        boolean editMemberships = GitBlit.self().supportsTeamMembershipChanges();

        // field names reflective match TeamModel fields
        form.add(new TextField<String>("name"));
        form.add(users.setEnabled(editMemberships));
        mailingLists = new Model<String>(teamModel.mailingLists == null ? ""
                : StringUtils.flattenStrings(teamModel.mailingLists, " "));
        form.add(new TextField<String>("mailingLists", mailingLists));

        form.add(repositories);
        form.add(preReceivePalette);
        form.add(new BulletListPanel("inheritedPreReceive", "inherited", GitBlit.self()
                .getPreReceiveScriptsInherited(null)));
        form.add(postReceivePalette);
        form.add(new BulletListPanel("inheritedPostReceive", "inherited", GitBlit.self()
                .getPostReceiveScriptsInherited(null)));

        form.add(new Button("save"));
        // Cancel bypasses validation/model updates and returns to the users page.
        Button cancel = new Button("cancel") {
            private static final long serialVersionUID = 1L;

            @Override
            public void onSubmit() {
                setResponsePage(UsersPage.class);
            }
        };
        cancel.setDefaultFormProcessing(false);
        form.add(cancel);

        add(form);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math4.stat.descriptive.moment; import java.io.Serializable; import org.apache.commons.math4.exception.MathIllegalArgumentException; import org.apache.commons.math4.exception.NullArgumentException; import org.apache.commons.math4.exception.util.LocalizedFormats; import org.apache.commons.math4.stat.descriptive.AbstractStorelessUnivariateStatistic; import org.apache.commons.math4.stat.descriptive.WeightedEvaluation; import org.apache.commons.math4.util.MathArrays; import org.apache.commons.math4.util.MathUtils; /** * Computes the variance of the available values. By default, the unbiased * "sample variance" definitional formula is used: * <p> * variance = sum((x_i - mean)^2) / (n - 1) </p> * <p> * where mean is the {@link Mean} and <code>n</code> is the number * of sample observations.</p> * <p> * The definitional formula does not have good numerical properties, so * this implementation does not compute the statistic using the definitional * formula. <ul> * <li> The <code>getResult</code> method computes the variance using * updating formulas based on West's algorithm, as described in * <a href="http://doi.acm.org/10.1145/359146.359152"> Chan, T. F. and * J. G. 
Lewis 1979, <i>Communications of the ACM</i>, * vol. 22 no. 9, pp. 526-531.</a></li> * <li> The <code>evaluate</code> methods leverage the fact that they have the * full array of values in memory to execute a two-pass algorithm. * Specifically, these methods use the "corrected two-pass algorithm" from * Chan, Golub, Levesque, <i>Algorithms for Computing the Sample Variance</i>, * American Statistician, vol. 37, no. 3 (1983) pp. 242-247.</li></ul> * Note that adding values using <code>increment</code> or * <code>incrementAll</code> and then executing <code>getResult</code> will * sometimes give a different, less accurate, result than executing * <code>evaluate</code> with the full array of values. The former approach * should only be used when the full array of values is not available. * <p> * The "population variance" ( sum((x_i - mean)^2) / n ) can also * be computed using this statistic. The <code>isBiasCorrected</code> * property determines whether the "population" or "sample" value is * returned by the <code>evaluate</code> and <code>getResult</code> methods. * To compute population variances, set this property to <code>false.</code> * </p> * <p> * <strong>Note that this implementation is not synchronized.</strong> If * multiple threads access an instance of this class concurrently, and at least * one of the threads invokes the <code>increment()</code> or * <code>clear()</code> method, it must be synchronized externally.</p> */ public class Variance extends AbstractStorelessUnivariateStatistic implements Serializable, WeightedEvaluation { /** Serializable version identifier */ private static final long serialVersionUID = 20150412L; /** SecondMoment is used in incremental calculation of Variance*/ protected SecondMoment moment = null; /** * Whether or not {@link #increment(double)} should increment * the internal second moment. 
When a Variance is constructed with an * external SecondMoment as a constructor parameter, this property is * set to false and increments must be applied to the second moment * directly. */ protected boolean incMoment = true; /** * Whether or not bias correction is applied when computing the * value of the statistic. True means that bias is corrected. See * {@link Variance} for details on the formula. */ private boolean isBiasCorrected = true; /** * Constructs a Variance with default (true) <code>isBiasCorrected</code> * property. */ public Variance() { moment = new SecondMoment(); } /** * Constructs a Variance based on an external second moment. * <p> * When this constructor is used, the statistic may only be * incremented via the moment, i.e., {@link #increment(double)} * does nothing; whereas {@code m2.increment(value)} increments * both {@code m2} and the Variance instance constructed from it. * * @param m2 the SecondMoment (Third or Fourth moments work here as well.) */ public Variance(final SecondMoment m2) { incMoment = false; this.moment = m2; } /** * Constructs a Variance with the specified <code>isBiasCorrected</code> * property. * * @param isBiasCorrected setting for bias correction - true means * bias will be corrected and is equivalent to using the argumentless * constructor */ public Variance(boolean isBiasCorrected) { moment = new SecondMoment(); this.isBiasCorrected = isBiasCorrected; } /** * Constructs a Variance with the specified <code>isBiasCorrected</code> * property and the supplied external second moment. * * @param isBiasCorrected setting for bias correction - true means * bias will be corrected * @param m2 the SecondMoment (Third or Fourth moments work * here as well.) */ public Variance(boolean isBiasCorrected, SecondMoment m2) { incMoment = false; this.moment = m2; this.isBiasCorrected = isBiasCorrected; } /** * Copy constructor, creates a new {@code Variance} identical * to the {@code original}. 
* * @param original the {@code Variance} instance to copy * @throws NullArgumentException if original is null */ public Variance(Variance original) throws NullArgumentException { copy(original, this); } /** * {@inheritDoc} * <p>If all values are available, it is more accurate to use * {@link #evaluate(double[])} rather than adding values one at a time * using this method and then executing {@link #getResult}, since * <code>evaluate</code> leverages the fact that is has the full * list of values together to execute a two-pass algorithm. * See {@link Variance}.</p> * * <p>Note also that when {@link #Variance(SecondMoment)} is used to * create a Variance, this method does nothing. In that case, the * SecondMoment should be incremented directly.</p> */ @Override public void increment(final double d) { if (incMoment) { moment.increment(d); } } /** * {@inheritDoc} */ @Override public double getResult() { if (moment.n == 0) { return Double.NaN; } else if (moment.n == 1) { return 0d; } else { if (isBiasCorrected) { return moment.m2 / (moment.n - 1d); } else { return moment.m2 / (moment.n); } } } /** * {@inheritDoc} */ @Override public long getN() { return moment.getN(); } /** * {@inheritDoc} */ @Override public void clear() { if (incMoment) { moment.clear(); } } /** * Returns the variance of the entries in the input array, or * <code>Double.NaN</code> if the array is empty. * <p> * See {@link Variance} for details on the computing algorithm.</p> * <p> * Returns 0 for a single-value (i.e. 
length = 1) sample.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if the array is null.</p> * <p> * Does not change the internal state of the statistic.</p> * * @param values the input array * @return the variance of the values or Double.NaN if length = 0 * @throws MathIllegalArgumentException if the array is null */ @Override public double evaluate(final double[] values) throws MathIllegalArgumentException { if (values == null) { throw new NullArgumentException(LocalizedFormats.INPUT_ARRAY); } return evaluate(values, 0, values.length); } /** * Returns the variance of the entries in the specified portion of * the input array, or <code>Double.NaN</code> if the designated subarray * is empty. Note that Double.NaN may also be returned if the input * includes NaN and / or infinite values. * <p> * See {@link Variance} for details on the computing algorithm.</p> * <p> * Returns 0 for a single-value (i.e. length = 1) sample.</p> * <p> * Does not change the internal state of the statistic.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if the array is null.</p> * * @param values the input array * @param begin index of the first array element to include * @param length the number of elements to include * @return the variance of the values or Double.NaN if length = 0 * @throws MathIllegalArgumentException if the array is null or the array index * parameters are not valid */ @Override public double evaluate(final double[] values, final int begin, final int length) throws MathIllegalArgumentException { double var = Double.NaN; if (MathArrays.verifyValues(values, begin, length)) { if (length == 1) { var = 0.0; } else if (length > 1) { Mean mean = new Mean(); double m = mean.evaluate(values, begin, length); var = evaluate(values, m, begin, length); } } return var; } /** * <p>Returns the weighted variance of the entries in the specified portion of * the input array, or <code>Double.NaN</code> if the designated subarray * is empty.</p> * <p> * Uses the 
formula <div style="white-space: pre"><code> * &Sigma;(weights[i]*(values[i] - weightedMean)<sup>2</sup>)/(&Sigma;(weights[i]) - 1) * </code></div> * where weightedMean is the weighted mean * <p> * This formula will not return the same result as the unweighted variance when all * weights are equal, unless all weights are equal to 1. The formula assumes that * weights are to be treated as "expansion values," as will be the case if for example * the weights represent frequency counts. To normalize weights so that the denominator * in the variance computation equals the length of the input vector minus one, use <pre> * <code>evaluate(values, MathArrays.normalizeArray(weights, values.length)); </code> * </pre> * <p> * Returns 0 for a single-value (i.e. length = 1) sample.</p> * <p> * Throws <code>IllegalArgumentException</code> if any of the following are true: * <ul><li>the values array is null</li> * <li>the weights array is null</li> * <li>the weights array does not have the same length as the values array</li> * <li>the weights array contains one or more infinite values</li> * <li>the weights array contains one or more NaN values</li> * <li>the weights array contains negative values</li> * <li>the start and length arguments do not determine a valid array</li> * </ul> * <p> * Does not change the internal state of the statistic.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if either array is null.</p> * * @param values the input array * @param weights the weights array * @param begin index of the first array element to include * @param length the number of elements to include * @return the weighted variance of the values or Double.NaN if length = 0 * @throws MathIllegalArgumentException if the parameters are not valid * @since 2.1 */ @Override public double evaluate(final double[] values, final double[] weights, final int begin, final int length) throws MathIllegalArgumentException { double var = Double.NaN; if (MathArrays.verifyValues(values, 
weights,begin, length)) { if (length == 1) { var = 0.0; } else if (length > 1) { Mean mean = new Mean(); double m = mean.evaluate(values, weights, begin, length); var = evaluate(values, weights, m, begin, length); } } return var; } /** * <p> * Returns the weighted variance of the entries in the input array.</p> * <p> * Uses the formula <div style="white-space:pre"><code> * &Sigma;(weights[i]*(values[i] - weightedMean)<sup>2</sup>)/(&Sigma;(weights[i]) - 1) * </code></div> * where weightedMean is the weighted mean * <p> * This formula will not return the same result as the unweighted variance when all * weights are equal, unless all weights are equal to 1. The formula assumes that * weights are to be treated as "expansion values," as will be the case if for example * the weights represent frequency counts. To normalize weights so that the denominator * in the variance computation equals the length of the input vector minus one, use <pre> * <code>evaluate(values, MathArrays.normalizeArray(weights, values.length)); </code> * </pre> * <p> * Returns 0 for a single-value (i.e. 
length = 1) sample.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if any of the following are true: * <ul><li>the values array is null</li> * <li>the weights array is null</li> * <li>the weights array does not have the same length as the values array</li> * <li>the weights array contains one or more infinite values</li> * <li>the weights array contains one or more NaN values</li> * <li>the weights array contains negative values</li> * </ul> * <p> * Does not change the internal state of the statistic.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if either array is null.</p> * * @param values the input array * @param weights the weights array * @return the weighted variance of the values * @throws MathIllegalArgumentException if the parameters are not valid * @since 2.1 */ @Override public double evaluate(final double[] values, final double[] weights) throws MathIllegalArgumentException { return evaluate(values, weights, 0, values.length); } /** * Returns the variance of the entries in the specified portion of * the input array, using the precomputed mean value. Returns * <code>Double.NaN</code> if the designated subarray is empty. * <p> * See {@link Variance} for details on the computing algorithm.</p> * <p> * The formula used assumes that the supplied mean value is the arithmetic * mean of the sample data, not a known population parameter. This method * is supplied only to save computation when the mean has already been * computed.</p> * <p> * Returns 0 for a single-value (i.e. 
length = 1) sample.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if the array is null.</p> * <p> * Does not change the internal state of the statistic.</p> * * @param values the input array * @param mean the precomputed mean value * @param begin index of the first array element to include * @param length the number of elements to include * @return the variance of the values or Double.NaN if length = 0 * @throws MathIllegalArgumentException if the array is null or the array index * parameters are not valid */ public double evaluate(final double[] values, final double mean, final int begin, final int length) throws MathIllegalArgumentException { double var = Double.NaN; if (MathArrays.verifyValues(values, begin, length)) { if (length == 1) { var = 0.0; } else if (length > 1) { double accum = 0.0; double dev = 0.0; double accum2 = 0.0; for (int i = begin; i < begin + length; i++) { dev = values[i] - mean; accum += dev * dev; accum2 += dev; } double len = length; if (isBiasCorrected) { var = (accum - (accum2 * accum2 / len)) / (len - 1.0); } else { var = (accum - (accum2 * accum2 / len)) / len; } } } return var; } /** * Returns the variance of the entries in the input array, using the * precomputed mean value. Returns <code>Double.NaN</code> if the array * is empty. * <p> * See {@link Variance} for details on the computing algorithm.</p> * <p> * If <code>isBiasCorrected</code> is <code>true</code> the formula used * assumes that the supplied mean value is the arithmetic mean of the * sample data, not a known population parameter. If the mean is a known * population parameter, or if the "population" version of the variance is * desired, set <code>isBiasCorrected</code> to <code>false</code> before * invoking this method.</p> * <p> * Returns 0 for a single-value (i.e. 
length = 1) sample.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if the array is null.</p> * <p> * Does not change the internal state of the statistic.</p> * * @param values the input array * @param mean the precomputed mean value * @return the variance of the values or Double.NaN if the array is empty * @throws MathIllegalArgumentException if the array is null */ public double evaluate(final double[] values, final double mean) throws MathIllegalArgumentException { return evaluate(values, mean, 0, values.length); } /** * Returns the weighted variance of the entries in the specified portion of * the input array, using the precomputed weighted mean value. Returns * <code>Double.NaN</code> if the designated subarray is empty. * <p> * Uses the formula <div style="white-space:pre"><code> * &Sigma;(weights[i]*(values[i] - mean)<sup>2</sup>)/(&Sigma;(weights[i]) - 1) * </code></div> * <p> * The formula used assumes that the supplied mean value is the weighted arithmetic * mean of the sample data, not a known population parameter. This method * is supplied only to save computation when the mean has already been * computed.</p> * <p> * This formula will not return the same result as the unweighted variance when all * weights are equal, unless all weights are equal to 1. The formula assumes that * weights are to be treated as "expansion values," as will be the case if for example * the weights represent frequency counts. To normalize weights so that the denominator * in the variance computation equals the length of the input vector minus one, use <pre> * <code>evaluate(values, MathArrays.normalizeArray(weights, values.length), mean); </code> * </pre> * <p> * Returns 0 for a single-value (i.e. 
length = 1) sample.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if any of the following are true: * <ul><li>the values array is null</li> * <li>the weights array is null</li> * <li>the weights array does not have the same length as the values array</li> * <li>the weights array contains one or more infinite values</li> * <li>the weights array contains one or more NaN values</li> * <li>the weights array contains negative values</li> * <li>the start and length arguments do not determine a valid array</li> * </ul> * <p> * Does not change the internal state of the statistic.</p> * * @param values the input array * @param weights the weights array * @param mean the precomputed weighted mean value * @param begin index of the first array element to include * @param length the number of elements to include * @return the variance of the values or Double.NaN if length = 0 * @throws MathIllegalArgumentException if the parameters are not valid * @since 2.1 */ public double evaluate(final double[] values, final double[] weights, final double mean, final int begin, final int length) throws MathIllegalArgumentException { double var = Double.NaN; if (MathArrays.verifyValues(values, weights, begin, length)) { if (length == 1) { var = 0.0; } else if (length > 1) { double accum = 0.0; double dev = 0.0; double accum2 = 0.0; for (int i = begin; i < begin + length; i++) { dev = values[i] - mean; accum += weights[i] * (dev * dev); accum2 += weights[i] * dev; } double sumWts = 0; for (int i = begin; i < begin + length; i++) { sumWts += weights[i]; } if (isBiasCorrected) { var = (accum - (accum2 * accum2 / sumWts)) / (sumWts - 1.0); } else { var = (accum - (accum2 * accum2 / sumWts)) / sumWts; } } } return var; } /** * <p>Returns the weighted variance of the values in the input array, using * the precomputed weighted mean value.</p> * <p> * Uses the formula <div style="white-space:pre"><code> * &Sigma;(weights[i]*(values[i] - mean)<sup>2</sup>)/(&Sigma;(weights[i]) - 1) * 
</code></div> * <p> * The formula used assumes that the supplied mean value is the weighted arithmetic * mean of the sample data, not a known population parameter. This method * is supplied only to save computation when the mean has already been * computed.</p> * <p> * This formula will not return the same result as the unweighted variance when all * weights are equal, unless all weights are equal to 1. The formula assumes that * weights are to be treated as "expansion values," as will be the case if for example * the weights represent frequency counts. To normalize weights so that the denominator * in the variance computation equals the length of the input vector minus one, use <pre> * <code>evaluate(values, MathArrays.normalizeArray(weights, values.length), mean); </code> * </pre> * <p> * Returns 0 for a single-value (i.e. length = 1) sample.</p> * <p> * Throws <code>MathIllegalArgumentException</code> if any of the following are true: * <ul><li>the values array is null</li> * <li>the weights array is null</li> * <li>the weights array does not have the same length as the values array</li> * <li>the weights array contains one or more infinite values</li> * <li>the weights array contains one or more NaN values</li> * <li>the weights array contains negative values</li> * </ul> * <p> * Does not change the internal state of the statistic.</p> * * @param values the input array * @param weights the weights array * @param mean the precomputed weighted mean value * @return the variance of the values or Double.NaN if length = 0 * @throws MathIllegalArgumentException if the parameters are not valid * @since 2.1 */ public double evaluate(final double[] values, final double[] weights, final double mean) throws MathIllegalArgumentException { return evaluate(values, weights, mean, 0, values.length); } /** * @return Returns the isBiasCorrected. */ public boolean isBiasCorrected() { return isBiasCorrected; } /** * @param biasCorrected The isBiasCorrected to set. 
*/ public void setBiasCorrected(boolean biasCorrected) { this.isBiasCorrected = biasCorrected; } /** * {@inheritDoc} */ @Override public Variance copy() { Variance result = new Variance(); // No try-catch or advertised exception because parameters are guaranteed non-null copy(this, result); return result; } /** * Copies source to dest. * <p>Neither source nor dest can be null.</p> * * @param source Variance to copy * @param dest Variance to copy to * @throws NullArgumentException if either source or dest is null */ public static void copy(Variance source, Variance dest) throws NullArgumentException { MathUtils.checkNotNull(source); MathUtils.checkNotNull(dest); dest.moment = source.moment.copy(); dest.isBiasCorrected = source.isBiasCorrected; dest.incMoment = source.incMoment; } }
package com.mikepenz.materialdrawer.model;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.PorterDuff;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;

import com.mikepenz.iconics.IconicsDrawable;
import com.mikepenz.iconics.typeface.IIcon;
import com.mikepenz.materialdrawer.R;
import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem;
import com.mikepenz.materialdrawer.model.interfaces.IProfile;
import com.mikepenz.materialdrawer.model.interfaces.Identifyable;
import com.mikepenz.materialdrawer.model.interfaces.Tagable;
import com.mikepenz.materialdrawer.model.interfaces.Typefaceable;
import com.mikepenz.materialdrawer.util.UIUtils;

/**
 * Created by mikepenz on 03.02.15.
 *
 * A drawer item rendered in the account switcher as a "profile setting" row
 * (e.g. "Add account"). Configured fluently via the {@code with*} builder
 * methods, each of which returns {@code this} for chaining. The icon may be
 * supplied in exactly one of four forms (Drawable, Bitmap, IIcon, or Uri);
 * color/typeface values may each be given either as a resolved value or as a
 * resource id (the {@code *Res} variants).
 */
public class ProfileSettingDrawerItem implements IDrawerItem, IProfile<ProfileSettingDrawerItem>, Tagable<ProfileSettingDrawerItem>, Identifyable<ProfileSettingDrawerItem>, Typefaceable<ProfileSettingDrawerItem> {
    // -1 means "no identifier assigned"
    private int identifier = -1;

    // profile settings are not selectable by default
    private boolean selectable = false;

    // alternative icon sources; only one is expected to be set
    private Drawable icon;
    private Bitmap iconBitmap;
    private IIcon iicon;
    private Uri iconUri;

    private String name;
    // NOTE: also exposed as "description" via withDescription/getDescription
    private String email;
    private boolean enabled = true;
    private Object tag;
    private boolean iconTinted = false;

    // for each color: the direct value (0 = unset) or a resource id (-1 = unset)
    private int selectedColor = 0;
    private int selectedColorRes = -1;

    private int textColor = 0;
    private int textColorRes = -1;

    private int iconColor = 0;
    private int iconColorRes = -1;

    private int visibility = View.VISIBLE;

    private Typeface typeface = null;

    // ---- fluent builder methods ----

    public ProfileSettingDrawerItem withIdentifier(int identifier) {
        this.identifier = identifier;
        return this;
    }

    public ProfileSettingDrawerItem withIcon(Drawable icon) {
        this.icon = icon;
        return this;
    }

    public ProfileSettingDrawerItem withIcon(Bitmap icon) {
        this.iconBitmap = icon;
        return this;
    }

    public ProfileSettingDrawerItem withIcon(IIcon iicon) {
        this.iicon = iicon;
        return this;
    }

    @Override
    public ProfileSettingDrawerItem withIcon(String url) {
        this.iconUri = Uri.parse(url);
        return this;
    }

    @Override
    public ProfileSettingDrawerItem withIcon(Uri uri) {
        this.iconUri = uri;
        return this;
    }

    public ProfileSettingDrawerItem withName(String name) {
        this.name = name;
        return this;
    }

    // stores into the email field; see withEmail below
    public ProfileSettingDrawerItem withDescription(String description) {
        this.email = description;
        return this;
    }

    //NOTE we reuse the IProfile here to allow custom items within the AccountSwitcher. There is an alias method withDescription for this
    public ProfileSettingDrawerItem withEmail(String email) {
        this.email = email;
        return this;
    }

    public ProfileSettingDrawerItem withTag(Object object) {
        this.tag = object;
        return this;
    }

    // setEnabled and withEnabled are equivalent; both return this for chaining
    public ProfileSettingDrawerItem setEnabled(boolean enabled) {
        this.enabled = enabled;
        return this;
    }

    public ProfileSettingDrawerItem withEnabled(boolean enabled) {
        this.enabled = enabled;
        return this;
    }

    public ProfileSettingDrawerItem withSelectedColor(int selectedColor) {
        this.selectedColor = selectedColor;
        return this;
    }

    public ProfileSettingDrawerItem withSelectedColorRes(int selectedColorRes) {
        this.selectedColorRes = selectedColorRes;
        return this;
    }

    public ProfileSettingDrawerItem withTextColor(int textColor) {
        this.textColor = textColor;
        return this;
    }

    public ProfileSettingDrawerItem withTextColorRes(int textColorRes) {
        this.textColorRes = textColorRes;
        return this;
    }

    public ProfileSettingDrawerItem withIconColor(int iconColor) {
        this.iconColor = iconColor;
        return this;
    }

    public ProfileSettingDrawerItem withIconColorRes(int iconColorRes) {
        this.iconColorRes = iconColorRes;
        return this;
    }

    @Override
    public ProfileSettingDrawerItem withSelectable(boolean selectable) {
        this.selectable = selectable;
        return this;
    }

    public ProfileSettingDrawerItem withTypeface(Typeface typeface) {
        this.typeface = typeface;
        return this;
    }

    public ProfileSettingDrawerItem withIconTinted(boolean iconTinted) {
        this.iconTinted = iconTinted;
        return this;
    }

    // ---- plain getters / setters ----

    @Override
    public Bitmap getIconBitmap() {
        return iconBitmap;
    }

    @Override
    public void setIconBitmap(Bitmap iconBitmap) {
        this.iconBitmap = iconBitmap;
    }

    public int getSelectedColor() {
        return selectedColor;
    }

    public void setSelectedColor(int selectedColor) {
        this.selectedColor = selectedColor;
    }

    public int getSelectedColorRes() {
        return selectedColorRes;
    }

    public void setSelectedColorRes(int selectedColorRes) {
        this.selectedColorRes = selectedColorRes;
    }

    public int getTextColor() {
        return textColor;
    }

    public void setTextColor(int textColor) {
        this.textColor = textColor;
    }

    public int getTextColorRes() {
        return textColorRes;
    }

    public void setTextColorRes(int textColorRes) {
        this.textColorRes = textColorRes;
    }

    public int getIconColorRes() {
        return iconColorRes;
    }

    public void setIconColorRes(int iconColorRes) {
        this.iconColorRes = iconColorRes;
    }

    public int getIconColor() {
        return iconColor;
    }

    public void setIconColor(int iconColor) {
        this.iconColor = iconColor;
    }

    @Override
    public int getVisibility() {
        return visibility;
    }

    // NOTE(review): lowercase 'v' and void return break the with* convention;
    // kept as-is since renaming would break existing callers
    public void withvisibility(int visibility) {
        this.visibility = visibility;
    }

    @Override
    public Object getTag() {
        return tag;
    }

    @Override
    public void setTag(Object tag) {
        this.tag = tag;
    }

    public Drawable getIcon() {
        return icon;
    }

    public void setIcon(Drawable icon) {
        this.icon = icon;
    }

    public IIcon getIIcon() {
        return iicon;
    }

    public void setIIcon(IIcon iicon) {
        this.iicon = iicon;
    }

    public void setIcon(Uri uri) {
        this.iconUri = uri;
    }

    public void setIcon(String url) {
        this.iconUri = Uri.parse(url);
    }

    @Override
    public Uri getIconUri() {
        return iconUri;
    }

    @Override
    public boolean isSelectable() {
        return selectable;
    }

    @Override
    public ProfileSettingDrawerItem setSelectable(boolean selectable) {
        this.selectable = selectable;
        return this;
    }

    public boolean isIconTinted() {
        return iconTinted;
    }

    public void setIconTinted(boolean iconTinted) {
        this.iconTinted = iconTinted;
    }
@Override
    public Typeface getTypeface() {
        return typeface;
    }

    @Override
    public void setTypeface(Typeface typeface) {
        this.typeface = typeface;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    /** The description is an alias view over the {@code email} field. */
    public String getDescription() {
        return email;
    }

    /**
     * Stores the description in the {@code email} field.
     * FIX: previously assigned {@code this.email = email} (a self-assignment
     * no-op) so the argument was silently discarded.
     */
    public void setDescription(String description) {
        this.email = description;
    }

    @Override
    public int getIdentifier() {
        return identifier;
    }

    public void setIdentifier(int identifier) {
        this.identifier = identifier;
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public String getType() {
        return "PROFILE_SETTING_ITEM";
    }

    @Override
    public int getLayoutRes() {
        return R.layout.material_drawer_item_profile_setting;
    }

    /**
     * Binds this item into a recycled or freshly inflated row view.
     * Resolves the selected/text/icon colors (explicit value wins over
     * resource, falling back to theme attr, then default color), then applies
     * name, typeface and whichever icon source is set (Drawable, Bitmap or
     * IIcon, in that priority order).
     */
    @Override
    public View convertView(LayoutInflater inflater, View convertView, ViewGroup parent) {
        Context ctx = parent.getContext();

        ViewHolder viewHolder;
        if (convertView == null) {
            convertView = inflater.inflate(getLayoutRes(), parent, false);
            viewHolder = new ViewHolder(convertView);
            convertView.setTag(viewHolder);
        } else {
            viewHolder = (ViewHolder) convertView.getTag();
        }

        // get the correct color for the background
        int selectedColor = UIUtils.decideColor(ctx, getSelectedColor(), getSelectedColorRes(),
                R.attr.material_drawer_selected, R.color.material_drawer_selected);
        // get the correct color for the text
        int color = UIUtils.decideColor(ctx, getTextColor(), getTextColorRes(),
                R.attr.material_drawer_primary_text, R.color.material_drawer_primary_text);
        // get the correct color for the icon
        int iconColor = UIUtils.decideColor(ctx, getIconColor(), getIconColorRes(),
                R.attr.material_drawer_primary_icon, R.color.material_drawer_primary_icon);

        UIUtils.setBackground(viewHolder.view, UIUtils.getDrawerItemBackground(selectedColor));

        viewHolder.name.setText(this.getName());
        viewHolder.name.setTextColor(color);
        if (getTypeface() != null) {
            viewHolder.name.setTypeface(getTypeface());
        }

        // get the correct icon: Drawable > Bitmap > IIcon, otherwise hide the view
        if (this.getIcon() != null) {
            if (icon != null && isIconTinted()) {
                icon.setColorFilter(iconColor, PorterDuff.Mode.SRC_IN);
            }
            viewHolder.icon.setImageDrawable(icon);
            viewHolder.icon.setVisibility(View.VISIBLE);
        } else if (this.getIconBitmap() != null) {
            viewHolder.icon.setImageBitmap(iconBitmap);
            viewHolder.icon.setVisibility(View.VISIBLE);
        } else if (this.getIIcon() != null) {
            viewHolder.icon.setImageDrawable(
                    new IconicsDrawable(ctx, this.getIIcon()).color(iconColor).actionBarSize().paddingDp(2));
            viewHolder.icon.setVisibility(View.VISIBLE);
        } else {
            viewHolder.icon.setVisibility(View.GONE);
        }

        return convertView;
    }

    /** Row view cache to avoid repeated findViewById lookups on rebind. */
    private static class ViewHolder {
        private View view;
        private ImageView icon;
        private TextView name;

        private ViewHolder(View view) {
            this.view = view;
            this.icon = (ImageView) view.findViewById(R.id.icon);
            this.name = (TextView) view.findViewById(R.id.name);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ranger.db;

import java.util.ArrayList;
import java.util.List;

import javax.persistence.NoResultException;

import org.apache.commons.collections.ListUtils;
import org.apache.ranger.common.db.BaseDao;
import org.apache.ranger.entity.XXPolicy;
import org.apache.ranger.plugin.model.RangerSecurityZone;
import org.springframework.stereotype.Service;

/**
 * DAO for {@link XXPolicy} entities. All finder methods are null-safe on
 * their arguments and return an empty list (never {@code null}) for
 * list-valued lookups when nothing matches or the query fails.
 */
@Service
public class XXPolicyDao extends BaseDao<XXPolicy> {
	/**
	 * Default Constructor
	 */
	public XXPolicyDao(RangerDaoManagerBase daoManager) {
		super(daoManager);
	}

	/** Looks up a policy by name within a service, restricted to the unzoned security zone. */
	public XXPolicy findByNameAndServiceId(String polName, Long serviceId) {
		return findByNameAndServiceIdAndZoneId(polName, serviceId, RangerSecurityZone.RANGER_UNZONED_SECURITY_ZONE_ID);
	}

	/**
	 * Looks up a policy by name within a service and security zone.
	 *
	 * @return the matching policy, or null if arguments are null or no match exists
	 */
	public XXPolicy findByNameAndServiceIdAndZoneId(String polName, Long serviceId, Long zoneId) {
		if (polName == null || serviceId == null) {
			return null;
		}
		XXPolicy ret;
		try {
			ret = getEntityManager()
					.createNamedQuery("XXPolicy.findByNameAndServiceIdAndZoneId", tClass)
					.setParameter("polName", polName).setParameter("serviceId", serviceId).setParameter("zoneId", zoneId)
					.getSingleResult();
		} catch (NoResultException e) {
			ret = null;
		}
		return ret;
	}

	/** Looks up a policy by its (globally queried) name; null if absent. */
	public XXPolicy findByPolicyName(String polName) {
		if (polName == null) {
			return null;
		}
		try {
			XXPolicy xPol = getEntityManager().createNamedQuery("XXPolicy.findByPolicyName", tClass)
					.setParameter("polName", polName).getSingleResult();
			return xPol;
		} catch (NoResultException e) {
			return null;
		}
	}

	/** Returns all policies of the given service; empty list if none or serviceId is null. */
	public List<XXPolicy> findByServiceId(Long serviceId) {
		if (serviceId == null) {
			return new ArrayList<XXPolicy>();
		}
		try {
			return getEntityManager()
					.createNamedQuery("XXPolicy.findByServiceId", tClass)
					.setParameter("serviceId", serviceId).getResultList();
		} catch (NoResultException e) {
			return new ArrayList<XXPolicy>();
		}
	}

	/** Returns the ids of all policies of the given service; best-effort, empty list on failure. */
	public List<Long> findPolicyIdsByServiceId(Long serviceId) {
		List<Long> ret = new ArrayList<Long>();
		try {
			ret = getEntityManager()
					.createNamedQuery("XXPolicy.findPolicyIdsByServiceId", Long.class)
					.setParameter("serviceId", serviceId).getResultList();
		} catch (Exception e) {
			// best-effort: fall through and return the empty list
		}
		return ret;
	}

	/** Returns the maximum policy id, or null when the table is empty. */
	public Long getMaxIdOfXXPolicy() {
		try {
			return (Long) getEntityManager().createNamedQuery("XXPolicy.getMaxIdOfXXPolicy").getSingleResult();
		} catch (NoResultException e) {
			return null;
		}
	}

	/** Finds policies by resource signature within a service, filtered by enabled status. */
	public List<XXPolicy> findByResourceSignatureByPolicyStatus(String serviceName, String policySignature,
			Boolean isPolicyEnabled) {
		if (policySignature == null || serviceName == null || isPolicyEnabled == null) {
			return new ArrayList<XXPolicy>();
		}
		try {
			return getEntityManager().createNamedQuery("XXPolicy.findByResourceSignatureByPolicyStatus", tClass)
					.setParameter("resSignature", policySignature)
					.setParameter("serviceName", serviceName)
					.setParameter("isPolicyEnabled", isPolicyEnabled)
					.getResultList();
		} catch (NoResultException e) {
			return new ArrayList<XXPolicy>();
		}
	}

	/** Finds policies by resource signature within a service, regardless of enabled status. */
	public List<XXPolicy> findByResourceSignature(String serviceName, String policySignature) {
		if (policySignature == null || serviceName == null) {
			return new ArrayList<XXPolicy>();
		}
		try {
			return getEntityManager().createNamedQuery("XXPolicy.findByResourceSignature", tClass)
					.setParameter("resSignature", policySignature)
					.setParameter("serviceName", serviceName)
					.getResultList();
		} catch (NoResultException e) {
			return new ArrayList<XXPolicy>();
		}
	}

	/** Returns all policies belonging to services of the given service-def. */
	public List<XXPolicy> findByServiceDefId(Long serviceDefId) {
		if (serviceDefId == null) {
			return new ArrayList<XXPolicy>();
		}
		try {
			return getEntityManager().createNamedQuery("XXPolicy.findByServiceDefId", tClass)
					.setParameter("serviceDefId", serviceDefId).getResultList();
		} catch (NoResultException e) {
			return new ArrayList<XXPolicy>();
		}
	}

	/** Advances X_POLICY_SEQ past the current maximum policy id (no-op on an empty table). */
	public void updateSequence() {
		Long maxId = getMaxIdOfXXPolicy();

		if (maxId == null) {
			return;
		}

		updateSequence("X_POLICY_SEQ", maxId + 1);
	}

	/** Returns policies referencing the given user; empty list for null/zero ids. */
	public List<XXPolicy> findByUserId(Long userId) {
		if (userId == null || userId.equals(Long.valueOf(0L))) {
			return new ArrayList<XXPolicy>();
		}
		try {
			return getEntityManager()
					.createNamedQuery("XXPolicy.findByUserId", tClass)
					.setParameter("userId", userId).getResultList();
		} catch (NoResultException e) {
			return new ArrayList<XXPolicy>();
		}
	}

	/** Returns policies referencing the given group; empty list for null/zero ids. */
	public List<XXPolicy> findByGroupId(Long groupId) {
		if (groupId == null || groupId.equals(Long.valueOf(0L))) {
			return new ArrayList<XXPolicy>();
		}
		try {
			return getEntityManager()
					.createNamedQuery("XXPolicy.findByGroupId", tClass)
					.setParameter("groupId", groupId).getResultList();
		} catch (NoResultException e) {
			return new ArrayList<XXPolicy>();
		}
	}

	/** Returns policy ids for a service name within a zone; best-effort, empty list on failure. */
	public List<Long> findPolicyIdsByServiceNameAndZoneId(String serviceName, Long zoneId) {
		List<Long> ret = new ArrayList<Long>();
		try {
			ret = getEntityManager()
					.createNamedQuery("XXPolicy.findPolicyIdsByServiceNameAndZoneId", Long.class)
					.setParameter("serviceName", serviceName)
					.setParameter("zoneId", zoneId)
					.getResultList();
		} catch (Exception e) {
			// best-effort: fall through and return the empty list
		}
		return ret;
	}

	/**
	 * Returns policies referencing the given role.
	 * FIX: uses a typed empty list instead of the raw {@code ListUtils.EMPTY_LIST}
	 * (unchecked raw-type assignment).
	 */
	public List<XXPolicy> findByRoleId(Long roleId) {
		List<XXPolicy> ret = new ArrayList<XXPolicy>();

		if (roleId != null) {
			try {
				ret = getEntityManager().createNamedQuery("XXPolicy.findByRoleId", tClass)
						.setParameter("roleId", roleId)
						.getResultList();
			} catch (NoResultException excp) {
				// no match: keep the empty list
			}
		}

		return ret;
	}

	/**
	 * Returns ids of services that have a policy referencing the given role.
	 * FIX: typed empty list instead of raw {@code ListUtils.EMPTY_LIST}.
	 */
	public List<Long> findServiceIdsByRoleId(Long roleId) {
		List<Long> ret = new ArrayList<Long>();

		if (roleId != null) {
			try {
				ret = getEntityManager().createNamedQuery("XXPolicy.findServiceIdsByRoleId", Long.class)
						.setParameter("roleId", roleId)
						.getResultList();
			} catch (NoResultException excp) {
				// no match: keep the empty list
			}
		}

		return ret;
	}

	/**
	 * Returns policy ids referencing the given role within a service.
	 * FIX: previously returned {@code null} on query failure while every
	 * sibling finder returns an empty list; now returns an empty list.
	 */
	public List<Long> findPolicyIdsByRoleNameAndServiceId(String roleName, Long serviceId) {
		List<Long> ret = new ArrayList<Long>();
		try {
			ret = getEntityManager()
					.createNamedQuery("XXPolicy.findPolicyIdsByRoleNameAndServiceId", Long.class)
					.setParameter("serviceId", serviceId)
					.setParameter("roleName", roleName).getResultList();
		} catch (Exception e) {
			// best-effort: fall through and return the empty list
		}
		return ret;
	}
}
/* * Copyright (c) 2011-2015 EPFL DATA Laboratory * Copyright (c) 2014-2015 The Squall Collaboration (see NOTICE) * * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ch.epfl.data.squall.storm_components.hyper_cube; import ch.epfl.data.squall.operators.AggregateOperator; import ch.epfl.data.squall.operators.ChainOperator; import ch.epfl.data.squall.operators.Operator; import ch.epfl.data.squall.predicates.ComparisonPredicate; import ch.epfl.data.squall.storage.indexes.Index; import ch.epfl.data.squall.storm_components.*; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; import java.util.concurrent.Semaphore; import ch.epfl.data.squall.thetajoin.matrix_assignment.HyperCubeAssignerFactory; import ch.epfl.data.squall.thetajoin.matrix_assignment.HyperCubeAssignment; import ch.epfl.data.squall.types.Type; import ch.epfl.data.squall.visitors.PredicateCreateIndexesVisitor; import ch.epfl.data.squall.visitors.PredicateUpdateIndexesVisitor; import gnu.trove.list.array.TIntArrayList; import org.apache.log4j.Logger; import backtype.storm.Config; import backtype.storm.topology.InputDeclarer; import backtype.storm.topology.TopologyBuilder; import backtype.storm.tuple.Tuple; import ch.epfl.data.squall.components.ComponentProperties; import ch.epfl.data.squall.predicates.Predicate; import ch.epfl.data.squall.storage.TupleStorage; import ch.epfl.data.squall.storm_components.synchronization.TopologyKiller; import 
ch.epfl.data.squall.utilities.MyUtilities; import ch.epfl.data.squall.utilities.PeriodicAggBatchSend; import ch.epfl.data.squall.utilities.SystemParameters; import ch.epfl.data.squall.utilities.statistics.StatisticsUtilities; public class StormHyperCubeJoin extends StormBoltComponent { private static final long serialVersionUID = 1L; private static Logger LOG = Logger.getLogger(StormHyperCubeJoin.class); private List<TupleStorage> relationStorages; private ArrayList<Integer> joinOrders; private List<String> emitterIndexes; private long numSentTuples = 0; private Map<String, Predicate> joinPredicates; private ChainOperator operatorChain; // For each relation that has predicate between them we create Index. // firstRelationIndexes and secondRelationIndexes store Indexes between two relations. // [Key, ArrayList[Index]] -> key = [FirstRelation+SecondRelation], string concatenation of two relation names // Same for secondRelationIndexes, key = [SecondRelation+FirstRelation]. // Look at createIndexes() function for more understanding private Map<String, ArrayList<Index>> firstRelationIndexes = new HashMap<String, ArrayList<Index>>(); private Map<String, ArrayList<Index>> secondRelationIndexes = new HashMap<String, ArrayList<Index>>(); private Map<String, ArrayList<Integer>> operatorForIndexes = new HashMap<String, ArrayList<Integer>>(); private Map<String, ArrayList<Object>> typeOfValueIndexed = new HashMap<String, ArrayList<Object>>(); Map<String, List<String>> valuesToIndexMap = new HashMap<String, List<String>>(); private boolean existIndexes = false; // for agg batch sending private final Semaphore _semAgg = new Semaphore(1, true); private boolean _firstTime = true; private PeriodicAggBatchSend _periodicAggBatch; private long _aggBatchOutputMillis; // for printing statistics for creating graphs protected Calendar _cal = Calendar.getInstance(); protected DateFormat _dateFormat = new SimpleDateFormat("HH:mm:ss.SSS"); protected SimpleDateFormat _format = new 
SimpleDateFormat( "EEE MMM d HH:mm:ss zzz yyyy"); protected StatisticsUtilities _statsUtils; public StormHyperCubeJoin (ArrayList<StormEmitter> emitters, ComponentProperties cp, List<String> allCompNames, Map<String, Predicate> joinPredicates, int hierarchyPosition, TopologyBuilder builder, TopologyKiller killer, Config conf, Type wrapper) { super(cp, allCompNames, hierarchyPosition, false, conf); emitterIndexes = new ArrayList<String>(); for (int i = 0; i < emitters.size(); i++) { emitterIndexes.add(String.valueOf(allCompNames.indexOf(emitters.get(i).getName()))); } _aggBatchOutputMillis = cp.getBatchOutputMillis(); _statsUtils = new StatisticsUtilities(getConf(), LOG); final int parallelism = SystemParameters.getInt(conf, getID() + "_PAR"); operatorChain = cp.getChainOperator(); this.joinPredicates = joinPredicates; InputDeclarer currentBolt = builder.setBolt(getID(), this, parallelism); final HyperCubeAssignment _currentMappingAssignment; long[] cardinality = new long[emitters.size()]; for (int i = 0; i < emitters.size(); i++) cardinality[i] = SystemParameters.getInt(conf, emitters.get(i).getName() + "_CARD"); _currentMappingAssignment = new HyperCubeAssignerFactory().getAssigner(parallelism, cardinality); currentBolt = MyUtilities.attachEmitterHyperCube(currentBolt, emitters, allCompNames, _currentMappingAssignment, conf); if (getHierarchyPosition() == FINAL_COMPONENT && (!MyUtilities.isAckEveryTuple(conf))) killer.registerComponent(this, parallelism); if (cp.getPrintOut() && operatorChain.isBlocking()) currentBolt.allGrouping(killer.getID(), SystemParameters.DUMP_RESULTS_STREAM); relationStorages = new ArrayList<TupleStorage>(); joinOrders = new ArrayList<Integer>(); for (int i = 0; i < emitters.size(); i++) { relationStorages.add(new TupleStorage()); joinOrders.add(i); } if (joinPredicates != null) { createIndexes(); existIndexes = true; } else existIndexes = false; } @Override public void aggBatchSend() { if 
(MyUtilities.isAggBatchOutputMode(_aggBatchOutputMillis)) if (operatorChain != null) { final Operator lastOperator = operatorChain.getLastOperator(); if (lastOperator instanceof AggregateOperator) { try { _semAgg.acquire(); } catch (final InterruptedException ex) { } // sending final AggregateOperator agg = (AggregateOperator) lastOperator; final List<String> tuples = agg.getContent(); for (final String tuple : tuples) tupleSend(MyUtilities.stringToTuple(tuple, getConf()), null, 0); // clearing agg.clearStorage(); _semAgg.release(); } } } protected void applyOperatorsAndSend(Tuple stormTupleRcv, List<String> inTuple, long lineageTimestamp, boolean isLastInBatch) { if (MyUtilities.isAggBatchOutputMode(_aggBatchOutputMillis)) try { _semAgg.acquire(); } catch (final InterruptedException ex) { } for (List<String> tuple : operatorChain.process(inTuple, lineageTimestamp)) { if (MyUtilities.isAggBatchOutputMode(_aggBatchOutputMillis)) _semAgg.release(); if (tuple == null) return; numSentTuples++; printTuple(tuple); if (numSentTuples % _statsUtils.getDipOutputFreqPrint() == 0) printStatistics(SystemParameters.OUTPUT_PRINT); if (MyUtilities .isSending(getHierarchyPosition(), _aggBatchOutputMillis)) { long timestamp = 0; if (MyUtilities.isCustomTimestampMode(getConf())) // if measuring latency of the last operator only // if (getHierarchyPosition() == StormComponent.NEXT_TO_LAST_COMPONENT) // timestamp = System.currentTimeMillis(); timestamp = stormTupleRcv.getLongByField(StormComponent.TIMESTAMP); // the timestamp of the tuple that comes last tupleSend(tuple, stormTupleRcv, timestamp); } if (MyUtilities.isPrintLatency(getHierarchyPosition(), getConf())) printTupleLatency(numSentTuples - 1, lineageTimestamp); // TODO - this is always 0 in the code } } // For each emitter we check is there Predicate between them. // If yes, we create Indexes between them and add to firstRelationIndexes and secondRelationIndexes. 
private void createIndexes() { LOG.info("Emitter Indexes : " + emitterIndexes); LOG.info("Predicate : " + joinPredicates); for (int i = 0; i < emitterIndexes.size(); i++) { for (int j = i + 1; j < emitterIndexes.size(); j++) { String key = emitterIndexes.get(i) + emitterIndexes.get(j); String keyReverse = emitterIndexes.get(j) + emitterIndexes.get(i); if (joinPredicates.containsKey(key)) { Predicate pr = joinPredicates.get(key); final PredicateCreateIndexesVisitor visitor = new PredicateCreateIndexesVisitor(); pr.accept(visitor); firstRelationIndexes.put(key, new ArrayList<Index>(visitor._firstRelationIndexes)); secondRelationIndexes.put(keyReverse, new ArrayList<Index>(visitor._secondRelationIndexes)); operatorForIndexes.put(key, new ArrayList<Integer>(visitor._operatorForIndexes)); typeOfValueIndexed.put(key, new ArrayList<Object>(visitor._typeOfValueIndexed)); } } } } @Override public void execute(Tuple stormTupleRcv) { if (_firstTime && MyUtilities.isAggBatchOutputMode(_aggBatchOutputMillis)) { _periodicAggBatch = new PeriodicAggBatchSend(_aggBatchOutputMillis, this); _firstTime = false; } if (receivedDumpSignal(stormTupleRcv)) { MyUtilities.dumpSignal(this, stormTupleRcv, getCollector()); return; } if (!MyUtilities.isManualBatchingMode(getConf())) { final String inputComponentIndex = stormTupleRcv .getStringByField(StormComponent.COMP_INDEX); // getString(0); final List<String> tuple = (List<String>) stormTupleRcv.getValueByField(StormComponent.TUPLE); // getValue(1); final String inputTupleHash = stormTupleRcv.getStringByField(StormComponent.HASH);// getString(2); if (processFinalAck(tuple, stormTupleRcv)) return; final String inputTupleString = MyUtilities.tupleToString(tuple, getConf()); processNonLastTuple(inputComponentIndex, inputTupleString, tuple, stormTupleRcv, true); } else { final String inputComponentIndex = stormTupleRcv .getStringByField(StormComponent.COMP_INDEX); // getString(0); final String inputBatch = stormTupleRcv 
.getStringByField(StormComponent.TUPLE);// getString(1); final String[] wholeTuples = inputBatch .split(SystemParameters.MANUAL_BATCH_TUPLE_DELIMITER); final int batchSize = wholeTuples.length; for (int i = 0; i < batchSize; i++) { // parsing final String currentTuple = new String(wholeTuples[i]); final String[] parts = currentTuple.split(SystemParameters.MANUAL_BATCH_HASH_DELIMITER); String inputTupleHash = null; String inputTupleString = null; if (parts.length == 1) // lastAck inputTupleString = new String(parts[0]); else { inputTupleHash = new String(parts[0]); inputTupleString = new String(parts[1]); } final List<String> tuple = MyUtilities.stringToTuple( inputTupleString, getConf()); // final Ack check if (processFinalAck(tuple, stormTupleRcv)) { if (i != batchSize - 1) throw new RuntimeException( "Should not be here. LAST_ACK is not the last tuple!"); return; } // processing a tuple if (i == batchSize - 1) processNonLastTuple(inputComponentIndex, inputTupleString, tuple, stormTupleRcv, true); else processNonLastTuple(inputComponentIndex, inputTupleString, tuple, stormTupleRcv, false); } } getCollector().ack(stormTupleRcv); } @Override public ChainOperator getChainOperator() { return operatorChain; } // from IRichBolt @Override public Map<String, Object> getComponentConfiguration() { return getConf(); } @Override public String getInfoID() { final String str = "DestinationStorage " + getID() + " has ID: " + getID(); return str; } @Override public long getNumSentTuples() { return numSentTuples; } @Override public PeriodicAggBatchSend getPeriodicAggBatch() { return _periodicAggBatch; } protected void performJoin(Tuple stormTupleRcv, List<String> tuple, int rowID, String emitterIndex, boolean isLastInBatch) { ArrayList<Integer> result = new ArrayList<Integer>(); List<List<String>> outputTuples = new ArrayList<List<String>>(); result.add(rowID); // We have tuple from C. 
getOrderToJoin should return optimal join order for joining, e.g C - A - B - D - E ArrayList<Integer> joinOrder = getOrderToJoin(emitterIndex); join(1, joinOrder, result, outputTuples); long lineageTimestamp = 0; for (List<String> tpl : outputTuples) { applyOperatorsAndSend(stormTupleRcv, tpl, lineageTimestamp, isLastInBatch); } } // it is dummy function - Patrice is going to change arguments and return paramater. // it analyze join graph and returns arraylist for join order public ArrayList<Integer> getOrderToJoin(String emitterIndex) { //return new ArrayList<Integer>(); return joinOrders; } // RelationVisited - number of relations already joined from the left // JoinOrder - the order we should use to join relations, e.g A - B - C - D - E or A - C - D - B - E // JoinResult - if we have RelationVisited = 3, it means we have already joined there relations from the left // according JoinOrder. // OutputTuples - is aggregator, to collect the final result public void join(int relationVisited, ArrayList<Integer> joinOrder, ArrayList<Integer> joinResult, List<List<String>> outputTuples) { // we have rowA, rowB, rowC, rowD, rowE => we can create tuple if (relationVisited == relationStorages.size()) { List<List<String>> tuple = new ArrayList<List<String>>(); for (int i = 0; i < joinResult.size(); i++) { String oppositeTupleString = relationStorages.get(joinOrder.get(i)).get(joinResult.get(joinOrder.get(i))); final List<String> oppositeTuple = MyUtilities.stringToTuple( oppositeTupleString, getComponentConfiguration()); tuple.add(oppositeTuple); List<String> outputTuple = MyUtilities.createOutputTuple(tuple); outputTuples.add(outputTuple); } // no need to continue return; } // We have some tuple till me, for example in join result we have C - B - D // Now I should choose all possible tuples form A. 
// We look A predicates and choose A - C, A - B, and A - D and intersect them LinkedList<Integer> tuplesToJoin = new LinkedList<Integer>(); boolean firsTime = true; for (int i = 0; i < relationVisited; i++) { LinkedList<Integer> tmpTuplesToJoin = new LinkedList<Integer>(); selectTupleToJoinForMultipleJoin(joinOrder.get(i), joinOrder.get(relationVisited), tmpTuplesToJoin); if (firsTime) tuplesToJoin = tmpTuplesToJoin; else tuplesToJoin.retainAll(tmpTuplesToJoin); } // if empty we should not continue if (tuplesToJoin.isEmpty()) return; for (int id : tuplesToJoin) { ArrayList<Integer> newJoinResult = (ArrayList<Integer>)joinResult.clone(); newJoinResult.add(id); join(relationVisited + 1, joinOrder, newJoinResult, outputTuples); } } private void selectTupleToJoinForMultipleJoin(int firstEmitterIndex, int secondEmitterIndex, List<Integer> tuplesToJoin) { boolean isFromFirstEmitter = true; List<Index> oppositeIndexes = new ArrayList<Index>(); String key = emitterIndexes.get(firstEmitterIndex) + emitterIndexes.get(secondEmitterIndex); String keyReverse = emitterIndexes.get(secondEmitterIndex) + emitterIndexes.get(firstEmitterIndex); if (firstRelationIndexes.containsKey(key)) { oppositeIndexes = firstRelationIndexes.get(key); isFromFirstEmitter = true; } else if (secondRelationIndexes.containsKey(keyReverse)) { oppositeIndexes = secondRelationIndexes.get(keyReverse); isFromFirstEmitter = false; key = keyReverse; } // Predicate pr = joinPredicates.get(key); // final PredicateCreateIndexesVisitor visitor = new PredicateCreateIndexesVisitor(); // pr.accept(visitor); selectTupleToJoin(key, oppositeIndexes, isFromFirstEmitter, tuplesToJoin); } private void selectTupleToJoin(String key, List<Index> oppositeIndexes, boolean isFromFirstEmitter, List<Integer> tuplesToJoin) { final TIntArrayList rowIds = new TIntArrayList(); // If there is at least one index (so we have single join conditions with // 1 index per condition) // Get the row indices in the storage of the opposite 
relation that // satisfy each join condition (equijoin / inequality) // Then take the intersection of the returned row indices since each // join condition // is separated by AND for (int i = 0; i < oppositeIndexes.size(); i++) { TIntArrayList currentRowIds = null; final Index currentOpposIndex = oppositeIndexes.get(i); final String value = valuesToIndexMap.get(key).get(i); int currentOperator = operatorForIndexes.get(key).get(i); // Switch inequality operator if the tuple coming is from the other // relation if (isFromFirstEmitter) { final int operator = currentOperator; if (operator == ComparisonPredicate.GREATER_OP) currentOperator = ComparisonPredicate.LESS_OP; else if (operator == ComparisonPredicate.NONGREATER_OP) currentOperator = ComparisonPredicate.NONLESS_OP; else if (operator == ComparisonPredicate.LESS_OP) currentOperator = ComparisonPredicate.GREATER_OP; else if (operator == ComparisonPredicate.NONLESS_OP) currentOperator = ComparisonPredicate.NONGREATER_OP; else currentOperator = operator; } // Get the values from the index (check type first) if (typeOfValueIndexed.get(key).get(i) instanceof String) currentRowIds = currentOpposIndex.getValues(currentOperator, value); // Even if valueIndexed is at first time an integer with // precomputation a*col +b, it become a double else if (typeOfValueIndexed.get(key).get(i) instanceof Double) currentRowIds = currentOpposIndex.getValues(currentOperator, Double.parseDouble(value)); else if (typeOfValueIndexed.get(key).get(i) instanceof Integer) currentRowIds = currentOpposIndex.getValues(currentOperator, Integer.parseInt(value)); else if (typeOfValueIndexed.get(key).get(i) instanceof Long) currentRowIds = currentOpposIndex.getValues(currentOperator, Long.parseLong(value)); else if (typeOfValueIndexed.get(key).get(i) instanceof Date) try { currentRowIds = currentOpposIndex.getValues( currentOperator, _format.parse(value)); } catch (final java.text.ParseException e) { e.printStackTrace(); } else throw new 
RuntimeException("non supported type"); // Compute the intersection // TODO: Search only within the ids that are in rowIds from previous // conditions If // nothing returned (and since we want intersection), no need to // proceed. if (currentRowIds == null) return; // If it's the first index, add everything. Else keep the // intersection if (i == 0) rowIds.addAll(currentRowIds); else rowIds.retainAll(currentRowIds); // If empty after intersection, return if (rowIds.isEmpty()) return; } // generate tuplestorage for (int i = 0; i < rowIds.size(); i++) { final int id = rowIds.get(i); tuplesToJoin.add(id); } } @Override protected void printStatistics(int type) { } private void processNonLastTuple(String inputComponentIndex, String inputTupleString, // List<String> tuple, // these two are the same Tuple stormTupleRcv, boolean isLastInBatch) { // Find out affected storage TupleStorage affectedStorage = null; for (int i = 0; i < emitterIndexes.size(); i++) { if (inputComponentIndex.equals(emitterIndexes.get(i))) { affectedStorage = relationStorages.get(i); break; } } // add the stormTuple to the specific storage if (MyUtilities.isStoreTimestamp(getConf(), getHierarchyPosition())) { final long incomingTimestamp = stormTupleRcv.getLongByField(StormComponent.TIMESTAMP); inputTupleString = incomingTimestamp + SystemParameters.STORE_TIMESTAMP_DELIMITER + inputTupleString; } final int row_id = affectedStorage.insert(inputTupleString); if (existIndexes) updateIndexes(inputComponentIndex, tuple, row_id); performJoin(stormTupleRcv, tuple, row_id, inputComponentIndex, isLastInBatch); } // The same architecture that is in StormJoinBoltComponent. // However, we consider that there can be many different predicates for given relation // with different other relations. So, we should update all of them. 
private void updateIndexes(String inputComponentIndex, List<String> tuple, int row_id) { for (int i = 0; i < emitterIndexes.size(); i++) { String key = inputComponentIndex + emitterIndexes.get(i); String keyReverse = emitterIndexes.get(i) + inputComponentIndex; if (firstRelationIndexes.containsKey(key)) { List<Index> affectedIndexes = firstRelationIndexes.get(key); final PredicateUpdateIndexesVisitor visitor = new PredicateUpdateIndexesVisitor(true, tuple); Predicate _joinPredicate = joinPredicates.get(key); _joinPredicate.accept(visitor); final List<Object> typesOfValuesToIndex = new ArrayList<Object>( visitor._typesOfValuesToIndex); final List<String> valuesToIndex = new ArrayList<String>( visitor._valuesToIndex); for (int j = 0; j < affectedIndexes.size(); j++) if (typesOfValuesToIndex.get(j) instanceof Integer) affectedIndexes.get(j).put(row_id, Integer.parseInt(valuesToIndex.get(j))); else if (typesOfValuesToIndex.get(j) instanceof Double) affectedIndexes.get(j).put(row_id, Double.parseDouble(valuesToIndex.get(j))); else if (typesOfValuesToIndex.get(j) instanceof Long) affectedIndexes.get(j).put(row_id, Long.parseLong(valuesToIndex.get(j))); else if (typesOfValuesToIndex.get(j) instanceof Date) try { affectedIndexes.get(j).put(row_id, _format.parse(valuesToIndex.get(j))); } catch (final java.text.ParseException e) { throw new RuntimeException( "Parsing problem in StormThetaJoin.updatedIndexes " + e.getMessage()); } else if (typesOfValuesToIndex.get(j) instanceof String) affectedIndexes.get(j).put(row_id, valuesToIndex.get(j)); else throw new RuntimeException("non supported type"); valuesToIndexMap.put(key, valuesToIndex); } else if (secondRelationIndexes.containsKey(keyReverse)) { List<Index> affectedIndexes = secondRelationIndexes.get(keyReverse); final PredicateUpdateIndexesVisitor visitor = new PredicateUpdateIndexesVisitor(false, tuple); Predicate _joinPredicate = joinPredicates.get(keyReverse); _joinPredicate.accept(visitor); final List<Object> 
typesOfValuesToIndex = new ArrayList<Object>( visitor._typesOfValuesToIndex); final List<String> valuesToIndex = new ArrayList<String>( visitor._valuesToIndex); for (int j = 0; j < affectedIndexes.size(); j++) if (typesOfValuesToIndex.get(j) instanceof Integer) affectedIndexes.get(j).put(row_id, Integer.parseInt(valuesToIndex.get(j))); else if (typesOfValuesToIndex.get(j) instanceof Double) affectedIndexes.get(j).put(row_id, Double.parseDouble(valuesToIndex.get(j))); else if (typesOfValuesToIndex.get(j) instanceof Long) affectedIndexes.get(j).put(row_id, Long.parseLong(valuesToIndex.get(j))); else if (typesOfValuesToIndex.get(j) instanceof Date) try { affectedIndexes.get(j).put(row_id, _format.parse(valuesToIndex.get(j))); } catch (final java.text.ParseException e) { throw new RuntimeException( "Parsing problem in StormThetaJoin.updatedIndexes " + e.getMessage()); } else if (typesOfValuesToIndex.get(j) instanceof String) affectedIndexes.get(j).put(row_id, valuesToIndex.get(j)); else throw new RuntimeException("non supported type"); valuesToIndexMap.put(keyReverse, valuesToIndex); } } } // TODO WINDOW Semantics @Override public void purgeStaleStateFromWindow() { System.gc(); } }
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.sql.impl.exec;

import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.instance.impl.HazelcastInstanceProxy;
import com.hazelcast.internal.serialization.impl.DefaultSerializationServiceBuilder;
import com.hazelcast.internal.util.collection.PartitionIdSet;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.sql.impl.LoggingQueryOperationHandler;
import com.hazelcast.sql.impl.NodeServiceProviderImpl;
import com.hazelcast.sql.impl.QueryException;
import com.hazelcast.sql.impl.QueryId;
import com.hazelcast.sql.impl.ResultIterator;
import com.hazelcast.sql.impl.SqlTestSupport;
import com.hazelcast.sql.impl.exec.io.Inbox;
import com.hazelcast.sql.impl.exec.io.Outbox;
import com.hazelcast.sql.impl.exec.io.ReceiveExec;
import com.hazelcast.sql.impl.exec.io.SendExec;
import com.hazelcast.sql.impl.exec.io.flowcontrol.simple.SimpleFlowControl;
import com.hazelcast.sql.impl.exec.io.flowcontrol.simple.SimpleFlowControlFactory;
import com.hazelcast.sql.impl.exec.root.RootExec;
import com.hazelcast.sql.impl.exec.root.RootResultConsumer;
import com.hazelcast.sql.impl.exec.root.ScheduleCallback;
import com.hazelcast.sql.impl.exec.scan.MapScanExec;
import com.hazelcast.sql.impl.expression.ColumnExpression;
import com.hazelcast.sql.impl.expression.ConstantPredicateExpression;
import com.hazelcast.sql.impl.extract.GenericQueryTargetDescriptor;
import com.hazelcast.sql.impl.operation.QueryExecuteOperation;
import com.hazelcast.sql.impl.operation.QueryExecuteOperationFragment;
import com.hazelcast.sql.impl.plan.node.FilterPlanNode;
import com.hazelcast.sql.impl.plan.node.MapScanPlanNode;
import com.hazelcast.sql.impl.plan.node.PlanNode;
import com.hazelcast.sql.impl.plan.node.PlanNodeSchema;
import com.hazelcast.sql.impl.plan.node.PlanNodeVisitor;
import com.hazelcast.sql.impl.plan.node.ProjectPlanNode;
import com.hazelcast.sql.impl.plan.node.RootPlanNode;
import com.hazelcast.sql.impl.plan.node.io.ReceivePlanNode;
import com.hazelcast.sql.impl.plan.node.io.SendPlanNode;
import com.hazelcast.sql.impl.row.Row;
import com.hazelcast.sql.impl.row.RowBatch;
import com.hazelcast.sql.impl.type.QueryDataType;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import static com.hazelcast.sql.impl.operation.QueryExecuteOperationFragmentMapping.DATA_MEMBERS;
import static com.hazelcast.sql.impl.operation.QueryExecuteOperationFragmentMapping.EXPLICIT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

/**
 * Tests that {@code CreateExecPlanNodeVisitor} translates each plan-node type
 * (root, send, receive, project, filter, map scan) into the matching executor,
 * wiring ids, inboxes/outboxes and flow control as described by the
 * {@code QueryExecuteOperation}. Uses a single real Hazelcast member (started
 * once per class) only to obtain a {@code NodeServiceProviderImpl} and a map;
 * plan nodes themselves are lightweight stubs defined at the bottom of the file.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class CreateExecPlanNodeVisitorTest extends SqlTestSupport {

    private static final String MAP_NAME = "map";

    private static final int ROOT_BATCH_SIZE = 1024;
    private static final int OUTBOX_BATCH_SIZE = 512 * 1024;

    private static final int EDGE_1_ID = 100;
    private static final long EDGE_1_INITIAL_MEMORY = 1000;

    private static final TestHazelcastInstanceFactory FACTORY = new TestHazelcastInstanceFactory(1);

    private static final QueryId QUERY_ID = new QueryId();

    private static final int PARTITION_COUNT = 4;
    private static final int[] PARTITIONS_MEMBER_1 = new int[] { 1, 2 };
    private static final int[] PARTITIONS_MEMBER_2 = new int[] { 3, 4 };

    // Partition ownership map shared by most tests; member 2 is fictional
    // (random UUID) — only member 1 is a real started instance.
    private static Map<UUID, PartitionIdSet> partitionMapping;
    private static UUID memberId1;
    private static final UUID MEMBER_ID_2 = UUID.randomUUID();

    private static NodeServiceProviderImpl nodeServiceProvider;

    // Monotonic source of unique plan-node ids within a single test.
    private int idGenerator;

    @BeforeClass
    public static void beforeClass() {
        HazelcastInstance instance = FACTORY.newHazelcastInstance();

        // Touch the map so that its proxy/record store exists for the scan test.
        instance.getMap(MAP_NAME).put(1, 1);

        nodeServiceProvider = new NodeServiceProviderImpl(
            ((HazelcastInstanceProxy) instance).getOriginal().node.nodeEngine
        );

        memberId1 = instance.getLocalEndpoint().getUuid();

        partitionMapping = new HashMap<>();
        partitionMapping.put(memberId1, createPartitionIdSet(PARTITION_COUNT, PARTITIONS_MEMBER_1));
        partitionMapping.put(MEMBER_ID_2, createPartitionIdSet(PARTITION_COUNT, PARTITIONS_MEMBER_2));
    }

    @AfterClass
    public static void afterClass() {
        FACTORY.shutdownAll();
    }

    // Root node -> RootExec with the operation's consumer and batch size.
    @Test
    public void testRoot() {
        UpstreamNode upstreamNode = new UpstreamNode(nextNodeId());
        RootPlanNode rootNode = new RootPlanNode(nextNodeId(), upstreamNode);

        QueryExecuteOperationFragment rootFragment = new QueryExecuteOperationFragment(
            rootNode,
            EXPLICIT,
            Collections.singletonList(memberId1)
        );

        QueryExecuteOperation operation = createOperation(
            Collections.singletonList(rootFragment),
            Collections.emptyMap(),
            Collections.emptyMap(),
            Collections.emptyMap()
        );

        CreateExecPlanNodeVisitor visitor = visit(operation, rootFragment);

        RootExec rootExec = (RootExec) visitor.getExec();
        assertEquals(rootNode.getId(), rootExec.getId());
        assertEquals(operation.getRootConsumer(), rootExec.getConsumer());
        assertEquals(ROOT_BATCH_SIZE, rootExec.getBatchSize());

        UpstreamExec upstreamExec = (UpstreamExec) rootExec.getUpstream();
        assertEquals(upstreamNode.getId(), upstreamExec.getId());
    }

    // Send node -> SendExec whose outbox targets the receive fragment's member.
    @Test
    public void testSendNode() {
        UpstreamNode upstreamNode = new UpstreamNode(nextNodeId());
        SendPlanNode sendNode = new SendPlanNode(nextNodeId(), upstreamNode, EDGE_1_ID);

        QueryExecuteOperationFragment sendFragment = new QueryExecuteOperationFragment(
            sendNode,
            DATA_MEMBERS,
            null
        );

        QueryExecuteOperationFragment receiveFragment = new QueryExecuteOperationFragment(
            null,
            EXPLICIT,
            Collections.singletonList(memberId1)
        );

        QueryExecuteOperation operation = createOperation(
            Arrays.asList(sendFragment, receiveFragment),
            Collections.singletonMap(EDGE_1_ID, 0),
            Collections.singletonMap(EDGE_1_ID, 1),
            Collections.singletonMap(EDGE_1_ID, EDGE_1_INITIAL_MEMORY)
        );

        CreateExecPlanNodeVisitor visitor = visit(operation, sendFragment);

        SendExec sendExec = (SendExec) visitor.getExec();
        Outbox outbox = sendExec.getOutbox();

        assertEquals(sendNode.getId(), sendExec.getId());
        assertEquals(QUERY_ID, outbox.getQueryId());
        assertEquals(EDGE_1_ID, outbox.getEdgeId());
        assertEquals(upstreamNode.getSchema().getEstimatedRowSize(), outbox.getRowWidth());
        assertEquals(memberId1, outbox.getLocalMemberId());
        assertEquals(memberId1, outbox.getTargetMemberId());
        assertEquals(OUTBOX_BATCH_SIZE, outbox.getBatchSize());
        assertEquals(EDGE_1_INITIAL_MEMORY, outbox.getRemainingMemory());

        UpstreamExec upstreamExec = (UpstreamExec) sendExec.getUpstream();
        assertEquals(upstreamNode.getId(), upstreamExec.getId());

        assertEquals(0, visitor.getInboxes().size());
        assertEquals(1, visitor.getOutboxes().size());
        assertEquals(1, visitor.getOutboxes().get(EDGE_1_ID).size());
        assertSame(outbox, visitor.getOutboxes().get(EDGE_1_ID).get(memberId1));
    }

    // Receive node -> ReceiveExec with an inbox expecting one stream per
    // sending member and flow control honoring the edge's initial memory.
    @Test
    public void testReceive() {
        ReceivePlanNode receiveNode = new ReceivePlanNode(
            nextNodeId(),
            EDGE_1_ID,
            true,
            Arrays.asList(QueryDataType.INT, QueryDataType.VARCHAR)
        );

        DownstreamNode downstreamNode = new DownstreamNode(
            nextNodeId(),
            receiveNode
        );

        QueryExecuteOperationFragment sendFragment = new QueryExecuteOperationFragment(
            null,
            DATA_MEMBERS,
            partitionMapping.keySet()
        );

        QueryExecuteOperationFragment receiveFragment = new QueryExecuteOperationFragment(
            downstreamNode,
            EXPLICIT,
            Collections.singletonList(memberId1)
        );

        QueryExecuteOperation operation = createOperation(
            Arrays.asList(sendFragment, receiveFragment),
            Collections.singletonMap(EDGE_1_ID, 0),
            Collections.singletonMap(EDGE_1_ID, 1),
            Collections.singletonMap(EDGE_1_ID, EDGE_1_INITIAL_MEMORY)
        );

        CreateExecPlanNodeVisitor visitor = visit(operation, receiveFragment);

        DownstreamExec downstreamExec = (DownstreamExec) visitor.pop();
        assertEquals(downstreamNode.getId(), downstreamExec.getId());

        ReceiveExec receiveExec = (ReceiveExec) downstreamExec.getUpstream();
        assertEquals(receiveNode.getId(), receiveExec.getId());

        Inbox inbox = receiveExec.getInbox();
        assertEquals(QUERY_ID, inbox.getQueryId());
        assertEquals(EDGE_1_ID, inbox.getEdgeId());
        assertEquals(receiveNode.getSchema().getEstimatedRowSize(), inbox.getRowWidth());
        assertEquals(memberId1, inbox.getLocalMemberId());
        assertEquals(partitionMapping.size(), inbox.getRemainingStreams());
        assertEquals(EDGE_1_INITIAL_MEMORY, ((SimpleFlowControl) inbox.getFlowControl()).getMaxMemory());
        assertTrue(inbox.isOrdered());

        assertEquals(1, visitor.getInboxes().size());
        assertSame(inbox, visitor.getInboxes().get(EDGE_1_ID));
        assertEquals(0, visitor.getOutboxes().size());
    }

    // Project node -> ProjectExec carrying the same projection expressions.
    @Test
    public void testProject() {
        UpstreamNode upstreamNode = new UpstreamNode(nextNodeId());

        ProjectPlanNode projectNode = new ProjectPlanNode(
            nextNodeId(),
            upstreamNode,
            Collections.singletonList(ColumnExpression.create(0, QueryDataType.INT))
        );

        QueryExecuteOperationFragment rootFragment = new QueryExecuteOperationFragment(
            projectNode,
            EXPLICIT,
            Collections.singletonList(memberId1)
        );

        QueryExecuteOperation operation = createOperation(
            Collections.singletonList(rootFragment),
            Collections.emptyMap(),
            Collections.emptyMap(),
            Collections.emptyMap()
        );

        CreateExecPlanNodeVisitor visitor = visit(operation, rootFragment);

        ProjectExec projectExec = (ProjectExec) visitor.pop();
        assertEquals(projectNode.getId(), projectExec.getId());
        assertEquals(projectNode.getProjects(), projectExec.getProjects());

        UpstreamExec upstreamExec = (UpstreamExec) projectExec.getUpstream();
        assertEquals(upstreamNode.getId(), upstreamExec.getId());
    }

    // Filter node -> FilterExec carrying the same predicate.
    @Test
    public void testFilter() {
        UpstreamNode upstreamNode = new UpstreamNode(nextNodeId());

        FilterPlanNode filterNode = new FilterPlanNode(
            nextNodeId(),
            upstreamNode,
            new ConstantPredicateExpression(true)
        );

        QueryExecuteOperationFragment rootFragment = new QueryExecuteOperationFragment(
            filterNode,
            EXPLICIT,
            Collections.singletonList(memberId1)
        );

        QueryExecuteOperation operation = createOperation(
            Collections.singletonList(rootFragment),
            Collections.emptyMap(),
            Collections.emptyMap(),
            Collections.emptyMap()
        );

        CreateExecPlanNodeVisitor visitor = visit(operation, rootFragment);

        FilterExec filterExec = (FilterExec) visitor.pop();
        assertEquals(filterNode.getId(), filterExec.getId());
        assertEquals(filterNode.getFilter(), filterExec.getFilter());

        UpstreamExec upstreamExec = (UpstreamExec) filterExec.getUpstream();
        assertEquals(upstreamNode.getId(), upstreamExec.getId());
    }

    // Map scan degrades to EmptyExec when the local member owns no partitions
    // or the map does not exist; otherwise a fully-configured MapScanExec.
    @Test
    public void testMapScan() {
        // Map with data.
        checkMapScan(MAP_NAME, partitionMapping, false);

        // Map with data, but no partitions.
        Map<UUID, PartitionIdSet> partitionMapping = new HashMap<>();

        partitionMapping.put(memberId1, createPartitionIdSet(PARTITION_COUNT));
        partitionMapping.put(MEMBER_ID_2, createPartitionIdSet(PARTITION_COUNT, PARTITIONS_MEMBER_2));

        checkMapScan(MAP_NAME, partitionMapping, true);

        // Not started map.
        checkMapScan(UUID.randomUUID().toString(), CreateExecPlanNodeVisitorTest.partitionMapping, true);
    }

    private void checkMapScan(String mapName, Map<UUID, PartitionIdSet> partitionMapping, boolean expectedEmpty) {
        MapScanPlanNode scanNode = new MapScanPlanNode(
            nextNodeId(),
            mapName,
            GenericQueryTargetDescriptor.DEFAULT,
            GenericQueryTargetDescriptor.DEFAULT,
            Arrays.asList(valuePath("field1"), valuePath("field2")),
            Arrays.asList(QueryDataType.INT, QueryDataType.VARCHAR),
            Arrays.asList(0, 1),
            new ConstantPredicateExpression(true)
        );

        DownstreamNode downstreamNode = new DownstreamNode(
            nextNodeId(),
            scanNode
        );

        QueryExecuteOperationFragment fragment = new QueryExecuteOperationFragment(
            downstreamNode,
            EXPLICIT,
            Collections.singletonList(memberId1)
        );

        QueryExecuteOperation operation = createOperation(
            Collections.singletonList(fragment),
            Collections.emptyMap(),
            Collections.emptyMap(),
            Collections.emptyMap(),
            partitionMapping
        );

        CreateExecPlanNodeVisitor visitor = visit(operation, fragment);

        DownstreamExec downstreamExec = (DownstreamExec) visitor.pop();
        assertEquals(downstreamNode.getId(), downstreamExec.getId());

        if (expectedEmpty) {
            EmptyExec scanExec = (EmptyExec) downstreamExec.getUpstream();

            assertEquals(scanNode.getId(), scanExec.getId());
        } else {
            MapScanExec scanExec = (MapScanExec) downstreamExec.getUpstream();

            assertEquals(scanNode.getId(), scanExec.getId());
            assertEquals(scanNode.getMapName(), scanExec.getMap().getName());
            assertEquals(scanNode.getKeyDescriptor(), scanExec.getKeyDescriptor());
            assertEquals(scanNode.getValueDescriptor(), scanExec.getValueDescriptor());
            assertEquals(scanNode.getFieldPaths(), scanExec.getFieldPaths());
            assertEquals(scanNode.getFieldTypes(), scanExec.getFieldTypes());
            assertEquals(scanNode.getProjects(), scanExec.getProjects());
            assertEquals(scanNode.getFilter(), scanExec.getFilter());
        }
    }

    // Builds the visitor under test for member 1 and runs it over the fragment.
    private static CreateExecPlanNodeVisitor visit(QueryExecuteOperation operation, QueryExecuteOperationFragment fragment) {
        CreateExecPlanNodeVisitor res = new CreateExecPlanNodeVisitor(
            new LoggingQueryOperationHandler(),
            nodeServiceProvider,
            new DefaultSerializationServiceBuilder().build(),
            memberId1,
            operation,
            SimpleFlowControlFactory.INSTANCE,
            operation.getPartitionMap().get(memberId1),
            OUTBOX_BATCH_SIZE,
            null
        );

        fragment.getNode().visit(res);

        return res;
    }

    private static QueryExecuteOperation createOperation(
        List<QueryExecuteOperationFragment> fragments,
        Map<Integer, Integer> outboundEdgeMap,
        Map<Integer, Integer> inboundEdgeMap,
        Map<Integer, Long> edgeInitialMemoryMap
    ) {
        return createOperation(fragments, outboundEdgeMap, inboundEdgeMap, edgeInitialMemoryMap, partitionMapping);
    }

    private static QueryExecuteOperation createOperation(
        List<QueryExecuteOperationFragment> fragments,
        Map<Integer, Integer> outboundEdgeMap,
        Map<Integer, Integer> inboundEdgeMap,
        Map<Integer, Long> edgeInitialMemoryMap,
        Map<UUID, PartitionIdSet> partitionMapping
    ) {
        QueryExecuteOperation operation = new QueryExecuteOperation(
            QUERY_ID,
            partitionMapping,
            fragments,
            outboundEdgeMap,
            inboundEdgeMap,
            edgeInitialMemoryMap,
            Collections.emptyList()
        );

        operation.setRootConsumer(new TestRootResultConsumer(), ROOT_BATCH_SIZE);

        return operation;
    }

    private static PartitionIdSet createPartitionIdSet(int size, int... partitions) {
        PartitionIdSet res = new PartitionIdSet(size);

        if (partitions != null) {
            for (int partition : partitions) {
                res.add(partition);
            }
        }

        return res;
    }

    private int nextNodeId() {
        return idGenerator++;
    }

    /**
     * Stub leaf node: pushes an {@link UpstreamExec} when visited, exposing a
     * fixed INT/VARCHAR schema.
     */
    private static class UpstreamNode implements PlanNode, CreateExecPlanNodeVisitorCallback {

        private final int id;
        private final List<QueryDataType> types;

        private UpstreamNode(int id) {
            this.id = id;

            types = Arrays.asList(QueryDataType.INT, QueryDataType.VARCHAR);
        }

        @Override
        public int getId() {
            return id;
        }

        @Override
        public void visit(PlanNodeVisitor visitor) {
            visitor.onOtherNode(this);
        }

        @Override
        public void onVisit(CreateExecPlanNodeVisitor visitor) {
            visitor.push(new UpstreamExec(getId()));
        }

        @Override
        public PlanNodeSchema getSchema() {
            return new PlanNodeSchema(types);
        }

        @Override
        public void writeData(ObjectDataOutput out) throws IOException {
            // No-op.
        }

        @Override
        public void readData(ObjectDataInput in) throws IOException {
            // No-op.
        }
    }

    /**
     * Stub unary node: visits its upstream first, then wraps whatever executor
     * the visitor produced for it in a {@link DownstreamExec}.
     */
    private static class DownstreamNode implements PlanNode, CreateExecPlanNodeVisitorCallback {

        private final int id;
        private final PlanNode upstream;

        private DownstreamNode(int id, PlanNode upstream) {
            this.id = id;
            this.upstream = upstream;
        }

        @Override
        public int getId() {
            return id;
        }

        @Override
        public void visit(PlanNodeVisitor visitor) {
            upstream.visit(visitor);

            visitor.onOtherNode(this);
        }

        @Override
        public void onVisit(CreateExecPlanNodeVisitor visitor) {
            visitor.push(new DownstreamExec(id, visitor.pop()));
        }

        @Override
        public PlanNodeSchema getSchema() {
            return upstream.getSchema();
        }

        @Override
        public void writeData(ObjectDataOutput out) throws IOException {
            // No-op.
        }

        @Override
        public void readData(ObjectDataInput in) throws IOException {
            // No-op.
        }
    }

    /** Inert executor produced by {@link UpstreamNode}; never advances. */
    private static class UpstreamExec extends AbstractExec {
        private UpstreamExec(int id) {
            super(id);
        }

        @Override
        protected IterationResult advance0() {
            return null;
        }

        @Override
        protected RowBatch currentBatch0() {
            return null;
        }
    }

    /** Inert executor produced by {@link DownstreamNode}; never advances. */
    private static class DownstreamExec extends AbstractUpstreamAwareExec {
        private DownstreamExec(int id, Exec upstream) {
            super(id, upstream);
        }

        @Override
        protected IterationResult advance0() {
            return null;
        }

        @Override
        protected RowBatch currentBatch0() {
            return null;
        }
    }

    /** Do-nothing consumer installed as the operation's root consumer. */
    private static class TestRootResultConsumer implements RootResultConsumer {
        @Override
        public void setup(ScheduleCallback scheduleCallback) {
            // No-op.
        }

        @Override
        public boolean consume(List<Row> batch, boolean last) {
            return false;
        }

        @Override
        public ResultIterator<Row> iterator() {
            return null;
        }

        @Override
        public void onError(QueryException error) {
            // No-op.
        }
    }
}
/**
 * @author krvsingh
 */
package connectors.qc.notifier.mavenplugin;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;

import connectors.qc.notifier.batchclient.TDBatchClient;
import connectors.qc.notifier.batchclient.commons.TDBatchClientConstants;
import connectors.qc.notifier.restclient.model.ConnectionConstants;
import connectors.qc.notifier.shared.TDNotifierConstants;

/*
 * Lifecycle default -> [validate, initialize, generate-sources, process-sources,
 * generate-resources, process-resources, compile,
 * process-classes, generate-test-sources, process-test-sources,
 * generate-test-resources, process-test-resources, test-compile,
 * process-test-classes, test, prepare-package, package,
 * pre-integration-test, integration-test, post-integration-test,
 * verify, install, deploy]
 *
 * We want to report results after completion of tests, and prepare-package comes
 * after the test phase.
 * It can be overridden in the POM using the execution->phase->goal setting.
 *
 * Example :
 * <executions>
 * <execution>
 * <phase>verify</phase>
 * <goals>
 * <goal>batchnotify</goal>
 * </goals>
 * </execution>
 * </executions>
 */
@Mojo(name = "batchnotify", defaultPhase = LifecyclePhase.PACKAGE)
public class QcNotifyMojo extends AbstractMojo {

    /**
     * Location of Absolute batch file path.
     */
    @Parameter(property = PluginConstants.PLUGIN_PROPERTY_BATCH_FILE, defaultValue = PluginConstants.PLUGIN_PROPERTY_WORKSPACE + TDNotifierConstants.DEFAULT_TD_BATCHFILE)
    private String batchFilePath;

    /**
     * QC connection properties Absolute File path
     */
    @Parameter(property = PluginConstants.PLUGIN_PROPERTY_QC_CONNECTION_FILE, defaultValue = PluginConstants.PLUGIN_PROPERTY_WORKSPACE + ConnectionConstants.DEFAULT_TD_PROPERTY_FILE)
    private String qcPropertiesFile;

    /**
     * QC connection properties
     */
    @Parameter(property = ConnectionConstants.PROPERTY_TD_URL, defaultValue = ConnectionConstants.DEFAULT_TD_URL)
    private String tdUrl;

    @Parameter(property = ConnectionConstants.PROPERTY_TD_DOMAIN)
    private String tdDomain;

    @Parameter(property = ConnectionConstants.PROPERTY_TD_PROJECT)
    private String tdProject;

    @Parameter(property = ConnectionConstants.PROPERTY_TD_USER)
    private String tdUser;

    @Parameter(property = ConnectionConstants.PROPERTY_TD_PASSWORD)
    private String tdPassword;

    /**
     * User can fail the maven build if some or all notifications to QC fail.
     * It can be done by setting this flag to "true".
     */
    @Parameter(property = PluginConstants.PLUGIN_PROPERTY_FAILBUILD, defaultValue = "false")
    private String failOnNotifFailure;

    /**
     * User can call the plugin without the batch file. This is a mock-only mode.
     * If set to true the build will never be failed, even with a missing batch file.
     */
    @Parameter(property = PluginConstants.PLUGIN_PROPERTY_MOCK, defaultValue = "false")
    private String mockWithoutFail;

    /**
     * By default TDNotifier uses the first instance in the testset.
     *
     * It can be overridden by setting this flag to "false".
     */
    @Parameter(property = TDNotifierConstants.PROPERTY_USE_FIRST_TESTINSTANCE, defaultValue = "true")
    private String defaultFirstInstance;

    /**
     * By default TDNotifier has create mode enabled: Test Lab entities not
     * found on QC will be automatically created.
     *
     * In order to disable this, set this flag to "false".
     */
    @Parameter(property = TDNotifierConstants.PROPERTY_CREATE_IF_NOT_FOUND_MODE, defaultValue = "false")
    private String createMode;

    /**
     * Entry point: gathers QC connection settings (file, then command-line
     * overrides), exports them as system properties for the REST client,
     * configures batch-client behavior, then runs the batch client and maps
     * its exit code onto the build outcome.
     *
     * @throws MojoExecutionException if the batch client throws (halts the build)
     * @throws MojoFailureException   if the exit code demands a build failure
     */
    public void execute() throws MojoFailureException, MojoExecutionException {

        // get QC connection properties from properties file if provided
        getQcConnectionPropertiesFromFile();

        // override file properties from the argument list of command line.
        overrideFromArguments();

        // set QC connection properties to system (will be used by RestClient)
        exportQcConnectionProperties();

        // set restClient behavior prior to batchClient instantiation
        configureBatchClientBehavior();

        int exitCode = TDBatchClientConstants.SUCCESS;
        try {
            exitCode = new TDBatchClient(batchFilePath).processInputFileToQC();
        } catch (Exception e) {
            // Will result into a BUILD FAILURE and further execution halted.
            throw new MojoExecutionException("Exception From BatchClient : ", e);
        } finally {
            sendExitMsgToMaven(exitCode);
        }
    }

    /**
     * Configure BatchClient behavior via system properties.
     */
    private void configureBatchClientBehavior() {
        // if -Dcreate_if_not_found=true then createMode will be set
        if ("true".equalsIgnoreCase(createMode)) {
            getLog().debug("BatchClient Create Mode Activated");
            System.setProperty(
                    TDNotifierConstants.PROPERTY_CREATE_IF_NOT_FOUND_MODE,
                    "true");
        }

        // if -Duse_first_instance=true then if multiple testInstances for the
        // same test found in a test-set, first will be used
        if (!"true".equalsIgnoreCase(defaultFirstInstance)) {
            getLog().debug(
                    "BatchClient Default First Instance Mode Deactivated");
            // BUG FIX: System.setProperty(key, null) throws NullPointerException.
            // clearProperty() is the correct way to deactivate the flag.
            System.clearProperty(
                    TDNotifierConstants.PROPERTY_USE_FIRST_TESTINSTANCE);
        }
    }

    /**
     * Loads QC connection properties from the configured properties file, if
     * any. Missing/unreadable files are logged and otherwise ignored.
     */
    private void getQcConnectionPropertiesFromFile() {
        if (qcPropertiesFile != null && qcPropertiesFile.length() > 0) {
            Properties properties = new Properties();
            // BUG FIX: the stream was only closed on the success path, leaking
            // the file handle when load() failed; close it in finally instead.
            FileInputStream inputStream = null;
            try {
                inputStream = new FileInputStream(new File(qcPropertiesFile));
                getLog().info(
                        String.format(
                                "Reading QC Connection Properties File : [%s]",
                                qcPropertiesFile));
                properties.load(inputStream);
            } catch (IOException e) {
                getLog().error(
                        String.format(
                                "QC Connection Properties File : [%s], NOT FOUND.",
                                qcPropertiesFile));
                return;
            } finally {
                if (inputStream != null) {
                    try {
                        inputStream.close();
                    } catch (IOException ignored) {
                        // best-effort close; nothing more we can do here
                    }
                }
            }
            try {
                setQcConnectionProperties(properties);
            } catch (Exception e) {
                getLog().warn(e);
            }
        }
    }

    /**
     * Copies any QC connection values present in the given properties onto the
     * corresponding fields; absent keys leave the fields untouched.
     */
    private void setQcConnectionProperties(Properties properties) {
        String urlStr = properties
                .getProperty(ConnectionConstants.PROPERTY_TD_URL);
        if (urlStr != null) {
            tdUrl = urlStr;
        }
        String domainStr = properties
                .getProperty(ConnectionConstants.PROPERTY_TD_DOMAIN);
        if (domainStr != null) {
            tdDomain = domainStr;
        }
        String projStr = properties
                .getProperty(ConnectionConstants.PROPERTY_TD_PROJECT);
        if (projStr != null) {
            tdProject = projStr;
        }
        String userStr = properties
                .getProperty(ConnectionConstants.PROPERTY_TD_USER);
        if (userStr != null) {
            tdUser = userStr;
        }
        String pswdStr = properties
                .getProperty(ConnectionConstants.PROPERTY_TD_PASSWORD);
        if (pswdStr != null) {
            tdPassword = pswdStr;
        }
    }

    /*
     * Override QC connection params if provided in argument list
     */
    private void overrideFromArguments() {
        setQcConnectionProperties(System.getProperties());
    }

    /*
     * The System properties will be extracted by the loadGlobals() in Rest
     * Client during login to QC
     */
    private void exportQcConnectionProperties() {
        getLog().debug("QC URL = " + tdUrl);
        if (tdUrl != null) {
            System.setProperty(ConnectionConstants.PROPERTY_TD_URL, tdUrl);
        }
        getLog().debug("QC DOMAIN = " + tdDomain);
        if (tdDomain != null) {
            System.setProperty(ConnectionConstants.PROPERTY_TD_DOMAIN, tdDomain);
        }
        getLog().debug("QC PROJECT = " + tdProject);
        if (tdProject != null) {
            System.setProperty(ConnectionConstants.PROPERTY_TD_PROJECT, tdProject);
        }
        getLog().debug("QC USER = " + tdUser);
        if (tdUser != null) {
            System.setProperty(ConnectionConstants.PROPERTY_TD_USER, tdUser);
        }
        // Password intentionally not logged.
        if (tdPassword != null) {
            System.setProperty(ConnectionConstants.PROPERTY_TD_PASSWORD, tdPassword);
        }
    }

    /**
     * Maps the batch client's exit code onto the Maven build outcome.
     *
     * @param exitCode exit code returned by TDBatchClient
     * @throws MojoFailureException for error codes configured to fail the build
     */
    private void sendExitMsgToMaven(int exitCode)
            throws MojoFailureException {
        getLog().info("BatchClient Execution Ended with ErrCode : " + exitCode);
        switch (exitCode) {
        case TDBatchClientConstants.SUCCESS:
            break;

        // Malformed batch file or no tests to run
        case TDBatchClientConstants.ERR_NO_SECTIONS_IN_FILE:
            decideBuildStatus(failOnNotifFailure, "It seems No tests were run");
            break;

        case TDBatchClientConstants.ERR_FEW_SECTIONS_SKIPPED:
        case TDBatchClientConstants.ERR_FEW_SECTIONS_FAILED:
            decideBuildStatus(
                    failOnNotifFailure,
                    "It seems all test runs were not posted to QC. See batch file : [*.unprocessed], for list of failed/skipped tests runs");
            break;

        case TDBatchClientConstants.ERR_QC_LOGIN_FAILED:
            decideBuildStatus(failOnNotifFailure,
                    "QC Login Failed, may be due to incorrect or insufficeint credentials");
            break;

        // NOTE(review): parse failures are gated on mockWithoutFail=true, which
        // per its javadoc should *suppress* failures — confirm intended flag.
        case TDBatchClientConstants.ERR_PARSING_INPUT_FILE:
            decideBuildStatus(mockWithoutFail, String.format(
                    "Failed to Parse batch File : [%s]", batchFilePath));
            break;

        // Seems this plugin is outdated with respect to batchClient
        default:
            // May result into a BUILD FAILURE and further execution halted.
            throw new MojoFailureException(
                    "Unregistered Errocode from BatchClient. Fix Needed !!");
        }
    }

    /**
     * Fails the build when the given flag is "true"; otherwise just warns.
     */
    private void decideBuildStatus(String flag, String errMsg)
            throws MojoFailureException {
        if ("true".equalsIgnoreCase(flag)) {
            // Will result into a BUILD FAILURE and further execution halted.
            throw new MojoFailureException(errMsg);
        } else {
            getLog().warn(errMsg);
        }
    }
}
/*
 * Copyright 2015-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.funtl.framework.paypal.api.openidconnect;

import java.util.HashMap;

import com.funtl.framework.paypal.base.Constants;
import com.funtl.framework.paypal.base.rest.APIContext;
import com.funtl.framework.paypal.base.rest.HttpMethod;
import com.funtl.framework.paypal.base.rest.PayPalRESTException;
import com.funtl.framework.paypal.base.rest.PayPalResource;

/**
 * OpenID Connect Userinfo model: the claims PayPal returns from the
 * {@code /v1/identity/openidconnect/userinfo} endpoint. Plain JSON-mapped
 * bean — field names and accessors must match the wire format.
 */
public class Userinfo extends PayPalResource {

	/** Subject - Identifier for the End-User at the Issuer. */
	private String userId;

	/** Subject - Identifier for the End-User at the Issuer. */
	private String sub;

	/**
	 * End-User's full name in displayable form including all name parts,
	 * possibly including titles and suffixes, ordered according to the
	 * End-User's locale and preferences.
	 */
	private String name;

	/** Given name(s) or first name(s) of the End-User. */
	private String givenName;

	/** Surname(s) or last name(s) of the End-User. */
	private String familyName;

	/** Middle name(s) of the End-User. */
	private String middleName;

	/** URL of the End-User's profile picture. */
	private String picture;

	/** End-User's preferred e-mail address. */
	private String email;

	/** True if the End-User's e-mail address has been verified; otherwise false. */
	private Boolean emailVerified;

	/** End-User's gender. */
	private String gender;

	/**
	 * End-User's birthday, represented in YYYY-MM-DD format. The year MAY be
	 * 0000, indicating it is omitted. To represent only the year, the YYYY
	 * format would be used.
	 */
	private String birthdate;

	/** Time zone database entry representing the End-User's time zone. */
	private String zoneinfo;

	/** End-User's locale. */
	private String locale;

	/** End-User's preferred telephone number. */
	private String phoneNumber;

	/** End-User's preferred address. */
	private Address address;

	/** Verified account status. */
	private Boolean verifiedAccount;

	/** Account type. */
	private String accountType;

	/** Account holder age range. */
	private String ageRange;

	/** Account payer identifier. */
	private String payerId;

	/**
	 * Default Constructor
	 */
	public Userinfo() {
	}

	/** Setter for userId */
	public void setUserId(String userId) {
		this.userId = userId;
	}

	/** Getter for userId */
	public String getUserId() {
		return this.userId;
	}

	/** Setter for sub */
	public void setSub(String sub) {
		this.sub = sub;
	}

	/** Getter for sub */
	public String getSub() {
		return this.sub;
	}

	/** Setter for name */
	public void setName(String name) {
		this.name = name;
	}

	/** Getter for name */
	public String getName() {
		return this.name;
	}

	/** Setter for givenName */
	public void setGivenName(String givenName) {
		this.givenName = givenName;
	}

	/** Getter for givenName */
	public String getGivenName() {
		return this.givenName;
	}

	/** Setter for familyName */
	public void setFamilyName(String familyName) {
		this.familyName = familyName;
	}

	/** Getter for familyName */
	public String getFamilyName() {
		return this.familyName;
	}

	/** Setter for middleName */
	public void setMiddleName(String middleName) {
		this.middleName = middleName;
	}

	/** Getter for middleName */
	public String getMiddleName() {
		return this.middleName;
	}

	/** Setter for picture */
	public void setPicture(String picture) {
		this.picture = picture;
	}

	/** Getter for picture */
	public String getPicture() {
		return this.picture;
	}

	/** Setter for email */
	public void setEmail(String email) {
		this.email = email;
	}

	/** Getter for email */
	public String getEmail() {
		return this.email;
	}

	/** Setter for emailVerified */
	public void setEmailVerified(Boolean emailVerified) {
		this.emailVerified = emailVerified;
	}

	/** Getter for emailVerified */
	public Boolean getEmailVerified() {
		return this.emailVerified;
	}

	/** Setter for gender */
	public void setGender(String gender) {
		this.gender = gender;
	}

	/** Getter for gender */
	public String getGender() {
		return this.gender;
	}

	/** Setter for birthdate */
	public void setBirthdate(String birthdate) {
		this.birthdate = birthdate;
	}

	/** Getter for birthdate */
	public String getBirthdate() {
		return this.birthdate;
	}

	/** Setter for zoneinfo */
	public void setZoneinfo(String zoneinfo) {
		this.zoneinfo = zoneinfo;
	}

	/** Getter for zoneinfo */
	public String getZoneinfo() {
		return this.zoneinfo;
	}

	/** Setter for locale */
	public void setLocale(String locale) {
		this.locale = locale;
	}

	/** Getter for locale */
	public String getLocale() {
		return this.locale;
	}

	/** Setter for phoneNumber */
	public void setPhoneNumber(String phoneNumber) {
		this.phoneNumber = phoneNumber;
	}

	/** Getter for phoneNumber */
	public String getPhoneNumber() {
		return this.phoneNumber;
	}

	/** Setter for address */
	public void setAddress(Address address) {
		this.address = address;
	}

	/** Getter for address */
	public Address getAddress() {
		return this.address;
	}

	/** Setter for verifiedAccount */
	public void setVerifiedAccount(Boolean verifiedAccount) {
		this.verifiedAccount = verifiedAccount;
	}

	/** Getter for verifiedAccount */
	public Boolean getVerifiedAccount() {
		return this.verifiedAccount;
	}

	/**
	 * Setter for accountType
	 *
	 * @param accountType the account type
	 */
	public void setAccountType(String accountType) {
		this.accountType = accountType;
	}

	/** Getter for accountType */
	public String getAccountType() {
		return this.accountType;
	}

	/** Setter for ageRange */
	public void setAgeRange(String ageRange) {
		this.ageRange = ageRange;
	}

	/** Getter for ageRange */
	public String getAgeRange() {
		return this.ageRange;
	}

	/** Setter for payerId */
	public void setPayerId(String payerId) {
		this.payerId = payerId;
	}

	/** Getter for payerId */
	public String getPayerId() {
		return this.payerId;
	}

	/**
	 * Returns user details
	 *
	 * @param accessToken access token
	 * @return Userinfo
	 * @throws PayPalRESTException on REST call failure
	 * @deprecated Please use {@link #getUserinfo(APIContext)} instead.
	 */
	// FIX: the javadoc @deprecated tag was not paired with the @Deprecated
	// annotation, so callers got no compiler warning.
	@Deprecated
	public static Userinfo getUserinfo(String accessToken) throws PayPalRESTException {
		APIContext apiContext = new APIContext(accessToken);
		return getUserinfo(apiContext);
	}

	/**
	 * Returns user details
	 *
	 * @param apiContext {@link APIContext} to be used for the call.
	 * @return Userinfo
	 * @throws PayPalRESTException on REST call failure
	 */
	public static Userinfo getUserinfo(APIContext apiContext) throws PayPalRESTException {
		String resourcePath = "v1/identity/openidconnect/userinfo?schema=openid";
		String payLoad = "";
		String accessToken = apiContext.fetchAccessToken();
		HashMap<String, String> httpHeaders = new HashMap<String, String>();
		// The endpoint expects an OAuth bearer token; normalize the prefix.
		if (!accessToken.startsWith("Bearer ")) {
			accessToken = "Bearer " + accessToken;
		}
		httpHeaders.put(Constants.AUTHORIZATION_HEADER, accessToken);
		apiContext.addHTTPHeaders(httpHeaders);
		return configureAndExecute(apiContext, HttpMethod.GET, resourcePath, payLoad, Userinfo.class);
	}
}
// Copyright 2015 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.actions.util; import static com.google.common.truth.Truth.assertThat; import com.google.common.base.Joiner; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.devtools.build.lib.actions.AbstractAction; import com.google.devtools.build.lib.actions.AbstractActionOwner; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.ActionGraph; import com.google.devtools.build.lib.actions.ActionInputHelper; import com.google.devtools.build.lib.actions.ActionOwner; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactOwner; import com.google.devtools.build.lib.actions.Executor; import com.google.devtools.build.lib.actions.MutableActionGraph; import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException; import com.google.devtools.build.lib.actions.ResourceSet; import com.google.devtools.build.lib.actions.Root; import com.google.devtools.build.lib.actions.cache.MetadataHandler; import 
com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.exec.SingleBuildFileCache; import com.google.devtools.build.lib.syntax.Label; import com.google.devtools.build.lib.util.FileType; import com.google.devtools.build.lib.util.ResourceUsage; import com.google.devtools.build.lib.util.io.FileOutErr; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem; import com.google.devtools.build.skyframe.AbstractSkyFunctionEnvironment; import com.google.devtools.build.skyframe.BuildDriver; import com.google.devtools.build.skyframe.ErrorInfo; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.build.skyframe.ValueOrExceptionUtils; import com.google.devtools.build.skyframe.ValueOrUntypedException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Nullable; /** * A bunch of utilities that are useful for test concerning actions, artifacts, * etc. 
 */
public final class ActionsTestUtil {

  // Graph used to resolve the generating action of an artifact in the helpers below.
  private final ActionGraph actionGraph;

  public ActionsTestUtil(ActionGraph actionGraph) {
    this.actionGraph = actionGraph;
  }

  // Owner label shared by the dummy owners/actions defined in this class.
  private static final Label NULL_LABEL = Label.parseAbsoluteUnchecked("//null/action:owner");

  /**
   * Creates an {@link ActionExecutionContext} for tests.
   *
   * @param actionGraph graph used to build a middleman expander; may be null, in which
   *     case no expander is installed
   */
  public static ActionExecutionContext createContext(Executor executor, FileOutErr fileOutErr,
      Path execRoot, MetadataHandler metadataHandler, @Nullable ActionGraph actionGraph) {
    return new ActionExecutionContext(
        executor,
        new SingleBuildFileCache(execRoot.getPathString(), execRoot.getFileSystem()),
        metadataHandler, fileOutErr,
        actionGraph == null
            ? null
            : ActionInputHelper.actionGraphMiddlemanExpander(actionGraph));
  }

  /**
   * Creates an {@link ActionExecutionContext} for input discovery, backed by an environment
   * that evaluates requested Skyframe keys eagerly and blocks until they are ready.
   */
  public static ActionExecutionContext createContextForInputDiscovery(Executor executor,
      FileOutErr fileOutErr, Path execRoot, MetadataHandler metadataHandler,
      BuildDriver buildDriver) {
    return ActionExecutionContext.forInputDiscovery(
        executor,
        new SingleBuildFileCache(execRoot.getPathString(), execRoot.getFileSystem()),
        metadataHandler,
        fileOutErr,
        new BlockingSkyFunctionEnvironment(
            buildDriver, executor == null ? null : executor.getEventHandler()));
  }

  /**
   * {@link SkyFunction.Environment} that internally makes a full Skyframe evaluate call for the
   * requested keys, blocking until the values are ready.
 */
  private static class BlockingSkyFunctionEnvironment extends AbstractSkyFunctionEnvironment {
    // Driver used to run one full, blocking Skyframe evaluation per dependency request.
    private final BuildDriver driver;
    private final EventHandler eventHandler;

    private BlockingSkyFunctionEnvironment(BuildDriver driver, EventHandler eventHandler) {
      this.driver = driver;
      this.eventHandler = eventHandler;
    }

    @Override
    protected Map<SkyKey, ValueOrUntypedException> getValueOrUntypedExceptions(
        Iterable<SkyKey> depKeys) {
      EvaluationResult<SkyValue> evaluationResult;
      Map<SkyKey, ValueOrUntypedException> result = new HashMap<>();
      try {
        evaluationResult = driver.evaluate(depKeys, /*keepGoing=*/false,
            ResourceUsage.getAvailableProcessors(), eventHandler);
      } catch (InterruptedException e) {
        // Restore the interrupt flag and report every requested key as absent.
        Thread.currentThread().interrupt();
        for (SkyKey key : depKeys) {
          result.put(key, ValueOrExceptionUtils.ofNull());
        }
        return result;
      }
      for (SkyKey key : depKeys) {
        SkyValue value = evaluationResult.get(key);
        if (value != null) {
          result.put(key, ValueOrExceptionUtils.ofValue(value));
          continue;
        }
        // No value: propagate the error's exception if there is one, otherwise
        // treat the key as absent.
        ErrorInfo errorInfo = evaluationResult.getError(key);
        if (errorInfo == null || errorInfo.getException() == null) {
          result.put(key, ValueOrExceptionUtils.ofNull());
          continue;
        }
        result.put(key, ValueOrExceptionUtils.ofExn(errorInfo.getException()));
      }
      return result;
    }

    @Override
    public EventHandler getListener() {
      return null;
    }

    @Override
    public boolean inErrorBubblingForTesting() {
      return false;
    }
  }

  /**
   * A dummy ActionOwner implementation for use in tests.
 */
  public static class NullActionOwner extends AbstractActionOwner {
    @Override
    public Label getLabel() {
      return NULL_LABEL;
    }

    @Override
    public String getConfigurationMnemonic() {
      return "dummy-configuration-mnemonic";
    }

    @Override
    public final String getConfigurationChecksum() {
      return "dummy-configuration";
    }
  }

  // Placeholder artifact rooted in an in-memory filesystem; safe to share across tests.
  public static final Artifact DUMMY_ARTIFACT = new Artifact(
      new PathFragment("dummy"),
      Root.asSourceRoot(new InMemoryFileSystem().getRootDirectory()));

  public static final ActionOwner NULL_ACTION_OWNER = new NullActionOwner();

  public static final ArtifactOwner NULL_ARTIFACT_OWNER =
      new ArtifactOwner() {
        @Override
        public Label getLabel() {
          return NULL_LABEL;
        }
      };

  /**
   * Wraps an {@link ActionConflictException} so it can cross interfaces that do not
   * allow checked exceptions.
   */
  public static class UncheckedActionConflictException extends RuntimeException {
    public UncheckedActionConflictException(ActionConflictException e) {
      super(e);
    }
  }

  /**
   * A dummy Action class for use in tests.
   */
  public static class NullAction extends AbstractAction {

    public NullAction() {
      super(NULL_ACTION_OWNER, Artifact.NO_ARTIFACTS, ImmutableList.of(DUMMY_ARTIFACT));
    }

    public NullAction(ActionOwner owner, Artifact... outputs) {
      super(owner, Artifact.NO_ARTIFACTS, ImmutableList.copyOf(outputs));
    }

    public NullAction(Artifact... outputs) {
      super(NULL_ACTION_OWNER, Artifact.NO_ARTIFACTS, ImmutableList.copyOf(outputs));
    }

    @Override
    public String describeStrategy(Executor executor) {
      return "";
    }

    @Override
    public void execute(ActionExecutionContext actionExecutionContext) {
      // Intentionally a no-op.
    }

    @Override
    protected String computeKey() {
      return "action";
    }

    @Override
    public ResourceSet estimateResourceConsumption(Executor executor) {
      return ResourceSet.ZERO;
    }

    @Override
    public String getMnemonic() {
      return "Null";
    }
  }

  /**
   * For a bunch of actions, gets the basenames of the paths and accumulates
   * them in a space separated string, like <code>foo.o bar.o baz.a</code>.
*/ public static String baseNamesOf(Iterable<Artifact> artifacts) { List<String> baseNames = baseArtifactNames(artifacts); return Joiner.on(' ').join(baseNames); } /** * For a bunch of actions, gets the basenames of the paths, sorts them in alphabetical * order and accumulates them in a space separated string, for example * <code>bar.o baz.a foo.o</code>. */ public static String sortedBaseNamesOf(Iterable<Artifact> artifacts) { List<String> baseNames = baseArtifactNames(artifacts); Collections.sort(baseNames); return Joiner.on(' ').join(baseNames); } /** * For a bunch of artifacts, gets the basenames and accumulates them in a * List. */ public static List<String> baseArtifactNames(Iterable<Artifact> artifacts) { List<String> baseNames = new ArrayList<>(); for (Artifact artifact : artifacts) { baseNames.add(artifact.getExecPath().getBaseName()); } return baseNames; } /** * For a bunch of artifacts, gets the exec paths and accumulates them in a * List. */ public static List<String> execPaths(Iterable<Artifact> artifacts) { List<String> names = new ArrayList<>(); for (Artifact artifact : artifacts) { names.add(artifact.getExecPathString()); } return names; } /** * For a bunch of artifacts, gets the pretty printed names and accumulates them in a List. Note * that this returns the root-relative paths, not the exec paths. */ public static List<String> prettyArtifactNames(Iterable<Artifact> artifacts) { List<String> result = new ArrayList<>(); for (Artifact artifact : artifacts) { result.add(artifact.prettyPrint()); } return result; } public static List<String> prettyJarNames(Iterable<Artifact> jars) { List<String> result = new ArrayList<>(); for (Artifact jar : jars) { result.add(jar.prettyPrint()); } return result; } /** * Returns the closure of the predecessors of any of the given types, joining the basenames of the * artifacts into a space-separated string like "libfoo.a libbar.a libbaz.a". */ public String predecessorClosureOf(Artifact artifact, FileType... 
types) { return predecessorClosureOf(Collections.singleton(artifact), types); } /** * Returns the closure of the predecessors of any of the given types. */ public Collection<String> predecessorClosureAsCollection(Artifact artifact, FileType... types) { return predecessorClosureAsCollection(Collections.singleton(artifact), types); } /** * Returns the closure of the predecessors of any of the given types, joining the basenames of the * artifacts into a space-separated string like "libfoo.a libbar.a libbaz.a". */ public String predecessorClosureOf(Iterable<Artifact> artifacts, FileType... types) { Set<Artifact> visited = artifactClosureOf(artifacts); return baseNamesOf(FileType.filter(visited, types)); } /** * Returns the closure of the predecessors of any of the given types. */ public Collection<String> predecessorClosureAsCollection(Iterable<Artifact> artifacts, FileType... types) { return baseArtifactNames(FileType.filter(artifactClosureOf(artifacts), types)); } public String predecessorClosureOfJars(Iterable<Artifact> artifacts, FileType... types) { return baseNamesOf(FileType.filter(artifactClosureOf(artifacts), types)); } public Collection<String> predecessorClosureJarsAsCollection(Iterable<Artifact> artifacts, FileType... types) { Set<Artifact> visited = artifactClosureOf(artifacts); return baseArtifactNames(FileType.filter(visited, types)); } /** * Returns the closure over the input files of an action. */ public Set<Artifact> inputClosureOf(Action action) { return artifactClosureOf(action.getInputs()); } /** * Returns the closure over the input files of an artifact. */ public Set<Artifact> artifactClosureOf(Artifact artifact) { return artifactClosureOf(Collections.singleton(artifact)); } /** * Returns the closure over the input files of an artifact, filtered by the given matcher. 
*/ public Set<Artifact> filteredArtifactClosureOf(Artifact artifact, Predicate<Artifact> matcher) { return ImmutableSet.copyOf(Iterables.filter(artifactClosureOf(artifact), matcher)); } /** * Returns the closure over the input files of a set of artifacts. */ public Set<Artifact> artifactClosureOf(Iterable<Artifact> artifacts) { Set<Artifact> visited = new LinkedHashSet<>(); List<Artifact> toVisit = Lists.newArrayList(artifacts); while (!toVisit.isEmpty()) { Artifact current = toVisit.remove(0); if (!visited.add(current)) { continue; } Action generatingAction = actionGraph.getGeneratingAction(current); if (generatingAction != null) { Iterables.addAll(toVisit, generatingAction.getInputs()); } } return visited; } /** * Returns the closure over the input files of a set of artifacts, filtered by the given matcher. */ public Set<Artifact> filteredArtifactClosureOf(Iterable<Artifact> artifacts, Predicate<Artifact> matcher) { return ImmutableSet.copyOf(Iterables.filter(artifactClosureOf(artifacts), matcher)); } /** * Returns a predicate to match {@link Artifact}s with the given root-relative path suffix. */ public static Predicate<Artifact> getArtifactSuffixMatcher(final String suffix) { return new Predicate<Artifact>() { @Override public boolean apply(Artifact input) { return input.getRootRelativePath().getPathString().endsWith(suffix); } }; } /** * Finds all the actions that are instances of <code>actionClass</code> * in the transitive closure of prerequisites. 
 */
  public <A extends Action> List<A> findTransitivePrerequisitesOf(Artifact artifact,
      Class<A> actionClass, Predicate<Artifact> allowedArtifacts) {
    List<A> actions = new ArrayList<>();
    Set<Artifact> visited = new LinkedHashSet<>();
    List<Artifact> toVisit = new LinkedList<>();
    toVisit.add(artifact);
    // Breadth-first traversal of generating actions, following only the inputs that
    // satisfy allowedArtifacts; matching actions are collected in discovery order.
    while (!toVisit.isEmpty()) {
      Artifact current = toVisit.remove(0);
      if (!visited.add(current)) {
        continue;
      }
      Action generatingAction = actionGraph.getGeneratingAction(current);
      if (generatingAction != null) {
        Iterables.addAll(toVisit,
            Iterables.filter(generatingAction.getInputs(), allowedArtifacts));
        if (actionClass.isInstance(generatingAction)) {
          actions.add(actionClass.cast(generatingAction));
        }
      }
    }
    return actions;
  }

  public <A extends Action> List<A> findTransitivePrerequisitesOf(
      Artifact artifact, Class<A> actionClass) {
    return findTransitivePrerequisitesOf(artifact, actionClass, Predicates.<Artifact>alwaysTrue());
  }

  /**
   * Looks in the given artifacts Iterable for the first Artifact whose path ends with the given
   * suffix and returns its generating Action.
   */
  public Action getActionForArtifactEndingWith(Iterable<Artifact> artifacts, String suffix) {
    Artifact a = getFirstArtifactEndingWith(artifacts, suffix);
    // May be null when nothing matches; callers are expected to handle the null action.
    return a != null ? actionGraph.getGeneratingAction(a) : null;
  }

  /**
   * Looks in the given artifacts Iterable for the first Artifact whose path ends with the given
   * suffix and returns the Artifact.
   */
  public static Artifact getFirstArtifactEndingWith(
      Iterable<Artifact> artifacts, String suffix) {
    for (Artifact a : artifacts) {
      if (a.getExecPath().getPathString().endsWith(suffix)) {
        return a;
      }
    }
    return null;
  }

  /**
   * Returns the first artifact which is an input to "action" and has the
   * specified basename. An assertion error is raised if none is found.
*/ public static Artifact getInput(Action action, String basename) { for (Artifact artifact : action.getInputs()) { if (artifact.getExecPath().getBaseName().equals(basename)) { return artifact; } } throw new AssertionError("No input with basename '" + basename + "' in action " + action); } /** * Returns true if an artifact that is an input to "action" with the specific * basename exists. */ public static boolean hasInput(Action action, String basename) { try { getInput(action, basename); return true; } catch (AssertionError e) { return false; } } /** * Assert that an artifact is the primary output of its generating action. */ public void assertPrimaryInputAndOutputArtifacts(Artifact input, Artifact output) { Action generatingAction = actionGraph.getGeneratingAction(output); assertThat(generatingAction).isNotNull(); assertThat(generatingAction.getPrimaryOutput()).isEqualTo(output); assertThat(generatingAction.getPrimaryInput()).isEqualTo(input); } /** * Returns the first artifact which is an output of "action" and has the * specified basename. An assertion error is raised if none is found. */ public static Artifact getOutput(Action action, String basename) { for (Artifact artifact : action.getOutputs()) { if (artifact.getExecPath().getBaseName().equals(basename)) { return artifact; } } throw new AssertionError("No output with basename '" + basename + "' in action " + action); } public static void registerActionWith(Action action, MutableActionGraph actionGraph) { try { actionGraph.registerAction(action); } catch (ActionConflictException e) { throw new UncheckedActionConflictException(e); } } }
/* * Copyright (C) 2016 Iyad Kuwatly * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.sunshine; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.ProgressBar; import com.example.android.sunshine.data.SunshinePreferences; import com.example.android.sunshine.data.WeatherContract; import com.example.android.sunshine.sync.SunshineSyncUtils; import com.example.android.sunshine.utilities.NotificationUtils; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.wearable.Wearable; public class MainActivity extends AppCompatActivity implements LoaderManager.LoaderCallbacks<Cursor>, ForecastAdapter.ForecastAdapterOnClickHandler, GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener { private final String TAG = MainActivity.class.getSimpleName(); /* * The columns of data that we are interested in displaying within our MainActivity's list of * 
weather data.
     */
    public static final String[] MAIN_FORECAST_PROJECTION = {
            WeatherContract.WeatherEntry.COLUMN_DATE,
            WeatherContract.WeatherEntry.COLUMN_MAX_TEMP,
            WeatherContract.WeatherEntry.COLUMN_MIN_TEMP,
            WeatherContract.WeatherEntry.COLUMN_WEATHER_ID,
    };

    /*
     * We store the indices of the values in the array of Strings above to more quickly be able to
     * access the data from our query. If the order of the Strings above changes, these indices
     * must be adjusted to match the order of the Strings.
     */
    public static final int INDEX_WEATHER_DATE = 0;
    public static final int INDEX_WEATHER_MAX_TEMP = 1;
    public static final int INDEX_WEATHER_MIN_TEMP = 2;
    public static final int INDEX_WEATHER_CONDITION_ID = 3;

    /*
     * This ID identifies the Loader responsible for loading our weather forecast. It only needs
     * to be unique within this Activity; 44 was chosen arbitrarily.
     */
    private static final int ID_FORECAST_LOADER = 44;

    private ForecastAdapter mForecastAdapter;
    private RecyclerView mRecyclerView;
    // Scroll position to restore after a data load; NO_POSITION until known.
    private int mPosition = RecyclerView.NO_POSITION;

    private ProgressBar mLoadingIndicator;

    // NOTE(review): a public static reference to a client built around this Activity can
    // outlive the Activity and leak it -- consider making this an instance field.
    public static GoogleApiClient mGoogleApiClient;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_forecast);
        // NOTE(review): getSupportActionBar() can return null if the theme has no action bar.
        getSupportActionBar().setElevation(0f);

        mRecyclerView = (RecyclerView) findViewById(R.id.recyclerview_forecast);

        // Indicator shown while data is loading; hidden once data arrives.
        mLoadingIndicator = (ProgressBar) findViewById(R.id.pb_loading_indicator);

        // Vertical list; the third parameter (shouldReverseLayout) is generally only
        // true for horizontal lists that need right-to-left layout.
        LinearLayoutManager layoutManager =
                new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false);
        mRecyclerView.setLayoutManager(layoutManager);

        // Item sizes never change, so RecyclerView can skip per-child remeasurement.
        mRecyclerView.setHasFixedSize(true);

        // "this" serves both as the Context and as the click handler, since MainActivity
        // implements ForecastAdapter.ForecastAdapterOnClickHandler.
        mForecastAdapter = new ForecastAdapter(this, this);
        mRecyclerView.setAdapter(mForecastAdapter);

        showLoading();

        // Initializes the forecast loader, or re-uses the last created one.
        getSupportLoaderManager().initLoader(ID_FORECAST_LOADER, null, this);

        SunshineSyncUtils.initialize(this);

        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addApi(Wearable.API)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .build();
    }

    /**
     * Uses the geo URI scheme to show the preferred location on a map via an implicit Intent.
     *
     * @see "http://developer.android.com/guide/components/intents-common.html#Maps"
     */
    private void openPreferredLocationInMap() {
        double[] coords = SunshinePreferences.getLocationCoordinates(this);
        String posLat = Double.toString(coords[0]);
        String posLong = Double.toString(coords[1]);
        Uri geoLocation = Uri.parse("geo:" + posLat + "," + posLong);

        Intent intent = new Intent(Intent.ACTION_VIEW);
        intent.setData(geoLocation);

        if (intent.resolveActivity(getPackageManager()) != null) {
            startActivity(intent);
        } else {
            Log.d(TAG, "Couldn't call " + geoLocation.toString()
                    + ", no receiving apps installed!");
        }
    }

    /**
     * Called by the {@link android.support.v4.app.LoaderManagerImpl} when a new Loader needs to be
     * created. This Activity only uses one loader, so we don't necessarily NEED to check the
     * loaderId, but this is certainly best practice.
 *
     * @param loaderId The loader ID for which we need to create a loader
     * @param bundle Any arguments supplied by the caller
     * @return A new Loader instance that is ready to start loading.
     */
    @Override
    public Loader<Cursor> onCreateLoader(int loaderId, Bundle bundle) {
        switch (loaderId) {

            case ID_FORECAST_LOADER:
                /* URI for all rows of weather data in our weather table */
                Uri forecastQueryUri = WeatherContract.WeatherEntry.CONTENT_URI;
                /* Sort order: Ascending by date */
                String sortOrder = WeatherContract.WeatherEntry.COLUMN_DATE + " ASC";
                /*
                 * A SELECTION in SQL declares which rows you'd like to return. In our case, we
                 * want all weather data from today onwards that is stored in our weather table;
                 * WeatherEntry provides a helper that builds exactly that clause.
                 */
                String selection = WeatherContract.WeatherEntry.getSqlSelectForTodayOnwards();

                return new CursorLoader(this,
                        forecastQueryUri,
                        MAIN_FORECAST_PROJECTION,
                        selection,
                        null,
                        sortOrder);

            default:
                throw new RuntimeException("Loader Not Implemented: " + loaderId);
        }
    }

    /**
     * Called when a Loader has finished loading its data.
     *
     * NOTE: There is one small bug in this code. If no data is present in the cursor due to an
     * initial load being performed with no access to internet, the loading indicator will show
     * indefinitely, until data is present from the ContentProvider. This will be fixed in a
     * future version of the course.
     *
     * @param loader The Loader that has finished.
     * @param data   The data generated by the Loader.
     */
    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        mForecastAdapter.swapCursor(data);
        if (mPosition == RecyclerView.NO_POSITION) mPosition = 0;
        mRecyclerView.smoothScrollToPosition(mPosition);
        // Known issue (see javadoc): with an empty cursor the loading indicator stays visible.
        if (data.getCount() != 0) showWeatherDataView();
    }

    /**
     * Called when a previously created loader is being reset, and thus making its data unavailable.
     * The application should at this point remove any references it has to the Loader's data.
* * @param loader The Loader that is being reset. */ @Override public void onLoaderReset(Loader<Cursor> loader) { /* * Since this Loader's data is now invalid, we need to clear the Adapter that is * displaying the data. */ mForecastAdapter.swapCursor(null); } /** * This method is for responding to clicks from our list. * * @param date Normalized UTC time that represents the local date of the weather in GMT time. * @see WeatherContract.WeatherEntry#COLUMN_DATE */ @Override public void onClick(long date) { Intent weatherDetailIntent = new Intent(MainActivity.this, DetailActivity.class); Uri uriForDateClicked = WeatherContract.WeatherEntry.buildWeatherUriWithDate(date); weatherDetailIntent.setData(uriForDateClicked); startActivity(weatherDetailIntent); } /** * This method will make the View for the weather data visible and hide the error message and * loading indicator. * <p> * Since it is okay to redundantly set the visibility of a View, we don't need to check whether * each view is currently visible or invisible. */ private void showWeatherDataView() { /* First, hide the loading indicator */ mLoadingIndicator.setVisibility(View.INVISIBLE); /* Finally, make sure the weather data is visible */ mRecyclerView.setVisibility(View.VISIBLE); } /** * This method will make the loading indicator visible and hide the weather View and error * message. * <p> * Since it is okay to redundantly set the visibility of a View, we don't need to check whether * each view is currently visible or invisible. */ private void showLoading() { /* Then, hide the weather data */ mRecyclerView.setVisibility(View.INVISIBLE); /* Finally, show the loading indicator */ mLoadingIndicator.setVisibility(View.VISIBLE); } /** * This is where we inflate and set up the menu for this Activity. * * @param menu The options menu in which you place your items. * * @return You must return true for the menu to be displayed; * if you return false it will not be shown. 
* * @see #onPrepareOptionsMenu * @see #onOptionsItemSelected */ @Override public boolean onCreateOptionsMenu(Menu menu) { /* Use AppCompatActivity's method getMenuInflater to get a handle on the menu inflater */ MenuInflater inflater = getMenuInflater(); /* Use the inflater's inflate method to inflate our menu layout to this menu */ inflater.inflate(R.menu.forecast, menu); /* Return true so that the menu is displayed in the Toolbar */ return true; } /** * Callback invoked when a menu item was selected from this Activity's menu. * * @param item The menu item that was selected by the user * * @return true if you handle the menu click here, false otherwise */ @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_settings) { startActivity(new Intent(this, SettingsActivity.class)); return true; } if (id == R.id.action_map) { openPreferredLocationInMap(); return true; } return super.onOptionsItemSelected(item); } @Override protected void onResume() { super.onResume(); mGoogleApiClient.connect(); } @Override protected void onPause() { super.onPause(); mGoogleApiClient.disconnect(); } @Override public void onConnected(Bundle bundle) { NotificationUtils.notifyWearOfNewWeather(this); } @Override public void onConnectionSuspended(int i) { } @Override public void onConnectionFailed(ConnectionResult connectionResult) { } }
/*
 * Maintained by brightSPARK Labs.
 * www.brightsparklabs.com
 *
 * Refer to LICENSE at repository root for license details.
 */

package com.brightsparklabs.asanti.integration;

import static org.junit.Assert.*;

import com.brightsparklabs.asanti.Asanti;
import com.brightsparklabs.asanti.decoder.AsnByteDecoder;
import com.brightsparklabs.asanti.model.data.AsantiAsnData;
import com.brightsparklabs.asanti.model.data.RawAsnData;
import com.brightsparklabs.asanti.validator.FailureType;
import com.brightsparklabs.asanti.validator.ValidationFailure;
import com.brightsparklabs.asanti.validator.ValidationResult;
import com.brightsparklabs.asanti.validator.Validator;
import com.brightsparklabs.asanti.validator.Validators;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Ordering;
import com.google.common.io.*;
import java.io.File;
import java.math.BigInteger;
import java.util.Map;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Integration tests for {@link Asanti}
 *
 * <p>All tests here load BER data and ASN.1 schemas from classpath test
 * resources (e.g. {@code /TestMostSimple.ber}), so expected values are
 * coupled to those files.
 *
 * @author brightSPARK Labs
 */
public class AsantiTest {
    /** class logger */
    private static final Logger logger = LoggerFactory.getLogger(AsantiTest.class);

    /**
     * Decodes a raw BER file WITHOUT a schema and checks the raw tags and
     * their byte contents directly.
     */
    @Test
    public void testDecodeAsnData() throws Exception {
        /* These results are all tightly coupled to the test data files
         * which is sort of inevitable, but we should keep an eye on how to better
         * manage this dependency/coupling, eg should we auto generate those files etc?
         */
        logger.info("Testing just the ber");
        String berFilename = getClass().getResource("/TestMostSimple.ber").getFile();
        final File berFile = new File(berFilename);
        final ByteSource byteSource = Files.asByteSource(berFile);
        final ImmutableList<RawAsnData> allRawAsnData = Asanti.readAsnBerData(byteSource);
        // Log every raw tag of every PDU (sorted for stable output) while
        // counting the PDUs.
        int count = 0;
        for (final RawAsnData rawAsnData : allRawAsnData) {
            logger.info("PDU[" + count + "]");
            final Map<String, byte[]> tagsData = rawAsnData.getBytes();
            for (final String tag : Ordering.natural().immutableSortedCopy(tagsData.keySet())) {
                logger.info("\t {}: 0x{}", tag, BaseEncoding.base16().encode(tagsData.get(tag)));
            }
            count++;
        }
        RawAsnData rawAsnData = allRawAsnData.get(0);
        // expecting two tags.
        assertEquals(2, rawAsnData.getRawTags().size());
        byte[] b0 = rawAsnData.getBytes("/0[0]").get();
        // we 'know' that this is a UTF8String
        String s = AsnByteDecoder.decodeAsUtf8String(b0);
        assertEquals("Adam", s);
        byte[] b1 = rawAsnData.getBytes("/1[1]").get();
        // we 'know' that this is an integer
        BigInteger big = AsnByteDecoder.decodeAsInteger(b1);
        assertEquals(32, big.intValue());
        assertEquals("Am expecting one PDU", 1, count);
    }

    /**
     * Decodes the same BER file WITH its schema (top-level type "Human") and
     * checks that tags decode to the expected typed values.
     */
    @Test
    public void testDecodeAsnData1() throws Exception {
        logger.info("testing ber against schema");
        final CharSource schemaSource =
                Resources.asCharSource(
                        getClass().getResource("/TestMostSimple.asn"), Charsets.UTF_8);
        final ByteSource berSource =
                Resources.asByteSource(getClass().getResource("/TestMostSimple.ber"));
        final ImmutableList<AsantiAsnData> allDecodedData =
                Asanti.decodeAsnData(berSource, schemaSource, "Human");
        // Log mapped and unmapped tags for each PDU; every tag (mapped or
        // not) must be reported by contains().
        for (int i = 0; i < allDecodedData.size(); i++) {
            logger.info("Parsing PDU[{}]", i);
            final AsantiAsnData pdu = allDecodedData.get(i);
            for (String tag : pdu.getTags()) {
                logger.info("\t{} => {}", tag, pdu.getHexString(tag).get());
                logger.info("\t\tbuiltinType {} ", pdu.getType(tag).get().getBuiltinType());
                assertTrue("Tag is found with contains", pdu.contains(tag));
            }
            for (String tag : pdu.getUnmappedTags()) {
                logger.info("?\t{} => {}", tag, pdu.getHexString(tag).get());
                logger.info("\t\tbuiltinType{} ", pdu.getType(tag).get().getBuiltinType());
                assertTrue("Tag is found with contains", pdu.contains(tag));
            }
        }
        final AsantiAsnData pdu = allDecodedData.get(0);
        String tag = "/Human/name";
        // Raw bytes should decode to the same value as getDecodedObject.
        byte[] b = pdu.getBytes(tag).get();
        String s = new String(b, Charsets.UTF_8);
        logger.info("{} is {}", tag, s);
        assertEquals("Adam", s);
        String name = pdu.getDecodedObject(tag, String.class).get();
        assertEquals("Adam", name);
    }

    /**
     * Decodes a different schema/BER pair (top-level type "Bar") and checks
     * only that every reported tag is visible via contains() — a smoke test
     * for schema-driven decoding of type definitions.
     */
    @Test
    public void testDecodeAsnData2() throws Exception {
        logger.info("testing ber against schema");
        final CharSource schemaSource =
                Resources.asCharSource(getClass().getResource("/barTypeDef.asn"), Charsets.UTF_8);
        final ByteSource berSource = Resources.asByteSource(getClass().getResource("/bar.ber"));
        final ImmutableList<AsantiAsnData> allDecodedData =
                Asanti.decodeAsnData(berSource, schemaSource, "Bar");
        for (int i = 0; i < allDecodedData.size(); i++) {
            logger.info("Parsing PDU[{}]", i);
            final AsantiAsnData pdu = allDecodedData.get(i);
            for (String tag : pdu.getTags()) {
                logger.info("\t{} => {}", tag, pdu.getHexString(tag).get());
                logger.info("\t\tbuiltinType {} ", pdu.getType(tag).get().getBuiltinType());
                assertTrue("Tag is found with contains", pdu.contains(tag));
            }
            for (String tag : pdu.getUnmappedTags()) {
                logger.info("?\t{} => {}", tag, pdu.getHexString(tag).get());
                logger.info("\t\tbuiltinType{} ", pdu.getType(tag).get().getBuiltinType());
                assertTrue("Tag is found with contains", pdu.contains(tag));
            }
        }
    }

    /**
     * Decodes using a schema written as a type definition
     * ({@code TestMostSimpleTypeDef.asn}) and checks both fields
     * ({@code name}, {@code age}) via raw-byte decoding and
     * getDecodedObject.
     */
    @Test
    public void testReadAsnBerFile() throws Exception {
        final CharSource schemaSource =
                Resources.asCharSource(
                        getClass().getResource("/TestMostSimpleTypeDef.asn"), Charsets.UTF_8);
        final ByteSource berSource =
                Resources.asByteSource(getClass().getResource("/TestMostSimple.ber"));
        final ImmutableList<AsantiAsnData> allDecodedData =
                Asanti.decodeAsnData(berSource, schemaSource, "Human");
        for (int i = 0; i < allDecodedData.size(); i++) {
            logger.info("Parsing PDU[{}]", i);
            final AsantiAsnData pdu = allDecodedData.get(i);
            for (String tag : pdu.getTags()) {
                logger.info("\t{} => {}", tag, pdu.getHexString(tag).get());
                logger.info("\t\tbuiltinType {} ", pdu.getType(tag).get().getBuiltinType());
                assertTrue("Tag is found with contains", pdu.contains(tag));
            }
            for (String tag : pdu.getUnmappedTags()) {
                logger.info("?\t{} => {}", tag, pdu.getHexString(tag).get());
                logger.info("\t\tbuiltinType{} ", pdu.getType(tag).get().getBuiltinType());
                assertTrue("Tag is found with contains", pdu.contains(tag));
            }
        }
        final AsantiAsnData pdu = allDecodedData.get(0);
        String tag = "/Human/name";
        // we 'know' that this is a UTF8String
        String s = AsnByteDecoder.decodeAsUtf8String(pdu.getBytes(tag).get());
        logger.info("{} is {}", tag, s);
        assertEquals("Adam", s);
        s = pdu.getDecodedObject(tag, String.class).get();
        assertEquals("Adam", s);
        tag = "/Human/age";
        // we 'know' that this is an Integer
        BigInteger age = AsnByteDecoder.decodeAsInteger(pdu.getBytes(tag).get());
        logger.info("{} is {}", tag, age);
        assertEquals(new BigInteger("32"), age);
        age = pdu.getDecodedObject(tag, BigInteger.class).get();
        assertEquals(new BigInteger("32"), age);
    }

    /**
     * Validates a BER file that is missing a mandatory field and checks that
     * the default validator reports exactly one MandatoryFieldMissing
     * failure at the expected tag.
     */
    @Test
    public void testReadBerMissingField() throws Exception {
        final CharSource schemaSource =
                Resources.asCharSource(
                        getClass().getResource("/validation/Simple3.asn"), Charsets.UTF_8);
        final ByteSource berSource =
                Resources.asByteSource(getClass().getResource("/validation/Simple3_missing_b.ber"));
        final ImmutableList<AsantiAsnData> allDecodedData =
                Asanti.decodeAsnData(berSource, schemaSource, "Human");
        final Validator validator = Validators.getDefault();
        final ValidationResult validationResult = validator.validate(allDecodedData.get(0));
        // Ensure there are no unmapped tags
        assertEquals(0, allDecodedData.get(0).getUnmappedTags().size());
        final ImmutableSet<ValidationFailure> failures = validationResult.getFailures();
        assertEquals(1, failures.size());
        final ValidationFailure failure = failures.iterator().next();
        final FailureType failureType = failure.getFailureType();
        assertEquals(FailureType.MandatoryFieldMissing, failureType);
        assertEquals("/Human/b", failure.getFailureTag());
    }
}
package io.eventStreamAnalytics.model;

import com.google.common.base.Splitter;
import org.bson.Document;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Field;

import java.io.Serializable;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Map;

/**
 * A single analytics event, persisted to the MongoDB "events" collection
 * under abbreviated field names (c, s, e, u, d, da, ...).
 *
 * <p>Wire format: events travel as {@code key=value} pairs joined with
 * {@code &} (see {@link #Event(String)} and {@link #serialize()}). The two
 * methods share the key set and the timestamp pattern, so an event can be
 * round-tripped through its own wire form.
 *
 * Created by badal on 1/9/16.
 */
@org.springframework.data.mongodb.core.mapping.Document(collection = "events")
public class Event implements Serializable {

    /**
     * Timestamp wire pattern shared by parsing and serialization.
     * DateTimeFormatter is immutable/thread-safe, so it is cached once
     * instead of being rebuilt per call.
     */
    private static final DateTimeFormatter WIRE_DATE_FORMAT =
            DateTimeFormatter.ofPattern("yyyy.MM.dd.HH:mm:ss:SSS");

    @Id
    @Field(value = "id")
    private String uuId;
    @Field(value = "c")
    private String customerId;
    @Field(value = "s")
    private String sessionId;
    @Field(value = "e")
    private String eventName;
    @Field(value = "u")
    private String url;
    @Field(value = "d")
    private String deviceId;
    @Field(value = "da")
    private Long dateTimeinMillis;
    // day/month/year/hour are denormalized from dateTime to allow cheap
    // bucketed queries in Mongo.
    @Field(value = "day")
    private int day;
    @Field(value = "mo")
    private int month;
    @Field(value = "y")
    private int year;
    @Field(value = "ho")
    private int hour;
    // Not persisted/serialized (transient); the epoch-millis field is the
    // durable representation.
    private transient LocalDateTime dateTime;

    /** No-arg constructor required by the mapping framework. */
    public Event() {
    }

    /**
     * Parses an event from its wire form: {@code &}-separated
     * {@code key=value} pairs using keys id, c, s, e, u, d and date.
     *
     * @param message the wire-format message
     * @throws RuntimeException if the message is malformed (missing keys or
     *     an unparseable date); rethrown after being printed
     */
    public Event(String message) {
        try {
            Map<String, String> split =
                    Splitter.on("&").withKeyValueSeparator("=").split(message);
            this.uuId = split.get("id");
            this.customerId = split.get("c");
            this.sessionId = split.get("s");
            this.eventName = split.get("e");
            this.url = split.get("u");
            this.deviceId = split.get("d");
            this.dateTime = LocalDateTime.parse(split.get("date"), WIRE_DATE_FORMAT);
            this.dateTimeinMillis =
                    this.dateTime.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
            this.year = this.dateTime.getYear();
            this.month = this.dateTime.getMonthValue();
            this.day = this.dateTime.getDayOfMonth();
            this.hour = this.dateTime.getHour();
        } catch (RuntimeException ex) {
            // NOTE(review): prefer a logger over printStackTrace; kept for
            // behavioral compatibility. The exception still propagates.
            ex.printStackTrace();
            throw ex;
        }
    }

    /**
     * Creates an event from explicit values; derives the epoch-millis and
     * the day/month/year/hour buckets from {@code dateTime} using the
     * system default zone.
     */
    public Event(String customerId, String sessionId, String eventName, String url,
                 String deviceId, LocalDateTime dateTime) {
        this.customerId = customerId;
        this.sessionId = sessionId;
        this.eventName = eventName;
        this.url = url;
        this.deviceId = deviceId;
        this.dateTime = dateTime;
        this.dateTimeinMillis =
                dateTime.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
        this.year = dateTime.getYear();
        this.month = dateTime.getMonthValue();
        this.day = dateTime.getDayOfMonth();
        this.hour = dateTime.getHour();
    }

    public String getSessionId() {
        return sessionId;
    }

    public String getEventName() {
        return eventName;
    }

    public String getUrl() {
        return url;
    }

    public String getCustomerId() {
        return this.customerId;
    }

    public String getUuId() {
        return this.uuId;
    }

    public void setUuId(String uuId) {
        this.uuId = uuId;
    }

    public void setCustomerId(String customerId) {
        this.customerId = customerId;
    }

    public void setSessionId(String sessionId) {
        this.sessionId = sessionId;
    }

    public void setEventName(String eventName) {
        this.eventName = eventName;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getDeviceId() {
        return deviceId;
    }

    public void setDeviceId(String deviceId) {
        this.deviceId = deviceId;
    }

    public int getDay() {
        return day;
    }

    public void setDay(int day) {
        this.day = day;
    }

    public int getMonth() {
        return month;
    }

    public void setMonth(int month) {
        this.month = month;
    }

    public int getYear() {
        return year;
    }

    public void setYear(int year) {
        this.year = year;
    }

    public int getHour() {
        return hour;
    }

    public void setHour(int hour) {
        this.hour = hour;
    }

    public Long getDateTimeinMillis() {
        return dateTimeinMillis;
    }

    public void setDateTimeinMillis(Long dateTimeinMillis) {
        this.dateTimeinMillis = dateTimeinMillis;
    }

    public LocalDateTime getDateTime() {
        return dateTime;
    }

    public void setDateTime(LocalDateTime dateTime) {
        this.dateTime = dateTime;
    }

    /**
     * Serializes this event to its wire form.
     *
     * <p>BUG FIX: the timestamp was previously written under the key
     * {@code udate}, but {@link #Event(String)} parses the key {@code date}
     * — round-tripping a serialized event therefore failed with an NPE in
     * {@code LocalDateTime.parse(null, ...)}. The key is now {@code date}
     * to match the parser.
     *
     * @return the request path plus query string for this event
     */
    public String serialize() {
        StringBuilder sb = new StringBuilder();
        sb.append("/events?");
        sb.append("c=").append(customerId);
        sb.append("&s=").append(sessionId);
        sb.append("&e=").append(eventName);
        sb.append("&u=").append(url);
        sb.append("&d=").append(deviceId);
        sb.append("&date=").append(dateTime.format(WIRE_DATE_FORMAT));
        return sb.toString();
    }

    /**
     * Builds the BSON document for persistence, mirroring the
     * {@code @Field} abbreviations declared on the class.
     *
     * @return a new {@link Document} populated from this event
     */
    public Document getDbObject() {
        return new Document()
                .append("c", getCustomerId())
                .append("id", getUuId())
                .append("s", getSessionId())
                .append("e", getEventName())
                .append("d", getDeviceId())
                .append("u", getUrl())
                .append("da", getDateTimeinMillis())
                .append("y", getYear())
                .append("day", getDay())
                .append("mo", getMonth())
                .append("ho", getHour());
    }
}
/* =========================================================== * JFreeChart : a free chart library for the Java(tm) platform * =========================================================== * * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. * * Project Info: http://www.jfree.org/jfreechart/index.html * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Java is a trademark or registered trademark of Sun Microsystems, Inc. * in the United States and other countries.] * * ----------------- * XYBarDataset.java * ----------------- * (C) Copyright 2004-2007, by Object Refinery Limited and Contributors. 
* * Original Author: David Gilbert (for Object Refinery Limited); * Contributor(s): -; * * Changes * ------- * 02-Mar-2004 : Version 1 (DG); * 05-May-2004 : Now extends AbstractIntervalXYDataset (DG); * 15-Jul-2004 : Switched getX() with getXValue() and getY() with * getYValue() (DG); * ------------- JFREECHART 1.0.x --------------------------------------------- * 25-Jan-2007 : Added some accessor methods, plus new equals() and clone() * overrides (DG); * 30-Jan-2007 : Added method overrides to prevent unnecessary object * creation (DG); * */ package org.jfree.data.xy; import org.jfree.data.general.DatasetChangeEvent; import org.jfree.data.general.DatasetChangeListener; import org.jfree.util.PublicCloneable; /** * A dataset wrapper class that converts a standard {@link XYDataset} into an * {@link IntervalXYDataset} suitable for use in creating XY bar charts. */ public class XYBarDataset extends AbstractIntervalXYDataset implements IntervalXYDataset, DatasetChangeListener { /** The underlying dataset. */ private XYDataset underlying; /** The bar width. */ private double barWidth; /** * Creates a new dataset. * * @param underlying the underlying dataset (<code>null</code> not * permitted). * @param barWidth the width of the bars. */ public XYBarDataset(XYDataset underlying, double barWidth) { this.underlying = underlying; this.underlying.addChangeListener(this); this.barWidth = barWidth; } /** * Returns the underlying dataset that was specified via the constructor. * * @return The underlying dataset (never <code>null</code>). * * @since 1.0.4 */ public XYDataset getUnderlyingDataset() { return this.underlying; } /** * Returns the bar width. * * @return The bar width. * * @see #setBarWidth(double) * @since 1.0.4 */ public double getBarWidth() { return this.barWidth; } /** * Sets the bar width and sends a {@link DatasetChangeEvent} to all * registered listeners. * * @param barWidth the bar width. 
* * @see #getBarWidth() * @since 1.0.4 */ public void setBarWidth(double barWidth) { this.barWidth = barWidth; notifyListeners(new DatasetChangeEvent(this, this)); } /** * Returns the number of series in the dataset. * * @return The series count. */ public int getSeriesCount() { return this.underlying.getSeriesCount(); } /** * Returns the key for a series. * * @param series the series index (in the range <code>0</code> to * <code>getSeriesCount() - 1</code>). * * @return The series key. */ public Comparable getSeriesKey(int series) { return this.underlying.getSeriesKey(series); } /** * Returns the number of items in a series. * * @param series the series index (zero-based). * * @return The item count. */ public int getItemCount(int series) { return this.underlying.getItemCount(series); } /** * Returns the x-value for an item within a series. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The x-value. * * @see #getXValue(int, int) */ public Number getX(int series, int item) { return this.underlying.getX(series, item); } /** * Returns the x-value (as a double primitive) for an item within a series. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. * * @see #getX(int, int) */ public double getXValue(int series, int item) { return this.underlying.getXValue(series, item); } /** * Returns the y-value for an item within a series. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The y-value (possibly <code>null</code>). * * @see #getYValue(int, int) */ public Number getY(int series, int item) { return this.underlying.getY(series, item); } /** * Returns the y-value (as a double primitive) for an item within a series. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. 
* * @see #getY(int, int) */ public double getYValue(int series, int item) { return this.underlying.getYValue(series, item); } /** * Returns the starting X value for the specified series and item. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ public Number getStartX(int series, int item) { Number result = null; Number xnum = this.underlying.getX(series, item); if (xnum != null) { result = new Double(xnum.doubleValue() - this.barWidth / 2.0); } return result; } /** * Returns the starting x-value (as a double primitive) for an item within * a series. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. * * @see #getXValue(int, int) */ public double getStartXValue(int series, int item) { return getXValue(series, item) - this.barWidth / 2.0; } /** * Returns the ending X value for the specified series and item. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ public Number getEndX(int series, int item) { Number result = null; Number xnum = this.underlying.getX(series, item); if (xnum != null) { result = new Double(xnum.doubleValue() + this.barWidth / 2.0); } return result; } /** * Returns the ending x-value (as a double primitive) for an item within * a series. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. * * @see #getXValue(int, int) */ public double getEndXValue(int series, int item) { return getXValue(series, item) + this.barWidth / 2.0; } /** * Returns the starting Y value for the specified series and item. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. 
*/ public Number getStartY(int series, int item) { return this.underlying.getY(series, item); } /** * Returns the starting y-value (as a double primitive) for an item within * a series. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. * * @see #getYValue(int, int) */ public double getStartYValue(int series, int item) { return getYValue(series, item); } /** * Returns the ending Y value for the specified series and item. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ public Number getEndY(int series, int item) { return this.underlying.getY(series, item); } /** * Returns the ending y-value (as a double primitive) for an item within * a series. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. * * @see #getYValue(int, int) */ public double getEndYValue(int series, int item) { return getYValue(series, item); } /** * Receives notification of an dataset change event. * * @param event information about the event. */ public void datasetChanged(DatasetChangeEvent event) { this.notifyListeners(event); } /** * Tests this dataset for equality with an arbitrary object. * * @param obj the object (<code>null</code> permitted). * * @return A boolean. */ public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof XYBarDataset)) { return false; } XYBarDataset that = (XYBarDataset) obj; if (!this.underlying.equals(that.underlying)) { return false; } if (this.barWidth != that.barWidth) { return false; } return true; } /** * Returns an independent copy of the dataset. Note that: * <ul> * <li>the underlying dataset is only cloned if it implements the * {@link PublicCloneable} interface;</li> * <li>the listeners registered with this dataset are not carried over to * the cloned dataset.</li> * </ul> * * @return An independent copy of the dataset. 
* * @throws CloneNotSupportedException if the dataset cannot be cloned for * any reason. */ public Object clone() throws CloneNotSupportedException { XYBarDataset clone = (XYBarDataset) super.clone(); if (this.underlying instanceof PublicCloneable) { clone.underlying = (XYDataset) ((PublicCloneable) this.underlying).clone(); } return clone; } }
package org.apache.lucene.search.grouping; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.*; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.*; import org.apache.lucene.index.*; import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.grouping.dv.DVDistinctValuesCollector; import org.apache.lucene.search.grouping.dv.DVFirstPassGroupingCollector; import org.apache.lucene.search.grouping.function.FunctionDistinctValuesCollector; import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector; import org.apache.lucene.search.grouping.term.TermDistinctValuesCollector; import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util._TestUtil; import org.apache.lucene.util.mutable.MutableValue; import org.apache.lucene.util.mutable.MutableValueStr; public class DistinctValuesCollectorTest extends 
AbstractGroupingTestCase { private final static NullComparator nullComparator = new NullComparator(); private final String groupField = "author"; private final String countField = "publisher"; public void testSimple() throws Exception { Random random = random(); DocValues.Type[] dvTypes = new DocValues.Type[]{ DocValues.Type.VAR_INTS, DocValues.Type.FLOAT_64, DocValues.Type.BYTES_VAR_STRAIGHT, DocValues.Type.BYTES_VAR_SORTED }; Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); boolean canUseDV = !"Lucene3x".equals(w.w.getConfig().getCodec().getName()); DocValues.Type dvType = canUseDV ? dvTypes[random.nextInt(dvTypes.length)] : null; Document doc = new Document(); addField(doc, groupField, "1", dvType); addField(doc, countField, "1", dvType); doc.add(new TextField("content", "random text", Field.Store.NO)); doc.add(new StringField("id", "1", Field.Store.NO)); w.addDocument(doc); // 1 doc = new Document(); addField(doc, groupField, "1", dvType); addField(doc, countField, "1", dvType); doc.add(new TextField("content", "some more random text blob", Field.Store.NO)); doc.add(new StringField("id", "2", Field.Store.NO)); w.addDocument(doc); // 2 doc = new Document(); addField(doc, groupField, "1", dvType); addField(doc, countField, "2", dvType); doc.add(new TextField("content", "some more random textual data", Field.Store.NO)); doc.add(new StringField("id", "3", Field.Store.NO)); w.addDocument(doc); w.commit(); // To ensure a second segment // 3 doc = new Document(); addField(doc, groupField, "2", dvType); doc.add(new TextField("content", "some random text", Field.Store.NO)); doc.add(new StringField("id", "4", Field.Store.NO)); w.addDocument(doc); // 4 doc = new Document(); addField(doc, groupField, "3", dvType); addField(doc, countField, "1", dvType); doc.add(new TextField("content", "some more random text", Field.Store.NO)); 
doc.add(new StringField("id", "5", Field.Store.NO)); w.addDocument(doc); // 5 doc = new Document(); addField(doc, groupField, "3", dvType); addField(doc, countField, "1", dvType); doc.add(new TextField("content", "random blob", Field.Store.NO)); doc.add(new StringField("id", "6", Field.Store.NO)); w.addDocument(doc); // 6 -- no author field doc = new Document(); doc.add(new TextField("content", "random word stuck in alot of other text", Field.Store.YES)); addField(doc, countField, "1", dvType); doc.add(new StringField("id", "6", Field.Store.NO)); w.addDocument(doc); IndexSearcher indexSearcher = newSearcher(w.getReader()); w.close(); Comparator<AbstractDistinctValuesCollector.GroupCount<Comparable<Object>>> cmp = new Comparator<AbstractDistinctValuesCollector.GroupCount<Comparable<Object>>>() { public int compare(AbstractDistinctValuesCollector.GroupCount<Comparable<Object>> groupCount1, AbstractDistinctValuesCollector.GroupCount<Comparable<Object>> groupCount2) { if (groupCount1.groupValue == null) { if (groupCount2.groupValue == null) { return 0; } return -1; } else if (groupCount2.groupValue == null) { return 1; } else { return groupCount1.groupValue.compareTo(groupCount2.groupValue); } } }; // === Search for content:random AbstractFirstPassGroupingCollector<Comparable<Object>> firstCollector = createRandomFirstPassCollector(dvType, new Sort(), groupField, 10); indexSearcher.search(new TermQuery(new Term("content", "random")), firstCollector); AbstractDistinctValuesCollector<? extends AbstractDistinctValuesCollector.GroupCount<Comparable<Object>>> distinctValuesCollector = createDistinctCountCollector(firstCollector, groupField, countField, dvType); indexSearcher.search(new TermQuery(new Term("content", "random")), distinctValuesCollector); List<? 
extends AbstractDistinctValuesCollector.GroupCount<Comparable<Object>>> gcs = distinctValuesCollector.getGroups(); Collections.sort(gcs, cmp); assertEquals(4, gcs.size()); compareNull(gcs.get(0).groupValue); List<Comparable<?>> countValues = new ArrayList<Comparable<?>>(gcs.get(0).uniqueValues); assertEquals(1, countValues.size()); compare("1", countValues.get(0)); compare("1", gcs.get(1).groupValue); countValues = new ArrayList<Comparable<?>>(gcs.get(1).uniqueValues); Collections.sort(countValues, nullComparator); assertEquals(2, countValues.size()); compare("1", countValues.get(0)); compare("2", countValues.get(1)); compare("2", gcs.get(2).groupValue); countValues = new ArrayList<Comparable<?>>(gcs.get(2).uniqueValues); assertEquals(1, countValues.size()); compareNull(countValues.get(0)); compare("3", gcs.get(3).groupValue); countValues = new ArrayList<Comparable<?>>(gcs.get(3).uniqueValues); assertEquals(1, countValues.size()); compare("1", countValues.get(0)); // === Search for content:some firstCollector = createRandomFirstPassCollector(dvType, new Sort(), groupField, 10); indexSearcher.search(new TermQuery(new Term("content", "some")), firstCollector); distinctValuesCollector = createDistinctCountCollector(firstCollector, groupField, countField, dvType); indexSearcher.search(new TermQuery(new Term("content", "some")), distinctValuesCollector); gcs = distinctValuesCollector.getGroups(); Collections.sort(gcs, cmp); assertEquals(3, gcs.size()); compare("1", gcs.get(0).groupValue); countValues = new ArrayList<Comparable<?>>(gcs.get(0).uniqueValues); assertEquals(2, countValues.size()); Collections.sort(countValues, nullComparator); compare("1", countValues.get(0)); compare("2", countValues.get(1)); compare("2", gcs.get(1).groupValue); countValues = new ArrayList<Comparable<?>>(gcs.get(1).uniqueValues); assertEquals(1, countValues.size()); compareNull(countValues.get(0)); compare("3", gcs.get(2).groupValue); countValues = new 
ArrayList<Comparable<?>>(gcs.get(2).uniqueValues); assertEquals(1, countValues.size()); compare("1", countValues.get(0)); // === Search for content:blob firstCollector = createRandomFirstPassCollector(dvType, new Sort(), groupField, 10); indexSearcher.search(new TermQuery(new Term("content", "blob")), firstCollector); distinctValuesCollector = createDistinctCountCollector(firstCollector, groupField, countField, dvType); indexSearcher.search(new TermQuery(new Term("content", "blob")), distinctValuesCollector); gcs = distinctValuesCollector.getGroups(); Collections.sort(gcs, cmp); assertEquals(2, gcs.size()); compare("1", gcs.get(0).groupValue); countValues = new ArrayList<Comparable<?>>(gcs.get(0).uniqueValues); // B/c the only one document matched with blob inside the author 1 group assertEquals(1, countValues.size()); compare("1", countValues.get(0)); compare("3", gcs.get(1).groupValue); countValues = new ArrayList<Comparable<?>>(gcs.get(1).uniqueValues); assertEquals(1, countValues.size()); compare("1", countValues.get(0)); indexSearcher.getIndexReader().close(); dir.close(); } public void testRandom() throws Exception { Random random = random(); int numberOfRuns = _TestUtil.nextInt(random, 3, 6); for (int indexIter = 0; indexIter < numberOfRuns; indexIter++) { IndexContext context = createIndexContext(); for (int searchIter = 0; searchIter < 100; searchIter++) { final IndexSearcher searcher = newSearcher(context.indexReader); boolean useDv = context.dvType != null && random.nextBoolean(); DocValues.Type dvType = useDv ? 
context.dvType : null;
      // Pick a random indexed search term and a random group-count budget for this iteration.
      String term = context.contentStrings[random.nextInt(context.contentStrings.length)];
      Sort groupSort = new Sort(new SortField("id", SortField.Type.STRING));
      int topN = 1 + random.nextInt(10);
      // Compute the expected groups/unique-values straight from the bookkeeping maps...
      List<AbstractDistinctValuesCollector.GroupCount<Comparable<?>>> expectedResult = createExpectedResult(context, term, groupSort, topN);
      // ...then run the two-pass grouping search and compare.
      AbstractFirstPassGroupingCollector<Comparable<?>> firstCollector = createRandomFirstPassCollector(dvType, groupSort, groupField, topN);
      searcher.search(new TermQuery(new Term("content", term)), firstCollector);
      AbstractDistinctValuesCollector<? extends AbstractDistinctValuesCollector.GroupCount<Comparable<?>>> distinctValuesCollector = createDistinctCountCollector(firstCollector, groupField, countField, dvType);
      searcher.search(new TermQuery(new Term("content", term)), distinctValuesCollector);
      @SuppressWarnings("unchecked")
      List<AbstractDistinctValuesCollector.GroupCount<Comparable<?>>> actualResult = (List<AbstractDistinctValuesCollector.GroupCount<Comparable<?>>>) distinctValuesCollector.getGroups();

      if (VERBOSE) {
        System.out.println("Index iter=" + indexIter);
        System.out.println("Search iter=" + searchIter);
        System.out.println("Collector class name=" + distinctValuesCollector.getClass().getName());
      }

      assertEquals(expectedResult.size(), actualResult.size());
      for (int i = 0; i < expectedResult.size(); i++) {
        AbstractDistinctValuesCollector.GroupCount<Comparable<?>> expected = expectedResult.get(i);
        AbstractDistinctValuesCollector.GroupCount<Comparable<?>> actual = actualResult.get(i);
        assertValues(expected.groupValue, actual.groupValue);
        assertEquals(expected.uniqueValues.size(), actual.uniqueValues.size());
        // uniqueValues are sets with no defined order: sort both sides (nulls first) before pairwise compare.
        List<Comparable<?>> expectedUniqueValues = new ArrayList<Comparable<?>>(expected.uniqueValues);
        Collections.sort(expectedUniqueValues, nullComparator);
        List<Comparable<?>> actualUniqueValues = new ArrayList<Comparable<?>>(actual.uniqueValues);
        Collections.sort(actualUniqueValues, nullComparator);
        for (int j = 0; j < expected.uniqueValues.size(); j++) {
          assertValues(expectedUniqueValues.get(j), actualUniqueValues.get(j));
        }
      }
      }
      context.indexReader.close();
      context.directory.close();
    }
  }

  /**
   * Asserts an actual group/count value against the expected {@link BytesRef}-based
   * value; a null expectation is checked via {@link #compareNull(Object)} because
   * the different collector implementations encode "absent" differently.
   */
  private void assertValues(Object expected, Object actual) {
    if (expected == null) {
      compareNull(actual);
    } else {
      compare(((BytesRef) expected).utf8ToString(), actual);
    }
  }

  /**
   * Compares the expected string form against whatever concrete value type the
   * collector produced (BytesRef, Double, Long or MutableValue), converting the
   * expected string accordingly. Fails on any other type.
   */
  private void compare(String expected, Object groupValue) {
    if (BytesRef.class.isAssignableFrom(groupValue.getClass())) {
      assertEquals(expected, ((BytesRef) groupValue).utf8ToString());
    } else if (Double.class.isAssignableFrom(groupValue.getClass())) {
      assertEquals(Double.parseDouble(expected), groupValue);
    } else if (Long.class.isAssignableFrom(groupValue.getClass())) {
      assertEquals(Long.parseLong(expected), groupValue);
    } else if (MutableValue.class.isAssignableFrom(groupValue.getClass())) {
      MutableValueStr mutableValue = new MutableValueStr();
      mutableValue.value = new BytesRef(expected);
      assertEquals(mutableValue, groupValue);
    } else {
      fail();
    }
  }

  /**
   * Asserts that a value represents "no value" in the representation used by the
   * collector that produced it: null (term-based), empty/zero sentinel (DV-based),
   * or a non-existing MutableValue (function-based).
   */
  private void compareNull(Object groupValue) {
    if (groupValue == null) {
      return; // term based impl...
    }
    // DV based impls..
    if (BytesRef.class.isAssignableFrom(groupValue.getClass())) {
      assertEquals("", ((BytesRef) groupValue).utf8ToString());
    } else if (Double.class.isAssignableFrom(groupValue.getClass())) {
      assertEquals(0.0d, groupValue);
    } else if (Long.class.isAssignableFrom(groupValue.getClass())) {
      assertEquals(0L, groupValue);
      // Function based impl
    } else if (MutableValue.class.isAssignableFrom(groupValue.getClass())) {
      assertFalse(((MutableValue) groupValue).exists());
    } else {
      fail();
    }
  }

  /**
   * Adds {@code field} both as an indexed StringField and, when {@code type} is
   * non-null, as the matching doc-values field.
   */
  private void addField(Document doc, String field, String value, DocValues.Type type) {
    doc.add(new StringField(field, value, Field.Store.NO));
    if (type == null) {
      return;
    }
    Field valuesField = null;
    switch (type) {
      case VAR_INTS:
        valuesField = new PackedLongDocValuesField(field, Integer.parseInt(value));
        break;
      case FLOAT_64:
        valuesField = new DoubleDocValuesField(field, Double.parseDouble(value));
        break;
      case BYTES_VAR_STRAIGHT:
        valuesField = new StraightBytesDocValuesField(field, new BytesRef(value));
        break;
      case BYTES_VAR_SORTED:
        valuesField = new SortedBytesDocValuesField(field, new BytesRef(value));
        break;
      // NOTE(review): any other DocValues.Type leaves valuesField null and doc.add(null)
      // would throw; this test only passes the types handled above — confirm if reused.
    }
    doc.add(valuesField);
  }

  /**
   * Builds the second-pass distinct-values collector matching the concrete type
   * of the given first-pass collector (DV-, function- or term-based).
   */
  @SuppressWarnings({"unchecked","rawtypes"})
  private <T extends Comparable> AbstractDistinctValuesCollector<AbstractDistinctValuesCollector.GroupCount<T>> createDistinctCountCollector(AbstractFirstPassGroupingCollector<T> firstPassGroupingCollector, String groupField, String countField, DocValues.Type dvType) {
    Random random = random();
    Collection<SearchGroup<T>> searchGroups = firstPassGroupingCollector.getTopGroups(0, false);
    if (DVFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) {
      boolean diskResident = random.nextBoolean();
      return DVDistinctValuesCollector.create(groupField, countField, searchGroups, diskResident, dvType);
    } else if (FunctionFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) {
      return (AbstractDistinctValuesCollector) new FunctionDistinctValuesCollector(new HashMap<Object, Object>(), new BytesRefFieldSource(groupField), new BytesRefFieldSource(countField), (Collection) searchGroups);
    } else {
      return (AbstractDistinctValuesCollector) new TermDistinctValuesCollector(groupField, countField, (Collection) searchGroups);
    }
  }

  /**
   * Randomly picks one of the first-pass grouping collector implementations;
   * the DV-based one is only eligible when a doc-values type is available.
   */
  @SuppressWarnings({"unchecked","rawtypes"})
  private <T> AbstractFirstPassGroupingCollector<T> createRandomFirstPassCollector(DocValues.Type dvType, Sort groupSort, String groupField, int topNGroups) throws IOException {
    Random random = random();
    if (dvType != null) {
      if (random.nextBoolean()) {
        boolean diskResident = random.nextBoolean();
        return DVFirstPassGroupingCollector.create(groupSort, topNGroups, groupField, dvType, diskResident);
      } else if (random.nextBoolean()) {
        return (AbstractFirstPassGroupingCollector<T>) new FunctionFirstPassGroupingCollector(new BytesRefFieldSource(groupField), new HashMap<Object, Object>(), groupSort, topNGroups);
      } else {
        return (AbstractFirstPassGroupingCollector<T>) new TermFirstPassGroupingCollector(groupField, groupSort, topNGroups);
      }
    } else {
      if (random.nextBoolean()) {
        return (AbstractFirstPassGroupingCollector<T>) new FunctionFirstPassGroupingCollector(new BytesRefFieldSource(groupField), new HashMap<Object, Object>(), groupSort, topNGroups);
      } else {
        return (AbstractFirstPassGroupingCollector<T>) new TermFirstPassGroupingCollector(groupField, groupSort, topNGroups);
      }
    }
  }

  /**
   * Derives the expected top-N group counts for {@code term} from the bookkeeping
   * maps built during indexing (insertion order of the LinkedHashMap supplies the
   * expected doc-id-ascending group order).
   */
  @SuppressWarnings({"unchecked","rawtypes"})
  private List<AbstractDistinctValuesCollector.GroupCount<Comparable<?>>> createExpectedResult(IndexContext context, String term, Sort groupSort, int topN) {
    class GroupCount extends AbstractDistinctValuesCollector.GroupCount<BytesRef> {
      GroupCount(BytesRef groupValue, Collection<BytesRef> uniqueValues) {
        super(groupValue);
        this.uniqueValues.addAll(uniqueValues);
      }
    }
    List result = new ArrayList();
    Map<String, Set<String>> groupCounts = context.searchTermToGroupCounts.get(term);
    int i = 0;
    for (String group : groupCounts.keySet()) {
      if (topN <= i++) {
        break;
      }
      Set<BytesRef> uniqueValues = new HashSet<BytesRef>();
      for (String val : groupCounts.get(group)) {
        uniqueValues.add(val != null ? new BytesRef(val) : null);
      }
      result.add(new GroupCount(group != null ? new BytesRef(group) : null, uniqueValues));
    }
    return result;
  }

  /**
   * Builds a random index of group/count/content fields while recording, per
   * content term, which group values map to which count values — the ground
   * truth consumed by {@link #createExpectedResult}.
   */
  private IndexContext createIndexContext() throws Exception {
    Random random = random();
    DocValues.Type[] dvTypes = new DocValues.Type[]{
        DocValues.Type.BYTES_VAR_STRAIGHT,
        DocValues.Type.BYTES_VAR_SORTED
    };
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(
        random,
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())
      );
    // The 3.x codec cannot write doc values; fall back to term-based fields then.
    boolean canUseDV = !"Lucene3x".equals(w.w.getConfig().getCodec().getName());
    DocValues.Type dvType = canUseDV ? dvTypes[random.nextInt(dvTypes.length)] : null;
    int numDocs = 86 + random.nextInt(1087) * RANDOM_MULTIPLIER;
    String[] groupValues = new String[numDocs / 5];
    String[] countValues = new String[numDocs / 10];
    for (int i = 0; i < groupValues.length; i++) {
      groupValues[i] = generateRandomNonEmptyString();
    }
    for (int i = 0; i < countValues.length; i++) {
      countValues[i] = generateRandomNonEmptyString();
    }
    List<String> contentStrings = new ArrayList<String>();
    Map<String, Map<String, Set<String>>> searchTermToGroupCounts = new HashMap<String, Map<String, Set<String>>>();
    for (int i = 1; i <= numDocs; i++) {
      // Occasionally leave group/count absent to exercise the null paths.
      String groupValue = random.nextInt(23) == 14 ? null : groupValues[random.nextInt(groupValues.length)];
      String countValue = random.nextInt(21) == 13 ? null : countValues[random.nextInt(countValues.length)];
      String content = "random" + random.nextInt(numDocs / 20);
      Map<String, Set<String>> groupToCounts = searchTermToGroupCounts.get(content);
      if (groupToCounts == null) {
        // Groups sort always DOCID asc...
        searchTermToGroupCounts.put(content, groupToCounts = new LinkedHashMap<String, Set<String>>());
        contentStrings.add(content);
      }
      Set<String> countsVals = groupToCounts.get(groupValue);
      if (countsVals == null) {
        groupToCounts.put(groupValue, countsVals = new HashSet<String>());
      }
      countsVals.add(countValue);
      Document doc = new Document();
      doc.add(new StringField("id", String.format(Locale.ROOT, "%09d", i), Field.Store.NO));
      if (groupValue != null) {
        addField(doc, groupField, groupValue, dvType);
      }
      if (countValue != null) {
        addField(doc, countField, countValue, dvType);
      }
      doc.add(new TextField("content", content, Field.Store.NO));
      w.addDocument(doc);
    }
    DirectoryReader reader = w.getReader();
    w.close();
    return new IndexContext(dir, reader, dvType, searchTermToGroupCounts, contentStrings.toArray(new String[contentStrings.size()]));
  }

  /** Immutable bundle of the index plus the ground-truth maps built alongside it. */
  private static class IndexContext {

    final Directory directory;
    final DirectoryReader indexReader;
    final DocValues.Type dvType;
    // term -> (group value -> set of count values); LinkedHashMap preserves first-seen (doc-id) order.
    final Map<String, Map<String, Set<String>>> searchTermToGroupCounts;
    final String[] contentStrings;

    IndexContext(Directory directory, DirectoryReader indexReader, DocValues.Type dvType, Map<String, Map<String, Set<String>>> searchTermToGroupCounts, String[] contentStrings) {
      this.directory = directory;
      this.indexReader = indexReader;
      this.dvType = dvType;
      this.searchTermToGroupCounts = searchTermToGroupCounts;
      this.contentStrings = contentStrings;
    }
  }

  /** Natural-order comparator that sorts nulls before any non-null value. */
  private static class NullComparator implements Comparator<Comparable<?>> {

    @SuppressWarnings({"unchecked","rawtypes"})
    public int compare(Comparable a, Comparable b) {
      if (a == b) {
        return 0;
      } else if (a == null) {
        return -1;
      } else if (b == null) {
        return 1;
      } else {
        return a.compareTo(b);
      }
    }

  }

}
/*
 *
 */
package net.community.apps.apache.maven.pom2cpsync;

import java.awt.Color;
import java.awt.Component;
import java.awt.Font;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Map;

import javax.swing.table.TableCellRenderer;

import net.community.chest.CoVariantReturn;
import net.community.chest.apache.maven.helpers.DependenciesList;
import net.community.chest.ui.helpers.table.TypedTable;

/**
 * <P>Copyright 2008 as per GPLv2</P>
 *
 * Table showing Maven dependency entries, with per-row highlight colors for
 * mismatched/missing dependencies and an in-model filter by mismatch type.
 *
 * @author Lyor G.
 * @since Aug 14, 2008 1:28:02 PM
 */
public class DependencyDetailsTable extends TypedTable<DependencyTargetEntry> {
    /**
     *
     */
    private static final long serialVersionUID = -4466027666470559750L;
    public DependencyDetailsTable (final DependencyDetailsTableModel model)
    {
        super(model);
        setRowSorter(new DependencyDetailsTableRowSorter(model));
    }

    public DependencyDetailsTable ()
    {
        this(new DependencyDetailsTableModel());
    }
    /*
     * @see javax.swing.JTable#getRowSorter()
     */
    @Override
    @CoVariantReturn
    public DependencyDetailsTableRowSorter getRowSorter ()
    {
        return DependencyDetailsTableRowSorter.class.cast(super.getRowSorter());
    }
    /*
     * @see net.community.chest.swing.component.table.TypedTable#getTypedModel()
     */
    @Override
    @CoVariantReturn
    public DependencyDetailsTableModel /* co-variant */ getTypedModel ()
    {
        return DependencyDetailsTableModel.class.cast(super.getTypedModel());
    }
    /**
     * Looks for 1st entry whose group/artifact/version matches the specified one
     * (case <U>insensitive</U>).
     * @param groupId Group ID
     * @param artifactId Artifact name
     * @param version Version name - if null/empty then version is ignored
     * @return Index of 1st match - negative if not found
     */
    public int indexOf (final String groupId, final String artifactId, final String version /* may be null/empty */)
    {
        return DependenciesList.indexOf(getTypedModel(), groupId, artifactId, version);
    }
    /**
     * Looks for 1st entry whose group/artifact matches the specified one
     * (case <U>insensitive</U>).
     * @param groupId Group ID
     * @param artifactId Artifact name
     * @return Index of 1st match - negative if not found
     * @see #indexOf(String, String, String) for specifying a version as well
     */
    public int indexOf (final String groupId, final String artifactId)
    {
        return indexOf(groupId, artifactId, null);
    }

    // last full (unfiltered) dependency list, kept so setFilterMode can re-filter from scratch
    private Collection<? extends DependencyTargetEntry> _lastDeps /* =null */;
    /**
     * Replaces the model contents with the given dependencies.
     * @param deps New dependencies - may be null/empty
     * @param saveDeps If TRUE the collection is remembered as the unfiltered
     * baseline used by {@link #setFilterMode(DependencyMismatchType)}
     */
    private void setDependencies (final Collection<? extends DependencyTargetEntry> deps, final boolean saveDeps)
    {
        final DependencyDetailsTableModel model=getTypedModel();
        final int curItems=model.size(), numDeps=(null == deps) ? 0 : deps.size();
        model.setDependencies(deps);
        if (saveDeps)
            _lastDeps = deps;
        // signal change only if had some items or have some new ones
        if ((curItems > 0) || (numDeps > 0))
            model.fireTableDataChanged();
    }

    public void setDependencies (final Collection<? extends DependencyTargetEntry> deps)
    {
        setDependencies(deps, true);
    }
    /* Since renderer(s) are re-used we need to prepare them every time
     * @see javax.swing.JTable#prepareRenderer(javax.swing.table.TableCellRenderer, int, int)
     */
    @Override
    public Component prepareRenderer (TableCellRenderer renderer, int row, int column)
    {
        final Component c=super.prepareRenderer(renderer, row, column);
        if (null == c)
            return c;

        final DependencyDetailsTableModel    model=getTypedModel();
        final int                            numRows=(null == model) ? 0 : model.size();
        final DependencyTargetEntry        t=((row < 0) || (row >= numRows)) ? null : model.get(row);
        final Color                            cc=(null == t) ? null : t.getMatchColor();
        if (cc != null)
        {
            // highlight mismatched rows: bold font plus the entry's match color as background
            final Font    f=c.getFont(), ef=(null == f) ? null : f.deriveFont(Font.BOLD);
            if (ef != null)
                c.setFont(ef);
            c.setBackground(cc);
        }
        else    // reset, because the same renderer instance may have been colored for another row
            c.setBackground(getBackground());

        return c;
    }
    /**
     * Applies the given (row index, color) pairs as match colors on the model
     * entries and fires a data-changed event so the colors become visible.
     * Null/out-of-range entries are silently skipped.
     * @param cl Row-index to highlight-color mapping - may be null/empty
     */
    public void markMismatchedDependencies (final Collection<? extends Map.Entry<Integer,Color>> cl)
    {
        if ((null == cl) || (cl.size() <= 0))
            return;

        final DependencyDetailsTableModel    model=getTypedModel();
        final int                            numRows=(null == model) ? 0 : model.size();
        for (final Map.Entry<Integer,Color> ce : cl)
        {
            final Integer    rowIndex=(null == ce) ? null : ce.getKey();
            final int        row=(null == rowIndex) ? Integer.MIN_VALUE : rowIndex.intValue();
            final Color        c=(null == ce) ? null : ce.getValue();
            final DependencyTargetEntry    t=((row < 0) || (row >= numRows)) ? null : model.get(row);
            if ((null == t) || (null == c))
                continue;
            t.setMatchColor(c);
        }

        if (model != null)
            model.fireTableDataChanged();
    }

    // current filter; never null - defaults to showing everything
    private DependencyMismatchType    _filterMode=DependencyMismatchType.ALL;
    public DependencyMismatchType getFilterMode ()
    {
        return _filterMode;
    }
    /**
     * Selects from {@code deps} only the entries matching the requested mismatch
     * type, keyed off each entry's match color. A null/ALL mode (or null/empty
     * input) returns the input collection unchanged.
     * @param deps Dependencies to filter
     * @param mode Requested filter mode
     * @return Filtered entries - may be null if none matched
     */
    private static Collection<? extends DependencyTargetEntry> filterDependencies (
            final Collection<? extends DependencyTargetEntry>    deps,
            final DependencyMismatchType                        mode)
    {
        if ((null == mode) || DependencyMismatchType.ALL.equals(mode)
         || (null == deps) || (deps.size() <= 0))
            return deps;

        Collection<DependencyTargetEntry>    ret=null;
        for (final DependencyTargetEntry tgt : deps)
        {
            if (null == tgt)
                continue;

            // a null match color means the entry matched; otherwise the color encodes the mismatch kind
            final Color    c=tgt.getMatchColor();
            switch(mode)
            {
                case ALL        :
                    break;    // should not happen
                case MATCHING    :
                    if (c != null)
                        continue;
                    break;
                case MISMATCHED    :
                    if ((null == c) || (!DependencyTargetEntry.BAD_VERSION_COLOR.equals(c)))
                        continue;
                    break;
                case MISSING    :
                    if ((null == c) || (!DependencyTargetEntry.NO_ENTRY_COLOR.equals(c)))
                        continue;
                    break;
                default            :    // should not happen
            }

            if (null == ret)
                ret = new LinkedList<DependencyTargetEntry>();
            ret.add(tgt);
        }

        return ret;
    }
    // filtering is implemented here instead of the sorter since doing it in the sorter causes a row number mismatch with "prepareRenderer"
    public void setFilterMode (final DependencyMismatchType mode)
    {
        if (null == (_filterMode=mode))
            _filterMode = DependencyMismatchType.ALL;
        // filter from the saved unfiltered baseline, without overwriting it (saveDeps=false)
        final Collection<? extends DependencyTargetEntry>    deps=filterDependencies(_lastDeps, mode);
        setDependencies(deps, false);
    }
}
package mariculture.api.fishery.fish;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import mariculture.api.fishery.Fishing;
import mariculture.core.util.MCTranslate;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.StatCollector;

/**
 * Base class for a single heritable fish trait ("DNA part").
 * <p>
 * Each trait stores a dominant value ({@link #getHigherString()}), a recessive
 * value ({@link #getLowerString()}) and an egg gene-pool array
 * ({@link #getEggString()}) in the item stack's NBT, either at the root of the
 * stack compound or inside the sub-compound named by {@link #category} when it
 * is non-null. Subclasses register themselves via {@link #register()}.
 */
public class FishDNABase {
    /** Optional NBT sub-compound name this trait writes under; null = stack tag root. */
    protected String category;

    /** Global registry of every DNA part, in registration order. */
    public static final ArrayList<FishDNABase> DNAParts = new ArrayList<FishDNABase>();

    /** Registers this DNA part in {@link #DNAParts} and returns {@code this} for chaining. */
    public FishDNABase register() {
        DNAParts.add(this);
        return this;
    }

    /**
     * Returns the trait name derived from the subclass's simple name with its
     * first seven characters (the {@code FishDNA} prefix) stripped; subclasses
     * are therefore expected to be named {@code FishDNAXxx}.
     */
    public String getName() {
        return this.getClass().getSimpleName().substring(7);
    }

    /** The chance that this DNA is extracted by an extractor, 1 in this many **/
    public int getCopyChance() {
        return 10;
    }

    /** The name of the string the egg array saves this in **/
    public String getEggString() {
        return getName() + "List";
    }

    /** The name of the string to save the Dominant part of the gene as **/
    public String getHigherString() {
        return getName();
    }

    /** The name of the string to save the Recessive part of the gene as **/
    public String getLowerString() {
        return "lower" + getName();
    }

    /** Add information about this piece of DNA to the list if necessary **/
    public void getInformationDisplay(ItemStack stack, List list) {
        // Do Nothing
    }

    /**
     * Attempt to cause a mutation. The default implementation never mutates:
     * it returns both parents' values unchanged.
     */
    public int[] attemptMutation(int parent1dna, int parent2dna) {
        return new int[] { parent1dna, parent2dna };
    }

    /**
     * Return a list of these based on the dominance, dominant goes first.
     * Default implementation keeps the given order.
     */
    public int[] getDominant(int option1, int option2, Random rand) {
        return new int[] { option1, option2 };
    }

    /**
     * Orders the two options by dominance: the option whose dominance value is
     * lower goes first; ties are broken randomly.
     */
    public int[] swapDominance(int dominance1, int dominance2, int option1, int option2, Random rand) {
        int[] array = new int[2];
        if (dominance1 == dominance2) {
            // equal dominance: random order
            if (rand.nextInt(2) == 0) {
                array[0] = option1;
                array[1] = option2;
            } else {
                array[0] = option2;
                array[1] = option1;
            }
        } else if (dominance1 < dominance2) {
            array[0] = option1;
            array[1] = option2;
        } else {
            array[0] = option2;
            array[1] = option1;
        }

        return array;
    }

    /**
     * Return the data needed for this piece of dna if it's coming from the
     * species file. Default: -1 (not species-derived).
     */
    public Integer getDNAFromSpecies(FishSpecies species) {
        return -1;
    }

    //Everything below this point is mostly irrelevant when adding new dna except in special cases

    /**
     * Returns the compound this trait should write into, creating the stack tag
     * (and the category sub-compound) as needed. Callers must pass the result to
     * {@link #commitTag(ItemStack, NBTTagCompound)} after mutating it.
     */
    private NBTTagCompound writeTarget(ItemStack stack) {
        if (!stack.hasTagCompound()) {
            stack.setTagCompound(new NBTTagCompound());
        }

        if (category == null) return stack.stackTagCompound;

        NBTTagCompound tag = stack.stackTagCompound.getCompoundTag(category);
        if (tag == null) {
            tag = new NBTTagCompound();
        }

        return tag;
    }

    /** Re-attaches a category sub-compound after modification; no-op for root writes. */
    private void commitTag(ItemStack stack, NBTTagCompound tag) {
        if (category != null) {
            stack.stackTagCompound.setTag(category, tag);
        }
    }

    /**
     * Returns the compound this trait reads from (category sub-compound or stack
     * tag root). Assumes the stack already has a tag compound.
     */
    private NBTTagCompound readTarget(ItemStack stack) {
        if (category == null) return stack.stackTagCompound;

        NBTTagCompound tag = stack.stackTagCompound.getCompoundTag(category);
        if (tag == null) {
            tag = new NBTTagCompound();
        }

        return tag;
    }

    /** Automatically called when generating a fish **/
    public ItemStack addDNA(ItemStack stack, Integer data) {
        NBTTagCompound tag = writeTarget(stack);
        tag.setInteger(getHigherString(), data);
        commitTag(stack, tag);
        return stack;
    }

    /** Automatically called when generating a fish **/
    public ItemStack addLowerDNA(ItemStack stack, Integer data) {
        NBTTagCompound tag = writeTarget(stack);
        tag.setInteger(getLowerString(), data);
        commitTag(stack, tag);
        return stack;
    }

    /** Automatically called when generating a fish **/
    public void addDNAList(ItemStack stack, int[] data) {
        NBTTagCompound tag = writeTarget(stack);
        tag.setIntArray(getEggString(), data);
        commitTag(stack, tag);
    }

    /**
     * Automatically checks if the egg has the dna it should have.
     * Fixed: previously dereferenced {@code egg.stackTagCompound} without
     * checking {@code hasTagCompound()}, throwing an NPE for tag-less stacks;
     * such eggs now simply report no data.
     */
    public boolean hasEggData(ItemStack egg) {
        if (egg == null || !egg.hasTagCompound()) return false;
        return readTarget(egg).hasKey(getEggString());
    }

    /**
     * Automatically called when reading a fish. For root-stored traits, lazily
     * back-fills missing DNA from the species file when a SpeciesID is present.
     */
    public Integer getDNA(ItemStack stack) {
        if (stack == null || !stack.hasTagCompound()) return 0;
        if (category == null) {
            if (!stack.stackTagCompound.hasKey(getHigherString()) && stack.stackTagCompound.hasKey("SpeciesID")) {
                FishSpecies species = Fishing.fishHelper.getSpecies(stack);
                addDNA(stack, getDNAFromSpecies(species));
            }

            return stack.stackTagCompound.getInteger(getHigherString());
        }

        return readTarget(stack).getInteger(getHigherString());
    }

    /** Automatically called when reading a fish; recessive counterpart of {@link #getDNA}. **/
    public Integer getLowerDNA(ItemStack stack) {
        if (stack == null || !stack.hasTagCompound()) return 0;
        if (category == null) {
            if (!stack.stackTagCompound.hasKey(getLowerString()) && stack.stackTagCompound.hasKey("lowerSpeciesID")) {
                FishSpecies species = Fishing.fishHelper.getSpecies(stack);
                addLowerDNA(stack, getDNAFromSpecies(species));
            }

            return stack.stackTagCompound.getInteger(getLowerString());
        }

        return readTarget(stack).getInteger(getLowerString());
    }

    /**
     * Automatically called when reading a fish.
     * Fixed: previously dereferenced {@code stack.stackTagCompound} without a
     * null check; tag-less stacks now return an empty array instead of NPE-ing.
     */
    public int[] getDNAList(ItemStack stack) {
        if (stack == null || !stack.hasTagCompound()) return new int[0];
        return readTarget(stack).getIntArray(getEggString());
    }

    /** Returns the display data for this dna type in the fish scanner, make sure you return a string with three length **/
    public String[] getScannedDisplay(ItemStack stack) {
        return getScannedDisplay(stack, true);
    }

    // NOTE: the "numbers" flag is currently unused by the base implementation;
    // subclasses may honor it.
    public String[] getScannedDisplay(ItemStack stack, boolean numbers) {
        return new String[] { StatCollector.translateToLocal("mariculture.fish.data." + getName().toLowerCase()), "" + getDNA(stack), "" + getLowerDNA(stack) };
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.odbc.odbc;

import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.query.FieldsQueryCursor;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.binary.BinaryWriterExImpl;
import org.apache.ignite.internal.binary.GridBinaryMarshaller;
import org.apache.ignite.internal.processors.cache.query.SqlFieldsQueryEx;
import org.apache.ignite.internal.processors.cache.QueryCursorImpl;
import org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode;
import org.apache.ignite.internal.processors.odbc.ClientListenerRequest;
import org.apache.ignite.internal.processors.odbc.ClientListenerRequestHandler;
import org.apache.ignite.internal.processors.odbc.ClientListenerResponse;
import org.apache.ignite.internal.processors.odbc.odbc.escape.OdbcEscapeUtils;
import org.apache.ignite.internal.processors.query.GridQueryIndexing;
import org.apache.ignite.internal.processors.query.GridQueryTypeDescriptor;
import org.apache.ignite.internal.util.GridSpinBusyLock;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;

import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.META_COLS;
import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.META_PARAMS;
import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.META_TBLS;
import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.MORE_RESULTS;
import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.QRY_CLOSE;
import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.QRY_EXEC;
import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.QRY_EXEC_BATCH;
import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.QRY_FETCH;

/**
 * SQL query handler.
 * <p>
 * Dispatches ODBC protocol requests (query execution, fetch, metadata) against
 * the kernal query facilities. Open cursors are tracked in a concurrent map;
 * node shutdown is coordinated through the supplied busy lock.
 */
public class OdbcRequestHandler implements ClientListenerRequestHandler {
    /** Query ID sequence. */
    private static final AtomicLong QRY_ID_GEN = new AtomicLong();

    /** Kernel context. */
    private final GridKernalContext ctx;

    /** Logger. */
    private final IgniteLogger log;

    /** Busy lock. */
    private final GridSpinBusyLock busyLock;

    /** Maximum allowed cursors. */
    private final int maxCursors;

    /** Current queries cursors. */
    private final ConcurrentHashMap<Long, OdbcQueryResults> qryResults = new ConcurrentHashMap<>();

    /** Distributed joins flag. */
    private final boolean distributedJoins;

    /** Enforce join order flag. */
    private final boolean enforceJoinOrder;

    /** Replicated only flag. */
    private final boolean replicatedOnly;

    /** Collocated flag. */
    private final boolean collocated;

    /** Lazy flag. */
    private final boolean lazy;

    /** Update on server flag. */
    private final boolean skipReducerOnUpdate;

    /**
     * Constructor.
     * @param ctx Context.
     * @param busyLock Shutdown latch.
     * @param maxCursors Maximum allowed cursors.
     * @param distributedJoins Distributed joins flag.
     * @param enforceJoinOrder Enforce join order flag.
     * @param replicatedOnly Replicated only flag.
     * @param collocated Collocated flag.
     * @param lazy Lazy flag.
     * @param skipReducerOnUpdate Skip reducer on update flag.
     */
    public OdbcRequestHandler(GridKernalContext ctx, GridSpinBusyLock busyLock, int maxCursors,
        boolean distributedJoins, boolean enforceJoinOrder, boolean replicatedOnly, boolean collocated,
        boolean lazy, boolean skipReducerOnUpdate) {
        this.ctx = ctx;
        this.busyLock = busyLock;
        this.maxCursors = maxCursors;
        this.distributedJoins = distributedJoins;
        this.enforceJoinOrder = enforceJoinOrder;
        this.replicatedOnly = replicatedOnly;
        this.collocated = collocated;
        this.lazy = lazy;
        this.skipReducerOnUpdate = skipReducerOnUpdate;

        log = ctx.log(getClass());
    }

    /** {@inheritDoc} */
    @Override public ClientListenerResponse handle(ClientListenerRequest req0) {
        assert req0 != null;

        OdbcRequest req = (OdbcRequest)req0;

        // Refuse new work while the node is shutting down.
        if (!busyLock.enterBusy())
            return new OdbcResponse(IgniteQueryErrorCode.UNKNOWN,
                    "Failed to handle ODBC request because node is stopping: " + req);

        try {
            switch (req.command()) {
                case QRY_EXEC:
                    return executeQuery((OdbcQueryExecuteRequest)req);

                case QRY_EXEC_BATCH:
                    return executeBatchQuery((OdbcQueryExecuteBatchRequest)req);

                case QRY_FETCH:
                    return fetchQuery((OdbcQueryFetchRequest)req);

                case QRY_CLOSE:
                    return closeQuery((OdbcQueryCloseRequest)req);

                case META_COLS:
                    return getColumnsMeta((OdbcQueryGetColumnsMetaRequest)req);

                case META_TBLS:
                    return getTablesMeta((OdbcQueryGetTablesMetaRequest)req);

                case META_PARAMS:
                    return getParamsMeta((OdbcQueryGetParamsMetaRequest)req);

                case MORE_RESULTS:
                    return moreResults((OdbcQueryMoreResultsRequest)req);
            }

            return new OdbcResponse(IgniteQueryErrorCode.UNKNOWN, "Unsupported ODBC request: " + req);
        }
        finally {
            busyLock.leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public ClientListenerResponse handleException(Exception e, ClientListenerRequest req) {
        return exceptionToResult(e);
    }

    /** {@inheritDoc} */
    @Override public void writeHandshake(BinaryWriterExImpl writer) {
        writer.writeBoolean(true);
    }

    /**
     * Called whenever client is disconnected due to correct connection close
     * or due to {@code IOException} during network operations.
     */
    public void onDisconnect() {
        if (busyLock.enterBusy())
        {
            try
            {
                // NOTE(review): cursors are closed but not removed from qryResults —
                // presumably the handler instance is discarded after disconnect; confirm.
                for (OdbcQueryResults res : qryResults.values())
                    res.closeAll();
            }
            finally {
                busyLock.leaveBusy();
            }
        }
    }

    /**
     * Make query considering handler configuration.
     * @param schema Schema.
     * @param sql SQL request.
     * @param args Arguments.
     * @return Query instance.
     */
    private SqlFieldsQueryEx makeQuery(String schema, String sql, Object[] args) {
        SqlFieldsQueryEx qry = new SqlFieldsQueryEx(sql, null);

        qry.setArgs(args);

        qry.setDistributedJoins(distributedJoins);
        qry.setEnforceJoinOrder(enforceJoinOrder);
        qry.setReplicatedOnly(replicatedOnly);
        qry.setCollocated(collocated);
        qry.setLazy(lazy);
        qry.setSchema(schema);
        qry.setSkipReducerOnUpdate(skipReducerOnUpdate);

        return qry;
    }

    /**
     * {@link OdbcQueryExecuteRequest} command handler.
     *
     * @param req Execute query request.
     * @return Response.
     */
    private ClientListenerResponse executeQuery(OdbcQueryExecuteRequest req) {
        int cursorCnt = qryResults.size();

        if (maxCursors > 0 && cursorCnt >= maxCursors)
            return new OdbcResponse(IgniteQueryErrorCode.UNKNOWN, "Too many open cursors (either close " +
                "other open cursors or increase the limit through " +
                "ClientConnectorConfiguration.maxOpenCursorsPerConnection) [maximum=" + maxCursors +
                ", current=" + cursorCnt + ']');

        long qryId = QRY_ID_GEN.getAndIncrement();

        try {
            // Translate ODBC escape sequences before handing the SQL to the engine.
            String sql = OdbcEscapeUtils.parse(req.sqlQuery());

            if (log.isDebugEnabled())
                log.debug("ODBC query parsed [reqId=" + req.requestId() + ", original=" + req.sqlQuery() +
                    ", parsed=" + sql + ']');

            SqlFieldsQuery qry = makeQuery(req.schema(), sql, req.arguments());

            List<FieldsQueryCursor<List<?>>> cursors = ctx.query().querySqlFieldsNoCache(qry, true, false);

            OdbcQueryResults results = new OdbcQueryResults(cursors);

            // Don't keep fully-consumed results around; otherwise register under the new query id.
            if (!results.hasUnfetchedRows())
                results.closeAll();
            else
                qryResults.put(qryId, results);

            OdbcQueryExecuteResult res = new OdbcQueryExecuteResult(qryId, results.currentResultSet().fieldsMeta(),
                results.rowsAffected());

            return new OdbcResponse(res);
        }
        catch (Exception e) {
            qryResults.remove(qryId);

            U.error(log, "Failed to execute SQL query [reqId=" + req.requestId() + ", req=" + req + ']', e);

            return exceptionToResult(e);
        }
    }

    /**
     * {@link OdbcQueryExecuteBatchRequest} command handler.
     *
     * @param req Execute query request.
     * @return Response.
     */
    private ClientListenerResponse executeBatchQuery(OdbcQueryExecuteBatchRequest req) {
        List<Long> rowsAffected = new ArrayList<>(req.arguments().length);
        int currentSet = 0;

        try {
            String sql = OdbcEscapeUtils.parse(req.sqlQuery());

            if (log.isDebugEnabled())
                log.debug("ODBC query parsed [reqId=" + req.requestId() + ", original=" + req.sqlQuery() +
                    ", parsed=" + sql + ']');

            SqlFieldsQuery qry = makeQuery(req.schema(), sql, req.arguments());

            Object[][] paramSet = req.arguments();

            if (paramSet.length <= 0)
                throw new IgniteException("Batch execute request with non-positive batch length. [len="
                    + paramSet.length + ']');

            // Getting meta and do the checks for the first execution.
            qry.setArgs(paramSet[0]);

            QueryCursorImpl<List<?>> qryCur = (QueryCursorImpl<List<?>>)ctx.query().querySqlFieldsNoCache(qry, true);

            if (qryCur.isQuery())
                throw new IgniteException("Batching of parameters only supported for DML statements. [query=" +
                    req.sqlQuery() + ']');

            rowsAffected.add(OdbcUtils.rowsAffected(qryCur));

            // Remaining parameter sets reuse the same prepared query object.
            for (currentSet = 1; currentSet < paramSet.length; ++currentSet)
                rowsAffected.add(executeQuery(qry, paramSet[currentSet]));

            OdbcQueryExecuteBatchResult res = new OdbcQueryExecuteBatchResult(rowsAffected);

            return new OdbcResponse(res);
        }
        catch (Exception e) {
            U.error(log, "Failed to execute SQL query [reqId=" + req.requestId() + ", req=" + req + ']', e);

            // currentSet tells the client how far the batch got before failing.
            return exceptionToBatchResult(e, rowsAffected, currentSet);
        }
    }

    /**
     * Execute query.
     * @param qry Query
     * @param row Row
     * @return Affected rows.
     */
    private long executeQuery(SqlFieldsQuery qry, Object[] row) {
        qry.setArgs(row);

        QueryCursor<List<?>> cur = ctx.query().querySqlFieldsNoCache(qry, true);

        return OdbcUtils.rowsAffected(cur);
    }

    /**
     * {@link OdbcQueryCloseRequest} command handler.
     *
     * @param req Execute query request.
     * @return Response.
     */
    private ClientListenerResponse closeQuery(OdbcQueryCloseRequest req) {
        long queryId = req.queryId();

        try {
            OdbcQueryResults results = qryResults.get(queryId);

            if (results == null)
                return new OdbcResponse(IgniteQueryErrorCode.UNKNOWN,
                    "Failed to find query with ID: " + queryId);

            CloseCursor(results, queryId);

            OdbcQueryCloseResult res = new OdbcQueryCloseResult(queryId);

            return new OdbcResponse(res);
        }
        catch (Exception e) {
            qryResults.remove(queryId);

            U.error(log, "Failed to close SQL query [reqId=" + req.requestId() + ", req=" + queryId + ']', e);

            return exceptionToResult(e);
        }
    }

    /**
     * {@link OdbcQueryFetchRequest} command handler.
     *
     * @param req Execute query request.
     * @return Response.
     */
    private ClientListenerResponse fetchQuery(OdbcQueryFetchRequest req) {
        try {
            long queryId = req.queryId();
            OdbcQueryResults results = qryResults.get(queryId);

            if (results == null)
                // NOTE(review): uses STATUS_FAILED here while closeQuery uses
                // IgniteQueryErrorCode.UNKNOWN for the same condition — confirm intended.
                return new OdbcResponse(ClientListenerResponse.STATUS_FAILED,
                    "Failed to find query with ID: " + queryId);

            OdbcResultSet set = results.currentResultSet();

            List<Object> items = set.fetch(req.pageSize());

            boolean lastPage = !set.hasUnfetchedRows();

            // Automatically closing cursor if no more data is available.
            if (!results.hasUnfetchedRows())
                CloseCursor(results, queryId);

            OdbcQueryFetchResult res = new OdbcQueryFetchResult(queryId, items, lastPage);

            return new OdbcResponse(res);
        }
        catch (Exception e) {
            U.error(log, "Failed to fetch SQL query result [reqId=" + req.requestId() + ", req=" + req + ']', e);

            return exceptionToResult(e);
        }
    }

    /**
     * {@link OdbcQueryGetColumnsMetaRequest} command handler.
     *
     * @param req Get columns metadata request.
     * @return Response.
     */
    private ClientListenerResponse getColumnsMeta(OdbcQueryGetColumnsMetaRequest req) {
        try {
            List<OdbcColumnMeta> meta = new ArrayList<>();

            String schemaPattern;
            String tablePattern;

            if (req.tablePattern().contains(".")) {
                // Parsing two-part table name.
                String[] parts = req.tablePattern().split("\\.");

                schemaPattern = OdbcUtils.removeQuotationMarksIfNeeded(parts[0]);

                tablePattern = parts[1];
            }
            else {
                schemaPattern = OdbcUtils.removeQuotationMarksIfNeeded(req.schemaPattern());

                tablePattern = req.tablePattern();
            }

            GridQueryIndexing indexing = ctx.query().getIndexing();

            for (String cacheName : ctx.cache().cacheNames()) {
                String cacheSchema = indexing.schema(cacheName);

                if (!matches(cacheSchema, schemaPattern))
                    continue;

                Collection<GridQueryTypeDescriptor> tablesMeta = ctx.query().types(cacheName);

                for (GridQueryTypeDescriptor table : tablesMeta) {
                    if (!matches(table.name(), tablePattern))
                        continue;

                    for (Map.Entry<String, Class<?>> field : table.fields().entrySet()) {
                        if (!matches(field.getKey(), req.columnPattern()))
                            continue;

                        OdbcColumnMeta columnMeta = new OdbcColumnMeta(cacheSchema, table.name(),
                            field.getKey(), field.getValue());

                        // De-duplicate: several caches may expose the same column.
                        if (!meta.contains(columnMeta))
                            meta.add(columnMeta);
                    }
                }
            }

            OdbcQueryGetColumnsMetaResult res = new OdbcQueryGetColumnsMetaResult(meta);

            return new OdbcResponse(res);
        }
        catch (Exception e) {
            U.error(log, "Failed to get columns metadata [reqId=" + req.requestId() + ", req=" + req + ']', e);

            return exceptionToResult(e);
        }
    }

    /**
     * {@link OdbcQueryGetTablesMetaRequest} command handler.
     *
     * @param req Get tables metadata request.
     * @return Response.
     */
    private ClientListenerResponse getTablesMeta(OdbcQueryGetTablesMetaRequest req) {
        try {
            List<OdbcTableMeta> meta = new ArrayList<>();

            String schemaPattern = OdbcUtils.removeQuotationMarksIfNeeded(req.schema());

            GridQueryIndexing indexing = ctx.query().getIndexing();

            for (String cacheName : ctx.cache().cacheNames()) {
                String cacheSchema = indexing.schema(cacheName);

                if (!matches(cacheSchema, schemaPattern))
                    continue;

                Collection<GridQueryTypeDescriptor> tablesMeta = ctx.query().types(cacheName);

                for (GridQueryTypeDescriptor table : tablesMeta) {
                    if (!matches(table.name(), req.table()))
                        continue;

                    // Only plain tables are reported; no views or system tables.
                    if (!matches("TABLE", req.tableType()))
                        continue;

                    OdbcTableMeta tableMeta = new OdbcTableMeta(null, cacheName, table.name(), "TABLE");

                    if (!meta.contains(tableMeta))
                        meta.add(tableMeta);
                }
            }

            OdbcQueryGetTablesMetaResult res = new OdbcQueryGetTablesMetaResult(meta);

            return new OdbcResponse(res);
        }
        catch (Exception e) {
            U.error(log, "Failed to get tables metadata [reqId=" + req.requestId() + ", req=" + req + ']', e);

            return exceptionToResult(e);
        }
    }

    /**
     * {@link OdbcQueryGetParamsMetaRequest} command handler.
     *
     * @param req Get params metadata request.
     * @return Response.
     */
    private ClientListenerResponse getParamsMeta(OdbcQueryGetParamsMetaRequest req) {
        try {
            PreparedStatement stmt = ctx.query().getIndexing().prepareNativeStatement(req.schema(), req.query());

            ParameterMetaData pmd = stmt.getParameterMetaData();

            byte[] typeIds = new byte[pmd.getParameterCount()];

            // JDBC parameter indexes are 1-based; typeIds is 0-based.
            for (int i = 1; i <= pmd.getParameterCount(); ++i) {
                int sqlType = pmd.getParameterType(i);

                typeIds[i - 1] = sqlTypeToBinary(sqlType);
            }

            OdbcQueryGetParamsMetaResult res = new OdbcQueryGetParamsMetaResult(typeIds);

            return new OdbcResponse(res);
        }
        catch (Exception e) {
            U.error(log, "Failed to get params metadata [reqId=" + req.requestId() + ", req=" + req + ']', e);

            return exceptionToResult(e);
        }
    }

    /**
     * {@link OdbcQueryMoreResultsRequest} command handler.
     *
     * @param req Execute query request.
     * @return Response.
     */
    private ClientListenerResponse moreResults(OdbcQueryMoreResultsRequest req) {
        try {
            long queryId = req.queryId();
            OdbcQueryResults results = qryResults.get(queryId);

            if (results == null)
                return new OdbcResponse(ClientListenerResponse.STATUS_FAILED,
                    "Failed to find query with ID: " + queryId);

            // Advance to the next result set of a multi-statement query.
            results.nextResultSet();

            OdbcResultSet set = results.currentResultSet();

            List<Object> items = set.fetch(req.pageSize());

            boolean lastPage = !set.hasUnfetchedRows();

            // Automatically closing cursor if no more data is available.
            if (!results.hasUnfetchedRows())
                CloseCursor(results, queryId);

            OdbcQueryMoreResultsResult res = new OdbcQueryMoreResultsResult(queryId, items, lastPage);

            return new OdbcResponse(res);
        }
        catch (Exception e) {
            U.error(log, "Failed to get more SQL query results [reqId=" + req.requestId() +
                ", req=" + req + ']', e);

            return exceptionToResult(e);
        }
    }

    /**
     * Close cursor.
     * NOTE(review): method name violates Java naming conventions (should be
     * {@code closeCursor}); renaming would touch all call sites in this class.
     * @param results Query map element.
     * @param queryId Query ID.
     */
    private void CloseCursor(OdbcQueryResults results, long queryId) {
        assert(results != null);

        results.closeAll();

        qryResults.remove(queryId);
    }

    /**
     * Convert {@link java.sql.Types} to binary type constant (See {@link GridBinaryMarshaller} constants).
     *
     * @param sqlType SQL type.
     * @return Binary type.
*/ private static byte sqlTypeToBinary(int sqlType) { switch (sqlType) { case Types.BIGINT: return GridBinaryMarshaller.LONG; case Types.BOOLEAN: return GridBinaryMarshaller.BOOLEAN; case Types.DATE: return GridBinaryMarshaller.DATE; case Types.DOUBLE: return GridBinaryMarshaller.DOUBLE; case Types.FLOAT: case Types.REAL: return GridBinaryMarshaller.FLOAT; case Types.NUMERIC: case Types.DECIMAL: return GridBinaryMarshaller.DECIMAL; case Types.INTEGER: return GridBinaryMarshaller.INT; case Types.SMALLINT: return GridBinaryMarshaller.SHORT; case Types.TIME: return GridBinaryMarshaller.TIME; case Types.TIMESTAMP: return GridBinaryMarshaller.TIMESTAMP; case Types.TINYINT: return GridBinaryMarshaller.BYTE; case Types.CHAR: case Types.VARCHAR: case Types.LONGNVARCHAR: return GridBinaryMarshaller.STRING; case Types.NULL: return GridBinaryMarshaller.NULL; case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: default: return GridBinaryMarshaller.BYTE_ARR; } } /** * Checks whether string matches SQL pattern. * * @param str String. * @param ptrn Pattern. * @return Whether string matches pattern. */ private static boolean matches(String str, String ptrn) { return str != null && (F.isEmpty(ptrn) || str.toUpperCase().matches(ptrn.toUpperCase().replace("%", ".*").replace("_", "."))); } /** * Create {@link OdbcResponse} bearing appropriate Ignite specific result code if possible * from given {@link Exception}. * * @param e Exception to convert. * @return resulting {@link OdbcResponse}. */ private OdbcResponse exceptionToBatchResult(Exception e, Collection<Long> rowsAffected, long currentSet) { OdbcQueryExecuteBatchResult res = new OdbcQueryExecuteBatchResult(rowsAffected, currentSet, OdbcUtils.tryRetrieveSqlErrorCode(e), OdbcUtils.tryRetrieveH2ErrorMessage(e)); return new OdbcResponse(res); } /** * Create {@link OdbcResponse} bearing appropriate Ignite specific result code if possible * from given {@link Exception}. * * @param e Exception to convert. 
* @return resulting {@link OdbcResponse}. */ private OdbcResponse exceptionToResult(Exception e) { return new OdbcResponse(OdbcUtils.tryRetrieveSqlErrorCode(e), OdbcUtils.tryRetrieveH2ErrorMessage(e)); } }
/*
 * Copyright (C) 2010 Moduad Co., Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.androidpn.client;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Future;

import org.jivesoftware.smack.Chat;
import org.jivesoftware.smack.ConnectionConfiguration;
import org.jivesoftware.smack.ConnectionConfiguration.SecurityMode;
import org.jivesoftware.smack.ConnectionListener;
import org.jivesoftware.smack.MessageListener;
import org.jivesoftware.smack.PacketListener;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.filter.AndFilter;
import org.jivesoftware.smack.filter.PacketFilter;
import org.jivesoftware.smack.filter.PacketIDFilter;
import org.jivesoftware.smack.filter.PacketTypeFilter;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.packet.Registration;
import org.jivesoftware.smack.provider.ProviderManager;

import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Handler;
import android.util.Log;

/**
 * This class is to manage the XMPP connection between client and server.
 *
 * Tasks (connect, register, login, send) are queued in {@link #taskList} and
 * executed strictly one at a time: each task is expected to call
 * {@link #runTask()} when it finishes so the next queued task can start.
 *
 * @author Sehwan Noh (devnoh@gmail.com)
 */
public class XmppManager {

    private static final String LOGTAG = LogUtil.makeLogTag(XmppManager.class);

    // XMPP resource used when binding the login session.
    private static final String XMPP_RESOURCE_NAME = "AndroidpnClient";

    private Context context;

    // Executes queued tasks on the service's worker; see NotificationService.
    private NotificationService.TaskSubmitter taskSubmitter;

    // Counts outstanding tasks; incremented on addTask, decremented when done.
    private NotificationService.TaskTracker taskTracker;

    private SharedPreferences sharedPrefs;

    private String xmppHost;

    private int xmppPort;

    private XMPPConnection connection;

    private String username;

    private String password;

    private ConnectionListener connectionListener;

    private PacketListener notificationPacketListener;

    private Handler handler;

    // Pending tasks waiting for the currently running one to finish.
    private List<Runnable> taskList;

    // True while a task is executing; guarded by synchronized (taskList).
    private boolean running = false;

    private Future<?> futureTask;

    private Thread reconnection;

    public XmppManager(NotificationService notificationService) {
        context = notificationService;
        taskSubmitter = notificationService.getTaskSubmitter();
        taskTracker = notificationService.getTaskTracker();
        sharedPrefs = notificationService.getSharedPreferences();

        xmppHost = sharedPrefs.getString(Constants.XMPP_HOST, "localhost");
        xmppPort = sharedPrefs.getInt(Constants.XMPP_PORT, 5222);
        username = sharedPrefs.getString(Constants.XMPP_USERNAME, "");
        password = sharedPrefs.getString(Constants.XMPP_PASSWORD, "");

        connectionListener = new PersistentConnectionListener(this);
        notificationPacketListener = new NotificationPacketListener(this);

        handler = new Handler();
        taskList = new ArrayList<Runnable>();
        reconnection = new ReconnectionThread(this);
    }

    public Context getContext() {
        return context;
    }

    /** Queues the full connect/register/login task chain. */
    public void connect() {
        Log.d(LOGTAG, "connect()...");
        submitLoginTask();
        // submitSendTask();
    }

    /** Tears down the persistent connection (asynchronously, via the task queue). */
    public void disconnect() {
        Log.d(LOGTAG, "disconnect()...");
        terminatePersistentConnection();
    }

    /**
     * Queues a task that removes the notification listener and disconnects,
     * then hands control back to the task queue.
     */
    public void terminatePersistentConnection() {
        Log.d(LOGTAG, "terminatePersistentConnection()...");
        Runnable runnable = new Runnable() {

            final XmppManager xmppManager = XmppManager.this;

            public void run() {
                if (xmppManager.isConnected()) {
                    Log.d(LOGTAG, "terminatePersistentConnection()... run()");
                    xmppManager.getConnection().removePacketListener(
                            xmppManager.getNotificationPacketListener());
                    xmppManager.getConnection().disconnect();
                }
                xmppManager.runTask();
            }

        };
        addTask(runnable);
    }

    public XMPPConnection getConnection() {
        return connection;
    }

    public void setConnection(XMPPConnection connection) {
        this.connection = connection;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public ConnectionListener getConnectionListener() {
        return connectionListener;
    }

    public PacketListener getNotificationPacketListener() {
        return notificationPacketListener;
    }

    /**
     * Starts the reconnection thread unless it is already running.
     *
     * NOTE(review): a Thread cannot be restarted once it has terminated — after
     * the first reconnection cycle completes, isAlive() is false but start()
     * would throw IllegalThreadStateException; confirm ReconnectionThread
     * never exits, or that a fresh instance is created elsewhere.
     */
    public void startReconnectionThread() {
        synchronized (reconnection) {
            if (!reconnection.isAlive()) {
                reconnection.setName("Xmpp Reconnection Thread");
                reconnection.start();
            }
        }
    }

    public Handler getHandler() {
        return handler;
    }

    /** Drops the stored credentials and queues a fresh register/login cycle. */
    public void reregisterAccount() {
        removeAccount();
        submitLoginTask();
        runTask();
    }

    public List<Runnable> getTaskList() {
        return taskList;
    }

    public Future<?> getFutureTask() {
        return futureTask;
    }

    /**
     * Marks the current task finished and starts the next queued task, if any.
     * Every queued task must eventually call this exactly once.
     */
    public void runTask() {
        Log.d(LOGTAG, "runTask()...");
        // NOTE(review): connection may still be null here (runTask can run
        // before ConnectTask) — this dereference risks an NPE; verify.
        String user = connection.getUser();
        Log.v("Kite", "xmpp userName is " + user);
        synchronized (taskList) {
            running = false;
            futureTask = null;
            if (!taskList.isEmpty()) {
                Runnable runnable = (Runnable) taskList.get(0);
                taskList.remove(0);
                running = true;
                futureTask = taskSubmitter.submit(runnable);
                if (futureTask == null) {
                    // Submitter rejected the task (service stopped); balance the tracker.
                    taskTracker.decrease();
                }
            }
        }
        // Accounts for the task that just completed.
        taskTracker.decrease();
        Log.d(LOGTAG, "runTask()...done");
    }

    /** Returns a random UUID string with the dashes stripped. */
    private String newRandomUUID() {
        String uuidRaw = UUID.randomUUID().toString();
        return uuidRaw.replaceAll("-", "");
    }

    private boolean isConnected() {
        return connection != null && connection.isConnected();
    }

    private boolean isAuthenticated() {
        return connection != null && connection.isConnected()
                && connection.isAuthenticated();
    }

    /** True once credentials have been persisted by a successful registration. */
    private boolean isRegistered() {
        return sharedPrefs.contains(Constants.XMPP_USERNAME)
                && sharedPrefs.contains(Constants.XMPP_PASSWORD);
    }

    private void submitConnectTask() {
        Log.d(LOGTAG, "submitConnectTask()...");
        addTask(new ConnectTask());
    }

    // Registration requires a live connection first.
    private void submitRegisterTask() {
        Log.d(LOGTAG, "submitRegisterTask()...");
        submitConnectTask();
        addTask(new RegisterTask());
    }

    // Login requires connect + register to have been queued first.
    private void submitLoginTask() {
        Log.d(LOGTAG, "submitLoginTask()...");
        submitRegisterTask();
        addTask(new LoginTask());
    }

    public void submitSendTask() {
        Log.d(LOGTAG, "submitSendTask()...");
        submitLoginTask();
        addTask(new SendTask());
    }

    /**
     * Enqueues a task. If the queue is idle the task is submitted immediately;
     * otherwise it waits until the running task calls {@link #runTask()}.
     */
    private void addTask(Runnable runnable) {
        Log.d(LOGTAG, "addTask(runnable)...");
        taskTracker.increase();
        synchronized (taskList) {
            if (taskList.isEmpty() && !running) {
                running = true;
                futureTask = taskSubmitter.submit(runnable);
                if (futureTask == null) {
                    // Submission failed; undo the tracker increment above.
                    taskTracker.decrease();
                }
            } else {
                taskList.add(runnable);
            }
        }
        Log.d(LOGTAG, "addTask(runnable)... done");
    }

    /** Deletes the stored XMPP credentials. */
    private void removeAccount() {
        Editor editor = sharedPrefs.edit();
        editor.remove(Constants.XMPP_USERNAME);
        editor.remove(Constants.XMPP_PASSWORD);
        editor.commit();
    }

    /**
     * A runnable task to connect the server.
     */
    private class ConnectTask implements Runnable {

        final XmppManager xmppManager;

        private ConnectTask() {
            this.xmppManager = XmppManager.this;
        }

        public void run() {
            Log.i(LOGTAG, "ConnectTask.run()...");

            if (!xmppManager.isConnected()) {
                // Create the configuration for this new connection
                ConnectionConfiguration connConfig = new ConnectionConfiguration(
                        xmppHost, xmppPort);
                // connConfig.setSecurityMode(SecurityMode.disabled);
                connConfig.setSecurityMode(SecurityMode.required);
                connConfig.setSASLAuthenticationEnabled(false);
                connConfig.setCompressionEnabled(false);

                XMPPConnection connection = new XMPPConnection(connConfig);
                xmppManager.setConnection(connection);

                try {
                    // Connect to the server
                    connection.connect();
                    Log.i(LOGTAG, "XMPP connected successfully");

                    // packet provider for the custom androidpn notification IQ
                    ProviderManager.getInstance().addIQProvider("notification",
                            "androidpn:iq:notification",
                            new NotificationIQProvider());

                } catch (XMPPException e) {
                    // NOTE(review): connect failure is only logged; the task chain
                    // continues and LoginTask will fail on the dead connection.
                    Log.e(LOGTAG, "XMPP connection failed", e);
                }

                xmppManager.runTask();

            } else {
                Log.i(LOGTAG, "XMPP connected already");
                xmppManager.runTask();
            }
        }
    }

    /**
     * A runnable task to register a new user onto the server.
     */
    private class RegisterTask implements Runnable {

        final XmppManager xmppManager;

        private RegisterTask() {
            xmppManager = XmppManager.this;
        }

        public void run() {
            Log.i(LOGTAG, "RegisterTask.run()...");

            if (!xmppManager.isRegistered()) {
                // Fresh random credentials; persisted only once the server
                // confirms the registration below.
                final String newUsername = newRandomUUID();
                final String newPassword = newRandomUUID();

                Registration registration = new Registration();

                // Match only the IQ reply carrying our registration packet ID.
                PacketFilter packetFilter = new AndFilter(new PacketIDFilter(
                        registration.getPacketID()), new PacketTypeFilter(
                        IQ.class));

                PacketListener packetListener = new PacketListener() {

                    public void processPacket(Packet packet) {
                        Log.d("RegisterTask.PacketListener",
                                "processPacket().....");
                        Log.d("RegisterTask.PacketListener", "packet="
                                + packet.toXML());

                        if (packet instanceof IQ) {
                            IQ response = (IQ) packet;
                            if (response.getType() == IQ.Type.ERROR) {
                                // 409 = conflict: account already exists, which is benign.
                                // NOTE(review): neither error branch calls runTask(),
                                // so the task chain stalls on registration failure — verify.
                                if (!response.getError().toString().contains(
                                        "409")) {
                                    Log.e(LOGTAG,
                                            "Unknown error while registering XMPP account! "
                                                    + response.getError()
                                                            .getCondition());
                                }
                            } else if (response.getType() == IQ.Type.RESULT) {
                                xmppManager.setUsername(newUsername);
                                xmppManager.setPassword(newPassword);
                                Log.d(LOGTAG, "username=" + newUsername);
                                Log.d(LOGTAG, "password=" + newPassword);

                                Editor editor = sharedPrefs.edit();
                                editor.putString(Constants.XMPP_USERNAME,
                                        newUsername);
                                editor.putString(Constants.XMPP_PASSWORD,
                                        newPassword);
                                editor.commit();
                                Log
                                        .i(LOGTAG,
                                                "Account registered successfully");
                                xmppManager.runTask();
                            }
                        }
                    }
                };

                connection.addPacketListener(packetListener, packetFilter);

                registration.setType(IQ.Type.SET);
                // registration.setTo(xmppHost);
                // Map<String, String> attributes = new HashMap<String, String>();
                // attributes.put("username", rUsername);
                // attributes.put("password", rPassword);
                // registration.setAttributes(attributes);
                registration.addAttribute("username", newUsername);
                registration.addAttribute("password", newPassword);
                connection.sendPacket(registration);

            } else {
                Log.i(LOGTAG, "Account registered already");
                xmppManager.runTask();
            }
        }
    }

    /**
     * A runnable task to log into the server.
     */
    private class LoginTask implements Runnable {

        final XmppManager xmppManager;

        private LoginTask() {
            this.xmppManager = XmppManager.this;
        }

        public void run() {
            Log.i(LOGTAG, "LoginTask.run()...");

            if (!xmppManager.isAuthenticated()) {
                Log.d(LOGTAG, "username=" + username);
                Log.d(LOGTAG, "password=" + password);

                try {
                    xmppManager.getConnection().login(
                            xmppManager.getUsername(),
                            xmppManager.getPassword(), XMPP_RESOURCE_NAME);
                    Log.d(LOGTAG, "Loggedn in successfully"); // (sic) typo kept: runtime string

                    // connection listener
                    if (xmppManager.getConnectionListener() != null) {
                        xmppManager.getConnection().addConnectionListener(
                                xmppManager.getConnectionListener());
                    }

                    // packet filter for incoming androidpn notifications
                    PacketFilter packetFilter = new PacketTypeFilter(
                            NotificationIQ.class);
                    // packet listener
                    PacketListener packetListener = xmppManager
                            .getNotificationPacketListener();
                    connection.addPacketListener(packetListener, packetFilter);

                    xmppManager.runTask();

                } catch (XMPPException e) {
                    Log.e(LOGTAG, "LoginTask.run()... xmpp error");
                    Log.e(LOGTAG, "Failed to login to xmpp server. Caused by: "
                            + e.getMessage());
                    String INVALID_CREDENTIALS_ERROR_CODE = "401";
                    String errorMessage = e.getMessage();
                    // 401 means the stored credentials are stale: re-register from scratch.
                    if (errorMessage != null
                            && errorMessage
                                    .contains(INVALID_CREDENTIALS_ERROR_CODE)) {
                        xmppManager.reregisterAccount();
                        return;
                    }
                    xmppManager.startReconnectionThread();

                } catch (Exception e) {
                    Log.e(LOGTAG, "LoginTask.run()... other error");
                    Log.e(LOGTAG, "Failed to login to xmpp server. Caused by: "
                            + e.getMessage());
                    xmppManager.startReconnectionThread();
                }

            } else {
                Log.i(LOGTAG, "Logged in already");
                xmppManager.runTask();
            }

        }
    }

    /** A runnable task that sends a test message over the connection. */
    private class SendTask implements Runnable {

        final XmppManager xmppManager;

        private SendTask() {
            this.xmppManager = XmppManager.this;
        }

        public void run() {
            Log.i(LOGTAG, "SendTask.run()...");
            Message message = new Message();
            message.setBody("sent from client");
            connection.sendPacket(message);
            xmppManager.runTask();
        }

    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.tinkergraph.structure; import org.apache.commons.configuration2.BaseConfiguration; import org.apache.commons.configuration2.Configuration; import org.apache.tinkerpop.gremlin.process.computer.GraphComputer; import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Element; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.Transaction; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; import org.apache.tinkerpop.gremlin.structure.io.Io; import org.apache.tinkerpop.gremlin.structure.io.IoCore; import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion; import org.apache.tinkerpop.gremlin.structure.io.gryo.GryoVersion; import org.apache.tinkerpop.gremlin.structure.util.ElementHelper; import org.apache.tinkerpop.gremlin.structure.util.GraphFactory; import org.apache.tinkerpop.gremlin.structure.util.StringFactory; import org.apache.tinkerpop.gremlin.tinkergraph.process.computer.TinkerGraphComputer; import 
org.apache.tinkerpop.gremlin.tinkergraph.process.computer.TinkerGraphComputerView; import org.apache.tinkerpop.gremlin.tinkergraph.process.traversal.strategy.optimization.TinkerGraphCountStrategy; import org.apache.tinkerpop.gremlin.tinkergraph.process.traversal.strategy.optimization.TinkerGraphStepStrategy; import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; import java.io.File; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Stream; /** * An in-memory (with optional persistence on calls to {@link #close()}), reference implementation of the property * graph interfaces provided by TinkerPop. * * @author Marko A. Rodriguez (http://markorodriguez.com) * @author Stephen Mallette (http://stephen.genoprime.com) */ @Graph.OptIn(Graph.OptIn.SUITE_STRUCTURE_STANDARD) @Graph.OptIn(Graph.OptIn.SUITE_STRUCTURE_INTEGRATE) @Graph.OptIn(Graph.OptIn.SUITE_PROCESS_STANDARD) @Graph.OptIn(Graph.OptIn.SUITE_PROCESS_COMPUTER) public final class TinkerGraph implements Graph { static { TraversalStrategies.GlobalCache.registerStrategies(TinkerGraph.class, TraversalStrategies.GlobalCache.getStrategies(Graph.class).clone().addStrategies( TinkerGraphStepStrategy.instance(), TinkerGraphCountStrategy.instance())); } private static final Configuration EMPTY_CONFIGURATION = new BaseConfiguration() {{ this.setProperty(Graph.GRAPH, TinkerGraph.class.getName()); }}; public static final String GREMLIN_TINKERGRAPH_VERTEX_ID_MANAGER = "gremlin.tinkergraph.vertexIdManager"; public static final String GREMLIN_TINKERGRAPH_EDGE_ID_MANAGER = "gremlin.tinkergraph.edgeIdManager"; public static final String GREMLIN_TINKERGRAPH_VERTEX_PROPERTY_ID_MANAGER = "gremlin.tinkergraph.vertexPropertyIdManager"; public static final String 
GREMLIN_TINKERGRAPH_DEFAULT_VERTEX_PROPERTY_CARDINALITY = "gremlin.tinkergraph.defaultVertexPropertyCardinality"; public static final String GREMLIN_TINKERGRAPH_GRAPH_LOCATION = "gremlin.tinkergraph.graphLocation"; public static final String GREMLIN_TINKERGRAPH_GRAPH_FORMAT = "gremlin.tinkergraph.graphFormat"; private final TinkerGraphFeatures features = new TinkerGraphFeatures(); protected AtomicLong currentId = new AtomicLong(-1L); protected Map<Object, Vertex> vertices = new ConcurrentHashMap<>(); protected Map<Object, Edge> edges = new ConcurrentHashMap<>(); protected TinkerGraphVariables variables = null; protected TinkerGraphComputerView graphComputerView = null; protected TinkerIndex<TinkerVertex> vertexIndex = null; protected TinkerIndex<TinkerEdge> edgeIndex = null; protected final IdManager<?> vertexIdManager; protected final IdManager<?> edgeIdManager; protected final IdManager<?> vertexPropertyIdManager; protected final VertexProperty.Cardinality defaultVertexPropertyCardinality; private final Configuration configuration; private final String graphLocation; private final String graphFormat; /** * An empty private constructor that initializes {@link TinkerGraph}. 
*/ private TinkerGraph(final Configuration configuration) { this.configuration = configuration; vertexIdManager = selectIdManager(configuration, GREMLIN_TINKERGRAPH_VERTEX_ID_MANAGER, Vertex.class); edgeIdManager = selectIdManager(configuration, GREMLIN_TINKERGRAPH_EDGE_ID_MANAGER, Edge.class); vertexPropertyIdManager = selectIdManager(configuration, GREMLIN_TINKERGRAPH_VERTEX_PROPERTY_ID_MANAGER, VertexProperty.class); defaultVertexPropertyCardinality = VertexProperty.Cardinality.valueOf( configuration.getString(GREMLIN_TINKERGRAPH_DEFAULT_VERTEX_PROPERTY_CARDINALITY, VertexProperty.Cardinality.single.name())); graphLocation = configuration.getString(GREMLIN_TINKERGRAPH_GRAPH_LOCATION, null); graphFormat = configuration.getString(GREMLIN_TINKERGRAPH_GRAPH_FORMAT, null); if ((graphLocation != null && null == graphFormat) || (null == graphLocation && graphFormat != null)) throw new IllegalStateException(String.format("The %s and %s must both be specified if either is present", GREMLIN_TINKERGRAPH_GRAPH_LOCATION, GREMLIN_TINKERGRAPH_GRAPH_FORMAT)); if (graphLocation != null) loadGraph(); } /** * Open a new {@link TinkerGraph} instance. * <p/> * <b>Reference Implementation Help:</b> If a {@link Graph} implementation does not require a {@code Configuration} * (or perhaps has a default configuration) it can choose to implement a zero argument * {@code open()} method. This is an optional constructor method for TinkerGraph. It is not enforced by the Gremlin * Test Suite. */ public static TinkerGraph open() { return open(EMPTY_CONFIGURATION); } /** * Open a new {@code TinkerGraph} instance. * <p/> * <b>Reference Implementation Help:</b> This method is the one use by the {@link GraphFactory} to instantiate * {@link Graph} instances. This method must be overridden for the Structure Test Suite to pass. Implementers have * latitude in terms of how exceptions are handled within this method. 
Such exceptions will be considered * implementation specific by the test suite as all test generate graph instances by way of * {@link GraphFactory}. As such, the exceptions get generalized behind that facade and since * {@link GraphFactory} is the preferred method to opening graphs it will be consistent at that level. * * @param configuration the configuration for the instance * @return a newly opened {@link Graph} */ public static TinkerGraph open(final Configuration configuration) { return new TinkerGraph(configuration); } ////////////// STRUCTURE API METHODS ////////////////// @Override public Vertex addVertex(final Object... keyValues) { ElementHelper.legalPropertyKeyValueArray(keyValues); Object idValue = vertexIdManager.convert(ElementHelper.getIdValue(keyValues).orElse(null)); final String label = ElementHelper.getLabelValue(keyValues).orElse(Vertex.DEFAULT_LABEL); if (null != idValue) { if (this.vertices.containsKey(idValue)) throw Exceptions.vertexWithIdAlreadyExists(idValue); } else { idValue = vertexIdManager.getNextId(this); } final Vertex vertex = new TinkerVertex(idValue, label, this); this.vertices.put(vertex.id(), vertex); ElementHelper.attachProperties(vertex, VertexProperty.Cardinality.list, keyValues); return vertex; } @Override public <C extends GraphComputer> C compute(final Class<C> graphComputerClass) { if (!graphComputerClass.equals(TinkerGraphComputer.class)) throw Graph.Exceptions.graphDoesNotSupportProvidedGraphComputer(graphComputerClass); return (C) new TinkerGraphComputer(this); } @Override public GraphComputer compute() { return new TinkerGraphComputer(this); } @Override public Variables variables() { if (null == this.variables) this.variables = new TinkerGraphVariables(); return this.variables; } @Override public <I extends Io> I io(final Io.Builder<I> builder) { if (builder.requiresVersion(GryoVersion.V1_0) || builder.requiresVersion(GraphSONVersion.V1_0)) return (I) builder.graph(this).onMapper(mapper -> 
mapper.addRegistry(TinkerIoRegistryV1d0.instance())).create(); else if (builder.requiresVersion(GraphSONVersion.V2_0)) // there is no gryo v2 return (I) builder.graph(this).onMapper(mapper -> mapper.addRegistry(TinkerIoRegistryV2d0.instance())).create(); else return (I) builder.graph(this).onMapper(mapper -> mapper.addRegistry(TinkerIoRegistryV3d0.instance())).create(); } @Override public String toString() { return StringFactory.graphString(this, "vertices:" + this.vertices.size() + " edges:" + this.edges.size()); } public void clear() { this.vertices.clear(); this.edges.clear(); this.variables = null; this.currentId.set(-1L); this.vertexIndex = null; this.edgeIndex = null; this.graphComputerView = null; } /** * This method only has an effect if the {@link #GREMLIN_TINKERGRAPH_GRAPH_LOCATION} is set, in which case the * data in the graph is persisted to that location. This method may be called multiple times and does not release * resources. */ @Override public void close() { if (graphLocation != null) saveGraph(); } @Override public Transaction tx() { throw Exceptions.transactionsNotSupported(); } @Override public Configuration configuration() { return configuration; } @Override public Iterator<Vertex> vertices(final Object... vertexIds) { return createElementIterator(Vertex.class, vertices, vertexIdManager, vertexIds); } @Override public Iterator<Edge> edges(final Object... 
edgeIds) { return createElementIterator(Edge.class, edges, edgeIdManager, edgeIds); } private void loadGraph() { final File f = new File(graphLocation); if (f.exists() && f.isFile()) { try { if (graphFormat.equals("graphml")) { io(IoCore.graphml()).readGraph(graphLocation); } else if (graphFormat.equals("graphson")) { io(IoCore.graphson()).readGraph(graphLocation); } else if (graphFormat.equals("gryo")) { io(IoCore.gryo()).readGraph(graphLocation); } else { io(IoCore.createIoBuilder(graphFormat)).readGraph(graphLocation); } } catch (Exception ex) { throw new RuntimeException(String.format("Could not load graph at %s with %s", graphLocation, graphFormat), ex); } } } private void saveGraph() { final File f = new File(graphLocation); if (f.exists()) { f.delete(); } else { final File parent = f.getParentFile(); // the parent would be null in the case of an relative path if the graphLocation was simply: "f.gryo" if (parent != null && !parent.exists()) { parent.mkdirs(); } } try { if (graphFormat.equals("graphml")) { io(IoCore.graphml()).writeGraph(graphLocation); } else if (graphFormat.equals("graphson")) { io(IoCore.graphson()).writeGraph(graphLocation); } else if (graphFormat.equals("gryo")) { io(IoCore.gryo()).writeGraph(graphLocation); } else { io(IoCore.createIoBuilder(graphFormat)).writeGraph(graphLocation); } } catch (Exception ex) { throw new RuntimeException(String.format("Could not save graph at %s with %s", graphLocation, graphFormat), ex); } } private <T extends Element> Iterator<T> createElementIterator(final Class<T> clazz, final Map<Object, T> elements, final IdManager idManager, final Object... ids) { final Iterator<T> iterator; if (0 == ids.length) { iterator = new TinkerGraphIterator<T>(elements.values().iterator()); } else { final List<Object> idList = Arrays.asList(ids); validateHomogenousIds(idList); // if the type is of Element - have to look each up because it might be an Attachable instance or // other implementation. 
the assumption is that id conversion is not required for detached // stuff - doesn't seem likely someone would detach a Titan vertex then try to expect that // vertex to be findable in OrientDB return clazz.isAssignableFrom(ids[0].getClass()) ? new TinkerGraphIterator<T>(IteratorUtils.filter(IteratorUtils.map(idList, id -> elements.get(clazz.cast(id).id())).iterator(), Objects::nonNull)) : new TinkerGraphIterator<T>(IteratorUtils.filter(IteratorUtils.map(idList, id -> elements.get(idManager.convert(id))).iterator(), Objects::nonNull)); } return TinkerHelper.inComputerMode(this) ? (Iterator<T>) (clazz.equals(Vertex.class) ? IteratorUtils.filter((Iterator<Vertex>) iterator, t -> this.graphComputerView.legalVertex(t)) : IteratorUtils.filter((Iterator<Edge>) iterator, t -> this.graphComputerView.legalEdge(t.outVertex(), t))) : iterator; } /** * Return TinkerGraph feature set. * <p/> * <b>Reference Implementation Help:</b> Implementers only need to implement features for which there are * negative or instance configured features. By default, all * {@link org.apache.tinkerpop.gremlin.structure.Graph.Features} return true. 
*/ @Override public Features features() { return features; } private void validateHomogenousIds(final List<Object> ids) { final Iterator<Object> iterator = ids.iterator(); Object id = iterator.next(); if (id == null) throw Graph.Exceptions.idArgsMustBeEitherIdOrElement(); final Class firstClass = id.getClass(); while (iterator.hasNext()) { id = iterator.next(); if (id == null || !id.getClass().equals(firstClass)) throw Graph.Exceptions.idArgsMustBeEitherIdOrElement(); } } public class TinkerGraphFeatures implements Features { private final TinkerGraphGraphFeatures graphFeatures = new TinkerGraphGraphFeatures(); private final TinkerGraphEdgeFeatures edgeFeatures = new TinkerGraphEdgeFeatures(); private final TinkerGraphVertexFeatures vertexFeatures = new TinkerGraphVertexFeatures(); private TinkerGraphFeatures() { } @Override public GraphFeatures graph() { return graphFeatures; } @Override public EdgeFeatures edge() { return edgeFeatures; } @Override public VertexFeatures vertex() { return vertexFeatures; } @Override public String toString() { return StringFactory.featureString(this); } } public class TinkerGraphVertexFeatures implements Features.VertexFeatures { private final TinkerGraphVertexPropertyFeatures vertexPropertyFeatures = new TinkerGraphVertexPropertyFeatures(); private TinkerGraphVertexFeatures() { } @Override public Features.VertexPropertyFeatures properties() { return vertexPropertyFeatures; } @Override public boolean supportsCustomIds() { return false; } @Override public boolean willAllowId(final Object id) { return vertexIdManager.allow(id); } @Override public VertexProperty.Cardinality getCardinality(final String key) { return defaultVertexPropertyCardinality; } } public class TinkerGraphEdgeFeatures implements Features.EdgeFeatures { private TinkerGraphEdgeFeatures() { } @Override public boolean supportsCustomIds() { return false; } @Override public boolean willAllowId(final Object id) { return edgeIdManager.allow(id); } } public class 
TinkerGraphGraphFeatures implements Features.GraphFeatures { private TinkerGraphGraphFeatures() { } @Override public boolean supportsConcurrentAccess() { return false; } @Override public boolean supportsTransactions() { return false; } @Override public boolean supportsThreadedTransactions() { return false; } } public class TinkerGraphVertexPropertyFeatures implements Features.VertexPropertyFeatures { private TinkerGraphVertexPropertyFeatures() { } @Override public boolean supportsCustomIds() { return false; } @Override public boolean willAllowId(final Object id) { return vertexIdManager.allow(id); } } ///////////// GRAPH SPECIFIC INDEXING METHODS /////////////// /** * Create an index for said element class ({@link Vertex} or {@link Edge}) and said property key. * Whenever an element has the specified key mutated, the index is updated. * When the index is created, all existing elements are indexed to ensure that they are captured by the index. * * @param key the property key to index * @param elementClass the element class to index * @param <E> The type of the element class */ public <E extends Element> void createIndex(final String key, final Class<E> elementClass) { if (Vertex.class.isAssignableFrom(elementClass)) { if (null == this.vertexIndex) this.vertexIndex = new TinkerIndex<>(this, TinkerVertex.class); this.vertexIndex.createKeyIndex(key); } else if (Edge.class.isAssignableFrom(elementClass)) { if (null == this.edgeIndex) this.edgeIndex = new TinkerIndex<>(this, TinkerEdge.class); this.edgeIndex.createKeyIndex(key); } else { throw new IllegalArgumentException("Class is not indexable: " + elementClass); } } /** * Drop the index for the specified element class ({@link Vertex} or {@link Edge}) and key. 
* * @param key the property key to stop indexing * @param elementClass the element class of the index to drop * @param <E> The type of the element class */ public <E extends Element> void dropIndex(final String key, final Class<E> elementClass) { if (Vertex.class.isAssignableFrom(elementClass)) { if (null != this.vertexIndex) this.vertexIndex.dropKeyIndex(key); } else if (Edge.class.isAssignableFrom(elementClass)) { if (null != this.edgeIndex) this.edgeIndex.dropKeyIndex(key); } else { throw new IllegalArgumentException("Class is not indexable: " + elementClass); } } /** * Return all the keys currently being index for said element class ({@link Vertex} or {@link Edge}). * * @param elementClass the element class to get the indexed keys for * @param <E> The type of the element class * @return the set of keys currently being indexed */ public <E extends Element> Set<String> getIndexedKeys(final Class<E> elementClass) { if (Vertex.class.isAssignableFrom(elementClass)) { return null == this.vertexIndex ? Collections.emptySet() : this.vertexIndex.getIndexedKeys(); } else if (Edge.class.isAssignableFrom(elementClass)) { return null == this.edgeIndex ? Collections.emptySet() : this.edgeIndex.getIndexedKeys(); } else { throw new IllegalArgumentException("Class is not indexable: " + elementClass); } } /** * Construct an {@link TinkerGraph.IdManager} from the TinkerGraph {@code Configuration}. */ private static IdManager<?> selectIdManager(final Configuration config, final String configKey, final Class<? 
extends Element> clazz) { final String vertexIdManagerConfigValue = config.getString(configKey, DefaultIdManager.ANY.name()); try { return DefaultIdManager.valueOf(vertexIdManagerConfigValue); } catch (IllegalArgumentException iae) { try { return (IdManager) Class.forName(vertexIdManagerConfigValue).newInstance(); } catch (Exception ex) { throw new IllegalStateException(String.format("Could not configure TinkerGraph %s id manager with %s", clazz.getSimpleName(), vertexIdManagerConfigValue)); } } } /** * TinkerGraph will use an implementation of this interface to generate identifiers when a user does not supply * them and to handle identifier conversions when querying to provide better flexibility with respect to * handling different data types that mean the same thing. For example, the * {@link DefaultIdManager#LONG} implementation will allow {@code g.vertices(1l, 2l)} and * {@code g.vertices(1, 2)} to both return values. * * @param <T> the id type */ public interface IdManager<T> { /** * Generate an identifier which should be unique to the {@link TinkerGraph} instance. */ T getNextId(final TinkerGraph graph); /** * Convert an identifier to the type required by the manager. */ T convert(final Object id); /** * Determine if an identifier is allowed by this manager given its type. */ boolean allow(final Object id); } /** * A default set of {@link IdManager} implementations for common identifier types. */ public enum DefaultIdManager implements IdManager { /** * Manages identifiers of type {@code Long}. 
     * Will convert any class that extends from {@link Number} to a {@link Long} and will also
     * attempt to convert {@code String} values.
     */
    LONG {
        @Override
        public Long getNextId(final TinkerGraph graph) {
            // Keep incrementing the counter until a value unused by both vertices and edges is found.
            return Stream.generate(() -> (graph.currentId.incrementAndGet())).filter(id -> !graph.vertices.containsKey(id) && !graph.edges.containsKey(id)).findAny().get();
        }

        @Override
        public Object convert(final Object id) {
            if (null == id)
                return null;
            else if (id instanceof Long)
                return id;
            else if (id instanceof Number)
                // Any other Number (Integer, Double, ...) is narrowed/widened to long.
                return ((Number) id).longValue();
            else if (id instanceof String) {
                try {
                    return Long.parseLong((String) id);
                } catch (NumberFormatException nfe) {
                    throw new IllegalArgumentException(createErrorMessage(Long.class, id));
                }
            } else
                throw new IllegalArgumentException(createErrorMessage(Long.class, id));
        }

        @Override
        public boolean allow(final Object id) {
            return id instanceof Number || id instanceof String;
        }
    },

    /**
     * Manages identifiers of type {@code Integer}. Will convert any class that extends from {@link Number} to a
     * {@link Integer} and will also attempt to convert {@code String} values
     */
    INTEGER {
        @Override
        public Integer getNextId(final TinkerGraph graph) {
            // Same scheme as LONG but the generated long counter is truncated to int.
            return Stream.generate(() -> (graph.currentId.incrementAndGet())).map(Long::intValue).filter(id -> !graph.vertices.containsKey(id) && !graph.edges.containsKey(id)).findAny().get();
        }

        @Override
        public Object convert(final Object id) {
            if (null == id)
                return null;
            else if (id instanceof Integer)
                return id;
            else if (id instanceof Number)
                return ((Number) id).intValue();
            else if (id instanceof String) {
                try {
                    return Integer.parseInt((String) id);
                } catch (NumberFormatException nfe) {
                    throw new IllegalArgumentException(createErrorMessage(Integer.class, id));
                }
            } else
                throw new IllegalArgumentException(createErrorMessage(Integer.class, id));
        }

        @Override
        public boolean allow(final Object id) {
            return id instanceof Number || id instanceof String;
        }
    },

    /**
     * Manages identifiers of type {@link java.util.UUID}. Will convert {@code String} values to
     * {@link java.util.UUID}.
     */
    UUID {
        @Override
        public UUID getNextId(final TinkerGraph graph) {
            // Random UUIDs need no collision scan against existing vertices/edges.
            return java.util.UUID.randomUUID();
        }

        @Override
        public Object convert(final Object id) {
            if (null == id)
                return null;
            else if (id instanceof java.util.UUID)
                return id;
            else if (id instanceof String) {
                try {
                    return java.util.UUID.fromString((String) id);
                } catch (IllegalArgumentException iae) {
                    throw new IllegalArgumentException(createErrorMessage(java.util.UUID.class, id));
                }
            } else
                throw new IllegalArgumentException(createErrorMessage(java.util.UUID.class, id));
        }

        @Override
        public boolean allow(final Object id) {
            return id instanceof UUID || id instanceof String;
        }
    },

    /**
     * Manages identifiers of any type. This represents the default way {@link TinkerGraph} has always worked.
     * In other words, there is no identifier conversion so if the identifier of a vertex is a {@code Long}, then
     * trying to request it with an {@code Integer} will have no effect. Also, like the original
     * {@link TinkerGraph}, it will generate {@link Long} values for identifiers.
     */
    ANY {
        @Override
        public Long getNextId(final TinkerGraph graph) {
            return Stream.generate(() -> (graph.currentId.incrementAndGet())).filter(id -> !graph.vertices.containsKey(id) && !graph.edges.containsKey(id)).findAny().get();
        }

        @Override
        public Object convert(final Object id) {
            // Identity conversion: ids are matched by whatever equals() their type defines.
            return id;
        }

        @Override
        public boolean allow(final Object id) {
            return true;
        }
    };

    // Shared formatter for conversion failures across the enum constants.
    private static String createErrorMessage(final Class<?> expectedType, final Object id) {
        return String.format("Expected an id that is convertible to %s but received %s - [%s]", expectedType, id.getClass(), id);
    }
}
}
// Copyright (c) Microsoft Corporation. // All rights reserved. // // This code is licensed under the MIT License. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files(the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and / or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions : // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
package com.microsoft.aad.automation.testapp;

import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.TextUtils;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;

import com.microsoft.aad.adal.AuthenticationCallback;
import com.microsoft.aad.adal.AuthenticationConstants;
import com.microsoft.aad.adal.AuthenticationContext;
import com.microsoft.aad.adal.AuthenticationException;
import com.microsoft.aad.adal.AuthenticationResult;
import com.microsoft.aad.adal.AuthenticationSettings;
import com.microsoft.aad.adal.CacheKey;
import com.microsoft.aad.adal.ITokenCacheStore;
import com.microsoft.aad.adal.PromptBehavior;
import com.microsoft.aad.adal.TokenCacheItem;
import com.microsoft.aad.adal.UserInfo;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.UUID;

/**
 * Handles the incoming request: gathers the request info (JSON-formatted data containing the
 * authority, resource, clientId, redirect, etc.).
 */
public class SignInActivity extends AppCompatActivity {

    // Keys expected in the JSON request blob typed into the text box.
    public static final String AUTHORITY = "authority";
    public static final String RESOURCE = "resource";
    public static final String CLIENT_ID = "client_id";
    public static final String REDIRECT_URI = "redirect_uri";
    public static final String USE_BROKER = "use_broker";
    public static final String PROMPT_BEHAVIOR = "prompt_behavior";
    public static final String EXTRA_QUERY_PARAM = "extra_qp";
    public static final String VALIDATE_AUTHORITY = "validate_authority";
    public static final String USER_IDENTIFIER = "user_identifier";
    public static final String USER_IDENTIFIER_TYPE = "user_identifier_type";
    public static final String CORRELATION_ID = "correlation_id";

    // Sentinel written into the cache to force a refresh-token failure in tests.
    static final String INVALID_REFRESH_TOKEN = "some invalid refresh token";

    private TextView mTextView;
    private String mAuthority;
    private String mResource;
    private String mClientId;
    private String mRedirectUri;
    private boolean mUseBroker;
    private PromptBehavior mPromptBehavior;
    private String mLoginHint;
    private String mUserId;
    private String mExtraQueryParam;
    private AuthenticationContext mAuthenticationContext;
    private boolean mValidateAuthority;
    private UUID mCorrelationId;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_request);

        // The layout provides an EditText; it is stored as a TextView since only text access is needed.
        mTextView = (EditText) findViewById(R.id.requestInfo);
        final Button goButton = (Button) findViewById(R.id.requestGo);
        goButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                performAuthentication();
            }
        });
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Clear the input box and hand the auth result back to ADAL for processing.
        mTextView.setText("");
        mAuthenticationContext.onActivityResult(requestCode, resultCode, data);
    }

    /**
     * Reads the JSON request from the text box, configures the ADAL context and dispatches
     * to the handler selected by the flow code carried in the launching intent.
     */
    private void performAuthentication() {
        final Intent receivedIntent = getIntent();

        int flowCode = receivedIntent.getIntExtra(MainActivity.FLOW_CODE, 0);

        final Map<String, String> inputItems;
        try {
            inputItems = readAuthenticationInfo();
        } catch (final JSONException e) {
            sendErrorToResultActivity(Constants.JSON_ERROR, "Unable to read the input JSON info "
                    + e.getMessage());
            return;
        }

        if (inputItems.isEmpty()) {
            // readAuthenticationInfo already reported the empty-input error.
            return;
        }

        // NOTE(review): validateUserInput throws IllegalArgumentException, which is not caught
        // here — invalid input crashes the activity rather than reaching the result screen.
        validateUserInput(inputItems, flowCode);

        setAuthenticationData(inputItems);
        AuthenticationSettings.INSTANCE.setUseBroker(mUseBroker);
        mAuthenticationContext = new AuthenticationContext(getApplicationContext(), mAuthority,
                mValidateAuthority);

        switch (flowCode) {
            case MainActivity.ACQUIRE_TOKEN:
                acquireToken();
                break;
            case MainActivity.ACQUIRE_TOKEN_SILENT:
                acquireTokenSilent();
                break;
            case MainActivity.INVALIDATE_ACCESS_TOKEN:
                processExpireAccessTokenRequest();
                break;
            case MainActivity.INVALIDATE_REFRESH_TOKEN:
                processInvalidateRefreshTokenRequest();
                break;
            case MainActivity.INVALIDATE_FAMILY_REFRESH_TOKEN:
                processInvalidateFamilyRefreshTokenRequest();
                break;
            default:
                sendErrorToResultActivity("unknown_request", "Unknown request is received");
                break;
        }
    }

    /** Builds an intent carrying an error code and description for the result activity. */
    static Intent getErrorIntentForResultActivity(final String error, final String errorDescription) {
        final Intent intent = new Intent();
        intent.putExtra(Constants.ERROR, error);
        intent.putExtra(Constants.ERROR_DESCRIPTION, errorDescription);
        return intent;
    }

    private void sendErrorToResultActivity(final String error, final String errorDescription) {
        launchResultActivity(getErrorIntentForResultActivity(error, errorDescription));
    }

    /** Expires cached access tokens and reports how many entries were touched. */
    private void processExpireAccessTokenRequest() {
        int count = expireAccessToken();
        final Intent intent = new Intent();
        intent.putExtra(Constants.EXPIRED_ACCESS_TOKEN_COUNT, String.valueOf(count));
        launchResultActivity(intent);
    }

    /** Invalidates cached refresh tokens (RT and MRRT) and reports the count. */
    private void processInvalidateRefreshTokenRequest() {
        int count = invalidateRefreshToken();
        final Intent intent = new Intent();
        intent.putExtra(Constants.INVALIDATED_REFRESH_TOKEN_COUNT, String.valueOf(count));
        launchResultActivity(intent);
    }

    /** Invalidates cached family refresh tokens (FRT) and reports the count. */
    private void processInvalidateFamilyRefreshTokenRequest() {
        int count = invalidateFamilyRefreshToken();
        final Intent intent = new Intent();
        intent.putExtra(Constants.INVALIDATED_FAMILY_REFRESH_TOKEN_COUNT, String.valueOf(count));
        launchResultActivity(intent);
    }

    /**
     * Parses the text box content as a JSON object into a flat key/value map.
     * Returns an empty map (after reporting an error) when no input was entered.
     */
    private Map<String, String> readAuthenticationInfo() throws JSONException {
        final String userInputText = mTextView.getText().toString();
        if (TextUtils.isEmpty(userInputText)) {
            // Empty input: report the error and signal the caller via an empty map.
            sendErrorToResultActivity("empty_requestInfo", "No user input for the request.");
            return Collections.emptyMap();
        }

        // parse Json response
        final Map<String, String> inputItems = new HashMap<>();
        extractJsonObjects(inputItems, userInputText);

        return inputItems;
    }

    // Copies every top-level key of the JSON object into the map as a String value.
    private static void extractJsonObjects(Map<String, String> inputItems, String jsonStr)
            throws JSONException {
        final JSONObject jsonObject = new JSONObject(jsonStr);
        final Iterator<?> iterator = jsonObject.keys();

        while (iterator.hasNext()) {
            final String key = (String) iterator.next();
            inputItems.put(key, jsonObject.getString(key));
        }
    }

    /**
     * Checks that the fields required by the requested flow are present.
     * @throws IllegalArgumentException naming the first missing field.
     */
    private void validateUserInput(final Map<String, String> inputItems, int flowCode) {
        if (inputItems.isEmpty()) {
            throw new IllegalArgumentException("No sign-in data typed in the textBox");
        }

        if (TextUtils.isEmpty(inputItems.get(RESOURCE))) {
            throw new IllegalArgumentException("resource");
        }

        if (TextUtils.isEmpty(inputItems.get(AUTHORITY))) {
            throw new IllegalArgumentException("authority");
        }

        if (TextUtils.isEmpty(inputItems.get(CLIENT_ID))) {
            throw new IllegalArgumentException("clientId");
        }

        // Redirect URI is only mandatory for the interactive flow.
        if (flowCode == MainActivity.ACQUIRE_TOKEN && TextUtils.isEmpty(inputItems.get(REDIRECT_URI))) {
            throw new IllegalArgumentException("redirect_uri");
        }

        if (flowCode == MainActivity.INVALIDATE_ACCESS_TOKEN
                && TextUtils.isEmpty(inputItems.get(USER_IDENTIFIER))) {
            throw new IllegalArgumentException("user identifier");
        }
    }

    /** Copies the parsed request map into the activity fields, applying defaults. */
    private void setAuthenticationData(final Map<String, String> inputItems) {
        mAuthority = inputItems.get(AUTHORITY);
        mResource = inputItems.get(RESOURCE);
        mRedirectUri = inputItems.get(REDIRECT_URI);
        mClientId = inputItems.get(CLIENT_ID);
        // Defaults: broker off, authority validation on.
        mUseBroker = inputItems.get(USE_BROKER) == null ? false : Boolean.valueOf(inputItems.get(USE_BROKER));
        mPromptBehavior = getPromptBehavior(inputItems.get(PROMPT_BEHAVIOR));
        mExtraQueryParam = inputItems.get(EXTRA_QUERY_PARAM);
        mValidateAuthority = inputItems.get(VALIDATE_AUTHORITY) == null ? true : Boolean.valueOf(
                inputItems.get(VALIDATE_AUTHORITY));

        if (!TextUtils.isEmpty(inputItems.get("unique_id"))) {
            mUserId = inputItems.get("unique_id");
        }

        // Login hint: "displayable_id" wins over "user_identifier" when both are present.
        if (!TextUtils.isEmpty(inputItems.get("displayable_id"))
                || !TextUtils.isEmpty(inputItems.get("user_identifier"))) {
            mLoginHint = inputItems.get("displayable_id") == null ? inputItems.get("user_identifier")
                    : inputItems.get("displayable_id");
        }

        final String correlationId = inputItems.get(CORRELATION_ID);
        if (!TextUtils.isEmpty(correlationId)) {
            mCorrelationId = UUID.fromString(correlationId);
        }
    }

    /** Maps a case-insensitive string to a {@link PromptBehavior}, or null when unrecognized. */
    PromptBehavior getPromptBehavior(final String inputPromptBehaviorString) {
        if (TextUtils.isEmpty(inputPromptBehaviorString)) {
            return null;
        }

        if (inputPromptBehaviorString.equalsIgnoreCase(PromptBehavior.Always.toString())) {
            return PromptBehavior.Always;
        } else if (inputPromptBehaviorString.equalsIgnoreCase(PromptBehavior.Auto.toString())) {
            return PromptBehavior.Auto;
        } else if (inputPromptBehaviorString.equalsIgnoreCase(PromptBehavior.FORCE_PROMPT.toString())) {
            return PromptBehavior.FORCE_PROMPT;
        } else if (inputPromptBehaviorString.equalsIgnoreCase(PromptBehavior.REFRESH_SESSION.toString())) {
            return PromptBehavior.REFRESH_SESSION;
        }

        return null;
    }

    // Interactive token acquisition through the ADAL UI.
    private void acquireToken() {
        mAuthenticationContext.acquireToken(SignInActivity.this, mResource, mClientId, mRedirectUri,
                mLoginHint, mPromptBehavior, mExtraQueryParam, getAdalCallback());
    }

    // Silent token acquisition from the cache / refresh token.
    private void acquireTokenSilent() {
        mAuthenticationContext.acquireTokenSilentAsync(mResource, mClientId, mUserId, getAdalCallback());
    }

    /**
     * Marks cached access tokens as expired for the three RT cache key variants
     * (user id, displayable id, and no-user) and returns how many entries changed.
     */
    private int expireAccessToken() {
        final ITokenCacheStore tokenCacheStore = mAuthenticationContext.getCache();
        int count = 0;

        final String cacheKeyWithUserId = CacheKey.createCacheKeyForRTEntry(mAuthority, mResource,
                mClientId, mUserId);
        final TokenCacheItem itemWithUserId = tokenCacheStore.getItem(cacheKeyWithUserId);
        count += tokenExpired(itemWithUserId, cacheKeyWithUserId, tokenCacheStore);

        final String cacheKeyWithDisplayableId = CacheKey.createCacheKeyForRTEntry(mAuthority,
                mResource, mClientId, mLoginHint);
        final TokenCacheItem itemWithDisplayable = tokenCacheStore.getItem(cacheKeyWithDisplayableId);
        count += tokenExpired(itemWithDisplayable, cacheKeyWithDisplayableId, tokenCacheStore);

        final String cacheKeyWithNoUser = CacheKey.createCacheKeyForRTEntry(mAuthority, mResource,
                mClientId, "");
        final TokenCacheItem itemWithNoUser = tokenCacheStore.getItem(cacheKeyWithNoUser);
        count += tokenExpired(itemWithNoUser, cacheKeyWithNoUser, tokenCacheStore);

        return count;
    }

    /**
     * Rewrites the item's expiry two hours into the past if it is not already expired.
     * @return 1 if the item was updated, 0 otherwise.
     */
    private int tokenExpired(final TokenCacheItem item, final String key,
                             final ITokenCacheStore tokenCacheStore) {
        final Calendar calendar = new GregorianCalendar();
        calendar.add(Calendar.HOUR, -2);
        final Date expiredTime = calendar.getTime();

        if (item != null && !TokenCacheItem.isTokenExpired(item.getExpiresOn())) {
            item.setExpiresOn(expiredTime);
            tokenCacheStore.setItem(key, item);
            return 1;
        }

        return 0;
    }

    /**
     * Expires access tokens, then overwrites the refresh token on every RT and MRRT
     * cache entry variant. Returns the number of refresh tokens invalidated.
     */
    private int invalidateRefreshToken() {
        expireAccessToken();

        int count = 0;
        // invalidate RT
        count += invalidateRefreshToken(CacheKey.createCacheKeyForRTEntry(mAuthority, mResource,
                mClientId, mUserId));
        count += invalidateRefreshToken(CacheKey.createCacheKeyForRTEntry(mAuthority, mResource,
                mClientId, mLoginHint));
        count += invalidateRefreshToken(CacheKey.createCacheKeyForRTEntry(mAuthority, mResource,
                mClientId, ""));

        // invalidate MRRT
        count += invalidateRefreshToken(CacheKey.createCacheKeyForMRRT(mAuthority, mClientId, mUserId));
        count += invalidateRefreshToken(CacheKey.createCacheKeyForMRRT(mAuthority, mClientId, mLoginHint));
        count += invalidateRefreshToken(CacheKey.createCacheKeyForMRRT(mAuthority, mClientId, ""));

        return count;
    }

    /**
     * Invalidates regular refresh tokens first, then the family (FRT) entries.
     * The returned count covers only the FRT entries.
     */
    private int invalidateFamilyRefreshToken() {
        invalidateRefreshToken();

        int count = 0;
        // invalidate FRT
        count += invalidateRefreshToken(CacheKey.createCacheKeyForFRT(mAuthority,
                AuthenticationConstants.MS_FAMILY_ID, mUserId));
        count += invalidateRefreshToken(CacheKey.createCacheKeyForFRT(mAuthority,
                AuthenticationConstants.MS_FAMILY_ID, mLoginHint));

        return count;
    }

    /**
     * Replaces the refresh token under the given cache key with an invalid sentinel.
     * @return 1 if an entry existed and was rewritten, 0 otherwise.
     */
    private int invalidateRefreshToken(final String key) {
        final ITokenCacheStore tokenCacheStore = mAuthenticationContext.getCache();
        final TokenCacheItem item = tokenCacheStore.getItem(key);
        if (item != null) {
            item.setRefreshToken(INVALID_REFRESH_TOKEN);
            tokenCacheStore.setItem(key, item);
            return 1;
        }

        return 0;
    }

    // Forwards success or failure to the result activity.
    private AuthenticationCallback<AuthenticationResult> getAdalCallback() {
        return new AuthenticationCallback<AuthenticationResult>() {

            @Override
            public void onSuccess(AuthenticationResult authenticationResult) {
                final Intent intent = createIntentFromAuthenticationResult(authenticationResult);
                launchResultActivity(intent);
            }

            @Override
            public void onError(Exception e) {
                final Intent intent = createIntentFromReturnedException(e);
                launchResultActivity(intent);
            }
        };
    }

    /** Flattens a successful {@link AuthenticationResult} into intent extras. */
    private Intent createIntentFromAuthenticationResult(final AuthenticationResult result) {
        final Intent intent = new Intent();
        intent.putExtra(Constants.ACCESS_TOKEN, result.getAccessToken());
        intent.putExtra(Constants.REFRESH_TOKEN, result.getRefreshToken());
        intent.putExtra(Constants.ACCESS_TOKEN_TYPE, result.getAccessTokenType());
        intent.putExtra(Constants.EXPIRES_ON, result.getExpiresOn().getTime());
        intent.putExtra(Constants.TENANT_ID, result.getTenantId());
        intent.putExtra(Constants.ID_TOKEN, result.getIdToken());

        if (result.getUserInfo() != null) {
            final UserInfo userInfo = result.getUserInfo();
            intent.putExtra(Constants.UNIQUE_ID, userInfo.getUserId());
            intent.putExtra(Constants.DISPLAYABLE_ID, userInfo.getDisplayableId());
            intent.putExtra(Constants.GIVEN_NAME, userInfo.getGivenName());
            intent.putExtra(Constants.FAMILY_NAME, userInfo.getFamilyName());
            intent.putExtra(Constants.IDENTITY_PROVIDER, userInfo.getIdentityProvider());
        }

        return intent;
    }

    /** Flattens a failure into intent extras; non-ADAL exceptions get a generic code. */
    private Intent createIntentFromReturnedException(final Exception e) {
        final Intent intent = new Intent();

        if (!(e instanceof AuthenticationException)) {
            intent.putExtra(Constants.ERROR, "unknown_exception");
            intent.putExtra(Constants.ERROR_DESCRIPTION, "unknown exception returned");
        } else {
            final AuthenticationException authenticationException = (AuthenticationException) e;
            intent.putExtra(Constants.ERROR, authenticationException.getCode().toString());
            intent.putExtra(Constants.ERROR_DESCRIPTION, authenticationException.getLocalizedMessage());
            intent.putExtra(Constants.ERROR_CAUSE, authenticationException.getCause());
        }

        return intent;
    }

    // Switches to the result screen and finishes this activity.
    private void launchResultActivity(final Intent intent) {
        intent.putExtra(Constants.READ_LOGS, ((AndroidAutomationApp) this.getApplication()).getADALLogs());
        intent.setClass(this.getApplicationContext(), ResultActivity.class);
        this.startActivity(intent);
        this.finish();
    }
}
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.base; import org.chromium.base.annotations.NoSideEffects; import java.util.Locale; /** * Utility class for Logging. * * <p> * Defines logging access points for each feature. They format and forward the logs to * {@link android.util.Log}, allowing to standardize the output, to make it easy to identify * the origin of logs, and enable or disable logging in different parts of the code. * </p> * <p> * Please make use of the formatting capability of the logging methods rather than doing * concatenations in the calling code. In the release builds of Chrome, debug and verbose log * calls will be stripped out of the binary. Concatenations and method calls however will still * remain and be executed. If they can't be avoided, use {@link Log#isEnabled(int)} to guard * such calls. Another possibility is to annotate methods to be called with {@link NoSideEffects}. * </p> * * Usage: * <pre> * Log.FEATURE.d("MyTag", "My %s message", awesome); * </pre> * * Logcat output: * <pre> * D/chromium.Feature (999): [MyTag] My awesome message * </pre> * * Set the log level for a given feature: * <pre> * adb shell setprop log.tag.chromium:Feature VERBOSE * </pre> * *<p> *<b>Notes:</b> * <ul> * <li>For loggers configured to log the origin of debug calls (see {@link #Log(String, boolean)}, * the tag provided for debug and verbose calls will be ignored and replaced in the log with * the file name and the line number.</li> * <li>New features or features not having a dedicated logger: please make a new one rather * than using {@link #ROOT}.</li> * </ul> * </p> */ public class Log { private static final String BASE_TAG = "chromium"; /** * Maximum length for the feature tag. * * A complete tag will look like <code>chromium:FooFeature</code>. 
Because of the 23 characters * limit on log tags, feature tags have to be restricted to fit. */ private static final int MAX_FEATURE_TAG_LENGTH = 23 - 1 - BASE_TAG.length(); /** * Logger for the "chromium" tag. * Note: Disabling logging for that one will not disable the others. */ public static final Log ROOT = new Log(null, true); @VisibleForTesting final String mTag; private final boolean mDebugWithStack; /** * Creates a new logging access point for the given tag. * @param featureTag The complete log tag will be displayed as "chromium.featureTag". * If <code>null</code>, it will only be "chromium". * @param debugWithStack Whether to replace the secondary tag name with the file name and line * number of the origin of the call for debug and verbose logs. * @throws IllegalArgumentException If <code>featureTag</code> is too long. The complete * tag has to fit within 23 characters. */ protected Log(String featureTag, boolean debugWithStack) { mDebugWithStack = debugWithStack; if (featureTag == null) { mTag = BASE_TAG; return; } else if (featureTag.length() > MAX_FEATURE_TAG_LENGTH) { throw new IllegalArgumentException( "The feature tag can be at most " + MAX_FEATURE_TAG_LENGTH + " characters."); } else { mTag = BASE_TAG + "." + featureTag; } } /** Returns whether this logger is currently allowed to send logs.*/ public boolean isEnabled(int level) { return android.util.Log.isLoggable(mTag, level); } /** Returns a formatted log message, using the supplied format and arguments.*/ @VisibleForTesting protected String formatLog(String secondaryTag, String messageTemplate, Object... params) { if (params != null && params.length != 0) { messageTemplate = String.format(Locale.US, messageTemplate, params); } return "[" + secondaryTag + "] " + messageTemplate; } /** * Sends a {@link android.util.Log#VERBOSE} log message. * * For optimization purposes, only the fixed parameters versions are visible. 
If you need more * than 7 parameters, consider building your log message using a function annotated with * {@link NoSideEffects}. * * @param secondaryTag Used to identify the source of a log message. It usually identifies the * class where the log call occurs. If the logger is configured to log the * call's origin (see {@link #Log(String, boolean)}, this parameter is * unused and will be replaced in the log message with the file name and * the line number. * @param messageTemplate The message you would like logged. It is to be specified as a format * string. * @param args Arguments referenced by the format specifiers in the format string. If the last * one is a {@link Throwable}, its trace will be printed. */ private void verbose(String secondaryTag, String messageTemplate, Object... args) { if (isEnabled(android.util.Log.VERBOSE)) { if (mDebugWithStack) secondaryTag = getCallOrigin(); String message = formatLog(secondaryTag, messageTemplate, args); Throwable tr = getThrowableToLog(args); if (tr != null) { android.util.Log.v(mTag, message, tr); } else { android.util.Log.v(mTag, message); } } } /** Sends a {@link android.util.Log#VERBOSE} log message. 0 arg version. */ public void v(String secondaryTag, String message) { verbose(secondaryTag, message); } /** Sends a {@link android.util.Log#VERBOSE} log message. 1 arg version. */ public void v(String secondaryTag, String messageTemplate, Object arg1) { verbose(secondaryTag, messageTemplate, arg1); } /** Sends a {@link android.util.Log#VERBOSE} log message. 2 args version */ public void v(String secondaryTag, String messageTemplate, Object arg1, Object arg2) { verbose(secondaryTag, messageTemplate, arg1, arg2); } /** Sends a {@link android.util.Log#VERBOSE} log message. 3 args version */ public void v( String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3) { verbose(secondaryTag, messageTemplate, arg1, arg2, arg3); } /** Sends a {@link android.util.Log#VERBOSE} log message. 
4 args version */ public void v(String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3, Object arg4) { verbose(secondaryTag, messageTemplate, arg1, arg2, arg3, arg4); } /** Sends a {@link android.util.Log#VERBOSE} log message. 5 args version */ public void v(String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3, Object arg4, Object arg5) { verbose(secondaryTag, messageTemplate, arg1, arg2, arg3, arg4, arg5); } /** Sends a {@link android.util.Log#VERBOSE} log message. 6 args version */ public void v(String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3, Object arg4, Object arg5, Object arg6) { verbose(secondaryTag, messageTemplate, arg1, arg2, arg3, arg4, arg5, arg6); } /** Sends a {@link android.util.Log#VERBOSE} log message. 7 args version */ public void v(String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3, Object arg4, Object arg5, Object arg6, Object arg7) { verbose(secondaryTag, messageTemplate, arg1, arg2, arg3, arg4, arg5, arg6, arg7); } /** * Sends a {@link android.util.Log#DEBUG} log message. * * For optimization purposes, only the fixed parameters versions are visible. If you need more * than 7 parameters, consider building your log message using a function annotated with * {@link NoSideEffects}. * * @param secondaryTag Used to identify the source of a log message. It usually identifies the * class where the log call occurs. If the logger is configured to log the * call's origin (see {@link #Log(String, boolean)}, this parameter is * unused and will be replaced in the log message with the file name and * the line number. * @param messageTemplate The message you would like logged. It is to be specified as a format * string. * @param args Arguments referenced by the format specifiers in the format string. If the last * one is a {@link Throwable}, its trace will be printed. 
*/ private void debug(String secondaryTag, String messageTemplate, Object... args) { if (isEnabled(android.util.Log.DEBUG)) { if (mDebugWithStack) secondaryTag = getCallOrigin(); String message = formatLog(secondaryTag, messageTemplate, args); Throwable tr = getThrowableToLog(args); if (tr != null) { android.util.Log.d(mTag, message, tr); } else { android.util.Log.d(mTag, message); } } } /** Sends a {@link android.util.Log#DEBUG} log message. 0 arg version. */ public void d(String secondaryTag, String message) { debug(secondaryTag, message); } /** Sends a {@link android.util.Log#DEBUG} log message. 1 arg version. */ public void d(String secondaryTag, String messageTemplate, Object arg1) { debug(secondaryTag, messageTemplate, arg1); } /** Sends a {@link android.util.Log#DEBUG} log message. 2 args version */ public void d(String secondaryTag, String messageTemplate, Object arg1, Object arg2) { debug(secondaryTag, messageTemplate, arg1, arg2); } /** Sends a {@link android.util.Log#DEBUG} log message. 3 args version */ public void d( String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3) { debug(secondaryTag, messageTemplate, arg1, arg2, arg3); } /** Sends a {@link android.util.Log#DEBUG} log message. 4 args version */ public void d(String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3, Object arg4) { debug(secondaryTag, messageTemplate, arg1, arg2, arg3, arg4); } /** Sends a {@link android.util.Log#DEBUG} log message. 5 args version */ public void d(String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3, Object arg4, Object arg5) { debug(secondaryTag, messageTemplate, arg1, arg2, arg3, arg4, arg5); } /** Sends a {@link android.util.Log#DEBUG} log message. 
6 args version */ public void d(String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3, Object arg4, Object arg5, Object arg6) { debug(secondaryTag, messageTemplate, arg1, arg2, arg3, arg4, arg5, arg6); } /** Sends a {@link android.util.Log#DEBUG} log message. 7 args version */ public void d(String secondaryTag, String messageTemplate, Object arg1, Object arg2, Object arg3, Object arg4, Object arg5, Object arg6, Object arg7) { debug(secondaryTag, messageTemplate, arg1, arg2, arg3, arg4, arg5, arg6, arg7); } /** * Sends an {@link android.util.Log#INFO} log message. * * @param secondaryTag Used to identify the source of a log message. It usually identifies the * class where the log call occurs. * @param messageTemplate The message you would like logged. It is to be specified as a format * string. * @param args Arguments referenced by the format specifiers in the format string. If the last * one is a {@link Throwable}, its trace will be printed. */ public void i(String secondaryTag, String messageTemplate, Object... args) { if (isEnabled(android.util.Log.INFO)) { String message = formatLog(secondaryTag, messageTemplate, args); Throwable tr = getThrowableToLog(args); if (tr != null) { android.util.Log.i(mTag, message, tr); } else { android.util.Log.i(mTag, message); } } } /** * Sends a {@link android.util.Log#WARN} log message. * * @param secondaryTag Used to identify the source of a log message. It usually identifies the * class where the log call occurs. * @param messageTemplate The message you would like logged. It is to be specified as a format * string. * @param args Arguments referenced by the format specifiers in the format string. If the last * one is a {@link Throwable}, its trace will be printed. */ public void w(String secondaryTag, String messageTemplate, Object... 
args) { if (isEnabled(android.util.Log.WARN)) { String message = formatLog(secondaryTag, messageTemplate, args); Throwable tr = getThrowableToLog(args); if (tr != null) { android.util.Log.w(mTag, message, tr); } else { android.util.Log.w(mTag, message); } } } /** * Sends an {@link android.util.Log#ERROR} log message. * * @param secondaryTag Used to identify the source of a log message. It usually identifies the * class where the log call occurs. * @param messageTemplate The message you would like logged. It is to be specified as a format * string. * @param args Arguments referenced by the format specifiers in the format string. If the last * one is a {@link Throwable}, its trace will be printed. */ public void e(String secondaryTag, String messageTemplate, Object... args) { if (isEnabled(android.util.Log.ERROR)) { String message = formatLog(secondaryTag, messageTemplate, args); Throwable tr = getThrowableToLog(args); if (tr != null) { android.util.Log.e(mTag, message, tr); } else { android.util.Log.e(mTag, message); } } } /** * What a Terrible Failure: Used for conditions that should never happen, and logged at * the {@link android.util.Log#ASSERT} level. Depending on the configuration, it might * terminate the process. * * @see android.util.Log#wtf(String, String, Throwable) * * @param secondaryTag Used to identify the source of a log message. It usually identifies the * class where the log call occurs. * @param messageTemplate The message you would like logged. It is to be specified as a format * string. * @param args Arguments referenced by the format specifiers in the format string. If the last * one is a {@link Throwable}, its trace will be printed. */ public void wtf(String secondaryTag, String messageTemplate, Object... 
args) { if (isEnabled(android.util.Log.ERROR)) { String message = formatLog(secondaryTag, messageTemplate, args); Throwable tr = getThrowableToLog(args); if (tr != null) { android.util.Log.wtf(mTag, message, tr); } else { android.util.Log.wtf(mTag, message); } } } private Throwable getThrowableToLog(Object[] args) { if (args == null || args.length == 0) return null; Object lastArg = args[args.length - 1]; if (!(lastArg instanceof Throwable)) return null; return (Throwable) lastArg; } /** Returns a string form of the origin of the log call, to be used as secondary tag.*/ private String getCallOrigin() { StackTraceElement[] st = Thread.currentThread().getStackTrace(); // The call stack should look like: // n [a variable number of calls depending on the vm used] // +0 getCallOrigin() // +1 privateLogFunction: verbose or debug // +2 logFunction: v or d // +3 caller int callerStackIndex; String logClassName = Log.class.getName(); for (callerStackIndex = 0; callerStackIndex < st.length; callerStackIndex++) { if (st[callerStackIndex].getClassName().equals(logClassName)) { callerStackIndex += 3; break; } } return st[callerStackIndex].getFileName() + ":" + st[callerStackIndex].getLineNumber(); } }
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. See accompanying LICENSE file.
 */
package org.apache.hadoop.security.authentication.server;

import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.util.Signer;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;
import java.util.Vector;

/**
 * Unit tests for {@link AuthenticationFilter}: configuration extraction, handler lifecycle,
 * signed-cookie issuing/validation, and management-operation short-circuiting.
 * All servlet/filter collaborators are Mockito mocks.
 */
public class TestAuthenticationFilter {

  /** Verifies init-param prefix stripping ("" and "foo.") in getConfiguration(). */
  @Test
  public void testGetConfiguration() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();

    FilterConfig config = Mockito.mock(FilterConfig.class);
    Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("");
    Mockito.when(config.getInitParameter("a")).thenReturn("A");
    Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("a")).elements());
    Properties props = filter.getConfiguration("", config);
    Assert.assertEquals("A", props.getProperty("a"));

    config = Mockito.mock(FilterConfig.class);
    Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("foo");
    Mockito.when(config.getInitParameter("foo.a")).thenReturn("A");
    Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("foo.a")).elements());
    props = filter.getConfiguration("foo.", config);
    Assert.assertEquals("A", props.getProperty("a"));
  }

  /** init() with no auth type configured must fail with ServletException. */
  @Test
  public void testInitEmpty() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>().elements());
      filter.init(config);
      Assert.fail();
    } catch (ServletException ex) {
      // Expected
    } catch (Exception ex) {
      Assert.fail();
    } finally {
      filter.destroy();
    }
  }

  /**
   * Scriptable AuthenticationHandler used by the tests. Static flags record lifecycle calls;
   * behavior is driven by the "management.operation.return" and "expired.token" properties.
   */
  public static class DummyAuthenticationHandler implements AuthenticationHandler {
    public static boolean init;                       // set true by init()
    public static boolean managementOperationReturn;  // value managementOperation() returns
    public static boolean destroy;                    // set true by destroy()
    public static boolean expired;                    // when true, issued tokens expire immediately

    public static final String TYPE = "dummy";

    // Resets lifecycle flags between test cases (note: does not reset the behavior flags).
    public static void reset() {
      init = false;
      destroy = false;
    }

    @Override
    public void init(Properties config) throws ServletException {
      init = true;
      managementOperationReturn = config.getProperty("management.operation.return", "true").equals("true");
      expired = config.getProperty("expired.token", "false").equals("true");
    }

    @Override
    public boolean managementOperation(AuthenticationToken token, HttpServletRequest request,
                                       HttpServletResponse response)
        throws IOException, AuthenticationException {
      // Returning false tells the filter the request was fully handled here (202 Accepted).
      if (!managementOperationReturn) {
        response.setStatus(HttpServletResponse.SC_ACCEPTED);
      }
      return managementOperationReturn;
    }

    @Override
    public void destroy() {
      destroy = true;
    }

    @Override
    public String getType() {
      return TYPE;
    }

    @Override
    public AuthenticationToken authenticate(HttpServletRequest request, HttpServletResponse response)
        throws IOException, AuthenticationException {
      AuthenticationToken token = null;
      String param = request.getParameter("authenticated");
      if (param != null && param.equals("true")) {
        token = new AuthenticationToken("u", "p", "t");
        // expires=0 simulates an already-expired token at issue time
        token.setExpires((expired) ? 0 : System.currentTimeMillis() + 1000);
      } else {
        response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
      }
      return token;
    }
  }

  /** Exercises init() across handler aliases, secrets, cookie config, and handler lifecycle. */
  @Test
  public void testInit() throws Exception {

    // minimal configuration & simple auth handler (Pseudo)
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("simple");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TOKEN_VALIDITY)).thenReturn("1000");
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                                         AuthenticationFilter.AUTH_TOKEN_VALIDITY)).elements());
      filter.init(config);
      Assert.assertEquals(PseudoAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
      Assert.assertTrue(filter.isRandomSecret());
      Assert.assertNull(filter.getCookieDomain());
      Assert.assertNull(filter.getCookiePath());
      Assert.assertEquals(1000, filter.getValidity());
    } finally {
      filter.destroy();
    }

    // custom secret
    filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("simple");
      Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                                         AuthenticationFilter.SIGNATURE_SECRET)).elements());
      filter.init(config);
      Assert.assertFalse(filter.isRandomSecret());
    } finally {
      filter.destroy();
    }

    // custom cookie domain and cookie path
    filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("simple");
      Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_DOMAIN)).thenReturn(".foo.com");
      Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_PATH)).thenReturn("/bar");
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                                         AuthenticationFilter.COOKIE_DOMAIN,
                                         AuthenticationFilter.COOKIE_PATH)).elements());
      filter.init(config);
      Assert.assertEquals(".foo.com", filter.getCookieDomain());
      Assert.assertEquals("/bar", filter.getCookiePath());
    } finally {
      filter.destroy();
    }

    // authentication handler lifecycle, and custom impl
    DummyAuthenticationHandler.reset();
    filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        "management.operation.return")).elements());
      filter.init(config);
      Assert.assertTrue(DummyAuthenticationHandler.init);
    } finally {
      filter.destroy();
      Assert.assertTrue(DummyAuthenticationHandler.destroy);
    }

    // kerberos auth handler
    filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("kerberos");
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
      filter.init(config);
    } catch (ServletException ex) {
      // Expected
    } finally {
      Assert.assertEquals(KerberosAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
      filter.destroy();
    }
  }

  /** getRequestURL() must re-append the query string to the request URL. */
  @Test
  public void testGetRequestURL() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        "management.operation.return")).elements());
      filter.init(config);

      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
      Mockito.when(request.getQueryString()).thenReturn("a=A&b=B");

      Assert.assertEquals("http://foo:8080/bar?a=A&b=B", filter.getRequestURL(request));
    } finally {
      filter.destroy();
    }
  }

  /** A valid, correctly-signed cookie round-trips through getToken(). */
  @Test
  public void testGetToken() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        AuthenticationFilter.SIGNATURE_SECRET,
                        "management.operation.return")).elements());
      filter.init(config);

      AuthenticationToken token = new AuthenticationToken("u", "p", DummyAuthenticationHandler.TYPE);
      token.setExpires(System.currentTimeMillis() + 1000);
      Signer signer = new Signer("secret".getBytes());
      String tokenSigned = signer.sign(token.toString());

      Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});

      AuthenticationToken newToken = filter.getToken(request);

      Assert.assertEquals(token.toString(), newToken.toString());
    } finally {
      filter.destroy();
    }
  }

  /** An expired token in the cookie must make getToken() throw AuthenticationException. */
  @Test
  public void testGetTokenExpired() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        AuthenticationFilter.SIGNATURE_SECRET,
                        "management.operation.return")).elements());
      filter.init(config);

      // NOTE(review): token type is "invalidtype" AND it is expired; expiration appears to be
      // what is being exercised here — confirm which check fires first in getToken().
      AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
      token.setExpires(System.currentTimeMillis() - 1000);
      Signer signer = new Signer("secret".getBytes());
      String tokenSigned = signer.sign(token.toString());

      Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});

      try {
        filter.getToken(request);
        Assert.fail();
      } catch (AuthenticationException ex) {
        // Expected
      } catch (Exception ex) {
        Assert.fail();
      }
    } finally {
      filter.destroy();
    }
  }

  /** A token whose type does not match the handler must be rejected by getToken(). */
  @Test
  public void testGetTokenInvalidType() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        AuthenticationFilter.SIGNATURE_SECRET,
                        "management.operation.return")).elements());
      filter.init(config);

      AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
      token.setExpires(System.currentTimeMillis() + 1000);
      Signer signer = new Signer("secret".getBytes());
      String tokenSigned = signer.sign(token.toString());

      Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});

      try {
        filter.getToken(request);
        Assert.fail();
      } catch (AuthenticationException ex) {
        // Expected
      } catch (Exception ex) {
        Assert.fail();
      }
    } finally {
      filter.destroy();
    }
  }

  /** Unauthenticated request: filter must 401 and never invoke the chain. */
  @Test
  public void testDoFilterNotAuthenticated() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        "management.operation.return")).elements());
      filter.init(config);

      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));

      HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

      FilterChain chain = Mockito.mock(FilterChain.class);
      Mockito.doAnswer(
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
            // The chain must never be reached for an unauthenticated request.
            Assert.fail();
            return null;
          }
        }
      ).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());

      filter.doFilter(request, response, chain);

      Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
    } finally {
      filter.destroy();
    }
  }

  /**
   * Shared driver for the doFilter authentication scenarios.
   *
   * @param withDomainPath configure cookie domain/path and assert they appear on the set cookie
   * @param invalidToken   send a garbage auth cookie (should be ignored and re-authenticated)
   * @param expired        handler issues an immediately-expired token (no cookie must be set)
   */
  private void _testDoFilterAuthentication(boolean withDomainPath,
                                           boolean invalidToken,
                                           boolean expired) throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter("expired.token")).
        thenReturn(Boolean.toString(expired));
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TOKEN_VALIDITY)).thenReturn("1000");
      Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                                         AuthenticationFilter.AUTH_TOKEN_VALIDITY,
                                         AuthenticationFilter.SIGNATURE_SECRET,
                                         "management.operation.return",
                                         "expired.token")).elements());
      if (withDomainPath) {
        Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_DOMAIN)).thenReturn(".foo.com");
        Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_PATH)).thenReturn("/bar");
        // NOTE(review): this re-stub omits "expired.token" from the names enumeration; harmless
        // today because withDomainPath is only exercised with expired=false — verify if changed.
        Mockito.when(config.getInitParameterNames()).thenReturn(
          new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                                           AuthenticationFilter.AUTH_TOKEN_VALIDITY,
                                           AuthenticationFilter.SIGNATURE_SECRET,
                                           AuthenticationFilter.COOKIE_DOMAIN,
                                           AuthenticationFilter.COOKIE_PATH,
                                           "management.operation.return")).elements());
      }

      filter.init(config);

      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getParameter("authenticated")).thenReturn("true");
      Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
      Mockito.when(request.getQueryString()).thenReturn("authenticated=true");

      if (invalidToken) {
        Mockito.when(request.getCookies()).thenReturn(
          new Cookie[] { new Cookie(AuthenticatedURL.AUTH_COOKIE, "foo")}
        );
      }

      HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

      FilterChain chain = Mockito.mock(FilterChain.class);
      final boolean[] calledDoFilter = new boolean[1];
      Mockito.doAnswer(
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
            calledDoFilter[0] = true;
            return null;
          }
        }
      ).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());

      // Capture the auth cookie the filter sets on the response.
      final Cookie[] setCookie = new Cookie[1];
      Mockito.doAnswer(
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            setCookie[0] = (Cookie) args[0];
            return null;
          }
        }
      ).when(response).addCookie(Mockito.<Cookie>anyObject());

      filter.doFilter(request, response, chain);

      if (expired) {
        Mockito.verify(response, Mockito.never()).
          addCookie(Mockito.any(Cookie.class));
      } else {
        Assert.assertNotNull(setCookie[0]);
        Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
        Assert.assertTrue(setCookie[0].getValue().contains("u="));
        Assert.assertTrue(setCookie[0].getValue().contains("p="));
        Assert.assertTrue(setCookie[0].getValue().contains("t="));
        Assert.assertTrue(setCookie[0].getValue().contains("e="));
        Assert.assertTrue(setCookie[0].getValue().contains("s="));
        Assert.assertTrue(calledDoFilter[0]);

        Signer signer = new Signer("secret".getBytes());
        String value = signer.verifyAndExtract(setCookie[0].getValue());
        AuthenticationToken token = AuthenticationToken.parse(value);
        // validity=1000s -> expires ~ now + 1000*1000 ms; 100ms slack (NOTE: wall-clock
        // dependent, may be flaky on a slow/loaded machine).
        Assert.assertEquals(System.currentTimeMillis() + 1000 * 1000, token.getExpires(), 100);

        if (withDomainPath) {
          Assert.assertEquals(".foo.com", setCookie[0].getDomain());
          Assert.assertEquals("/bar", setCookie[0].getPath());
        } else {
          Assert.assertNull(setCookie[0].getDomain());
          Assert.assertNull(setCookie[0].getPath());
        }
      }
    } finally {
      filter.destroy();
    }
  }

  @Test
  public void testDoFilterAuthentication() throws Exception {
    _testDoFilterAuthentication(false, false, false);
  }

  @Test
  public void testDoFilterAuthenticationImmediateExpiration() throws Exception {
    _testDoFilterAuthentication(false, false, true);
  }

  @Test
  public void testDoFilterAuthenticationWithInvalidToken() throws Exception {
    _testDoFilterAuthentication(false, true, false);
  }

  @Test
  public void testDoFilterAuthenticationWithDomainPath() throws Exception {
    _testDoFilterAuthentication(true, false, false);
  }

  /** Valid cookie: chain is invoked with a request wrapper exposing user/principal. */
  @Test
  public void testDoFilterAuthenticated() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        "management.operation.return")).elements());
      filter.init(config);

      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));

      AuthenticationToken token = new AuthenticationToken("u", "p", "t");
      token.setExpires(System.currentTimeMillis() + 1000);
      // NOTE(review): the filter uses a random secret here (no SIGNATURE_SECRET configured),
      // yet this cookie is signed with "secret" — presumably the filter still accepts it
      // because token type "t" + signature handling differ per implementation; confirm.
      Signer signer = new Signer("secret".getBytes());
      String tokenSigned = signer.sign(token.toString());

      Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
      Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});

      HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

      FilterChain chain = Mockito.mock(FilterChain.class);
      Mockito.doAnswer(
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            HttpServletRequest request = (HttpServletRequest) args[0];
            Assert.assertEquals("u", request.getRemoteUser());
            Assert.assertEquals("p", request.getUserPrincipal().getName());
            return null;
          }
        }
      ).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());

      filter.doFilter(request, response, chain);

    } finally {
      filter.destroy();
    }
  }

  /** Expired cookie: 401 is sent, the cookie is cleared, and the chain is never invoked. */
  @Test
  public void testDoFilterAuthenticatedExpired() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        "management.operation.return")).elements());
      filter.init(config);

      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));

      AuthenticationToken token = new AuthenticationToken("u", "p", DummyAuthenticationHandler.TYPE);
      token.setExpires(System.currentTimeMillis() - 1000);
      Signer signer = new Signer("secret".getBytes());
      String tokenSigned = signer.sign(token.toString());

      Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
      Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});

      HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

      FilterChain chain = Mockito.mock(FilterChain.class);
      Mockito.doAnswer(
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
            Assert.fail();
            return null;
          }
        }
      ).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());

      final Cookie[] setCookie = new Cookie[1];
      Mockito.doAnswer(
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            setCookie[0] = (Cookie) args[0];
            return null;
          }
        }
      ).when(response).addCookie(Mockito.<Cookie>anyObject());

      filter.doFilter(request, response, chain);

      Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());

      // The filter must clear the auth cookie (empty value) on expiration.
      Assert.assertNotNull(setCookie[0]);
      Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
      Assert.assertEquals("", setCookie[0].getValue());
    } finally {
      filter.destroy();
    }
  }

  /** Wrong token type: 401 is sent, cookie cleared, chain never invoked. */
  @Test
  public void testDoFilterAuthenticatedInvalidType() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("true");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
        DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        "management.operation.return")).elements());
      filter.init(config);

      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));

      AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
      token.setExpires(System.currentTimeMillis() + 1000);
      Signer signer = new Signer("secret".getBytes());
      String tokenSigned = signer.sign(token.toString());

      Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
      Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});

      HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

      FilterChain chain = Mockito.mock(FilterChain.class);
      Mockito.doAnswer(
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
            Assert.fail();
            return null;
          }
        }
      ).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());

      final Cookie[] setCookie = new Cookie[1];
      Mockito.doAnswer(
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            setCookie[0] = (Cookie) args[0];
            return null;
          }
        }
      ).when(response).addCookie(Mockito.<Cookie>anyObject());

      filter.doFilter(request, response, chain);

      Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());

      Assert.assertNotNull(setCookie[0]);
      Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
      Assert.assertEquals("", setCookie[0].getValue());
    } finally {
      filter.destroy();
    }
  }

  /** When the handler claims the request (management op), the filter returns 202 and stops. */
  @Test
  public void testManagementOperation() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
      FilterConfig config = Mockito.mock(FilterConfig.class);
      Mockito.when(config.getInitParameter("management.operation.return")).
        thenReturn("false");
      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).
        thenReturn(DummyAuthenticationHandler.class.getName());
      Mockito.when(config.getInitParameterNames()).thenReturn(
        new Vector<String>(
          Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                        "management.operation.return")).elements());
      filter.init(config);

      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
      Mockito.when(request.getRequestURL()).
        thenReturn(new StringBuffer("http://foo:8080/bar"));

      HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

      FilterChain chain = Mockito.mock(FilterChain.class);

      filter.doFilter(request, response, chain);
      Mockito.verify(response).setStatus(HttpServletResponse.SC_ACCEPTED);
      Mockito.verifyNoMoreInteractions(response);

      Mockito.reset(request);
      Mockito.reset(response);

      // Even with a valid auth cookie, a management operation short-circuits to 202.
      AuthenticationToken token = new AuthenticationToken("u", "p", "t");
      token.setExpires(System.currentTimeMillis() + 1000);
      Signer signer = new Signer("secret".getBytes());
      String tokenSigned = signer.sign(token.toString());
      Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
      Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});

      filter.doFilter(request, response, chain);

      Mockito.verify(response).setStatus(HttpServletResponse.SC_ACCEPTED);
      Mockito.verifyNoMoreInteractions(response);
    } finally {
      filter.destroy();
    }
  }
}
package com.maxheapsize.quant.testng;

import com.maxheapsize.quant.ClassTester;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * TestNG-specific {@link ClassTester} implementation.
 *
 * <p>Examines a single class and reports public void methods that are missing a
 * TestNG {@code @Test} annotation, as well as test methods whose groups do not
 * match a configured set of valid test groups. Instances are created through the
 * nested {@link Builder}; all examination happens eagerly in the constructor.
 */
public final class TestNGClassTester extends TestNGBase implements ClassTester {

  // True when the class itself carries a @Test annotation in a valid group,
  // which exempts the individual methods from the per-method group check.
  private boolean testAnnotationWithValidTestGroupOnClass;
  private Class klass;
  private List<String> validTestGroups = new ArrayList<String>();
  private List<Method> nonTestAnnotatedPublicVoidMethods = new ArrayList<Method>();
  private List<Method> methodsWithWrongTestGroup = new ArrayList<Method>();
  private boolean useOnlyAnnotatedMethods = false;
  private boolean ignoreAbstractClass = true;
  private TestNGAnnotationInspector testNGAnnotationInspector;
  // Maps each encountered test group name to the number of methods in it.
  private Map<String, Integer> testGroups = new HashMap<String, Integer>();

  // Builder

  /**
   * Fluent builder for {@link TestNGClassTester}. Collects the class under
   * examination plus the optional group / annotation / abstract-class settings.
   */
  public static class Builder {

    private final Class klass;
    private List<String> validTestGroups = new ArrayList<String>();
    private boolean useOnlyAnnotatedMethods = false;
    private boolean ignoreAbstractClass = true;

    /**
     * Constructs a ClassTester which will examine the given class.
     *
     * @param klass Class to be examined.
     */
    public Builder(Class klass) {
      super();
      this.klass = klass;
    }

    /**
     * Add a test group to the pool of test groups which all test methods should belong to.
     *
     * @param testGroupName name of the test group to accept
     *
     * @return Builder
     */
    public final Builder addTestGroup(String testGroupName) {
      validTestGroups.add(testGroupName);
      return this;
    }

    /**
     * Only check methods which are annotated. Do not report methods which are
     * public void but do not have a @Test annotation.
     *
     * @return Builder
     */
    public final Builder useOnlyAnnotatedMethods() {
      useOnlyAnnotatedMethods = true;
      return this;
    }

    /**
     * Also run the checks against abstract classes.
     *
     * @return Builder
     */
    public final Builder doNotIgnoreAbstractClass() {
      ignoreAbstractClass = false;
      return this;
    }

    /**
     * Sets the tests groups which the tests need to be in. Replaces any groups
     * previously registered via {@link #addTestGroup(String)}.
     *
     * @param groupNames names of the testgroups
     *
     * @return Builder
     */
    public final Builder setTestGroups(List<String> groupNames) {
      validTestGroups = groupNames;
      return this;
    }

    /**
     * Build the ClassTester.
     *
     * @return ClassTester
     */
    public final ClassTester build() {
      return new TestNGClassTester(this);
    }
  }

  /**
   * Creates a Builder to create the ClassTester.
   *
   * @param klass Class to be examined.
   *
   * @return Builder
   */
  public static Builder createBuilder(Class klass) {
    return new Builder(klass);
  }

  private TestNGClassTester(Builder builder) {
    super(builder.klass);
    testNGAnnotationInspector = new TestNGAnnotationInspector();
    this.klass = builder.klass;
    this.validTestGroups = builder.validTestGroups;
    this.useOnlyAnnotatedMethods = builder.useOnlyAnnotatedMethods;
    this.ignoreAbstractClass = builder.ignoreAbstractClass;
    // Examine eagerly so the public query methods are simple field reads.
    examineClass();
  }

  // Runs every check once and caches the results in fields.
  private void examineClass() {
    setPublicVoidMethods(getPublicVoidMethods(klass));
    nonTestAnnotatedPublicVoidMethods =
        testNGAnnotationInspector.getNonTestAnnotatedPublicVoidMethod(getPublicVoidMethods());
    testAnnotationWithValidTestGroupOnClass = checkForTestAnnotationWithValidTestGroupOnClass();
    methodsWithWrongTestGroup = getMethodsNotConformingToSpecification();
    testGroups = getTestGroups();
  }

  // Counts how many public void methods declare each test group.
  private Map<String, Integer> getTestGroups() {
    Map<String, Integer> result = new HashMap<String, Integer>();
    for (Method method : getPublicVoidMethods()) {
      for (Annotation annotation : method.getAnnotations()) {
        if (testNGAnnotationInspector.isTestAnnotation(annotation)) {
          for (String testAnnotationGroup
              : testNGAnnotationInspector.getTestGroupsFromAnnotation(annotation)) {
            // Increment the per-group counter (null means "not seen yet").
            Integer count = result.get(testAnnotationGroup);
            result.put(testAnnotationGroup, count == null ? 1 : count + 1);
          }
        }
      }
    }
    return result;
  }

  // Public methods

  /**
   * Returns true when the class violates the specification: at least one test
   * method lacks a valid test group, and the class is not an abstract class that
   * is being ignored.
   */
  public boolean isInvalidTestClass() {
    return !(allTestMethodsHaveValidTestGroup() || (isAbstractClass() && ignoreAbstractClass));
  }

  /** Convenience negation of {@link #isInvalidTestClass()}. */
  public boolean isValidTestClass() {
    return !isInvalidTestClass();
  }

  /** Returns a map from test group name to the number of methods in that group. */
  public Map<String, Integer> getTestGroupCount() {
    return testGroups;
  }

  /**
   * Produces a human-readable report of all violations found on the examined
   * class: methods missing a @Test annotation and methods in the wrong group.
   *
   * @return multi-line report text
   */
  public String reportViolation() {
    StringBuilder result = new StringBuilder();
    result.append("\nReport for Class ").append(klass.getName());
    result.append("\n");
    result.append("Ignore abstract classes: ").append(ignoreAbstractClass);
    result.append("\n");
    if (!validTestGroups.isEmpty()) {
      result.append("Specified TestGroups : ");
      for (String validTestGroup : validTestGroups) {
        result.append(" + ").append(validTestGroup);
      }
      result.append("\n");
    }
    if (!nonTestAnnotatedPublicVoidMethods.isEmpty()) {
      result.append(reportMethods("Non TestAnnotated methods", nonTestAnnotatedPublicVoidMethods));
    }
    if (!methodsWithWrongTestGroup.isEmpty()) {
      result.append(reportMethods("Methods with wrong test group: ", methodsWithWrongTestGroup));
    }
    return result.toString();
  }

  // Private Methods

  private boolean isAbstractClass() {
    int modifier = klass.getModifiers();
    return Modifier.isAbstract(modifier);
  }

  // A class-level valid @Test annotation makes every method acceptable;
  // otherwise every method must individually pass the group check.
  private boolean allTestMethodsHaveValidTestGroup() {
    if (testAnnotationWithValidTestGroupOnClass) {
      return true;
    } else if (methodsWithWrongTestGroup.isEmpty()) {
      return true;
    }
    return false;
  }

  private List<Method> getMethodsNotConformingToSpecification() {
    List<Method> result = new ArrayList<Method>();
    for (Method publicVoidMethod : getPublicVoidMethods()) {
      if (!methodConformsToSpecification(publicVoidMethod)) {
        result.add(publicVoidMethod);
      }
    }
    return result;
  }

  private boolean checkForTestAnnotationWithValidTestGroupOnClass() {
    for (Annotation annotation : klass.getAnnotations()) {
      if (checkTestAnnotationForTestGroups(annotation)) {
        return true;
      }
    }
    return false;
  }

  // True when the given annotation is a @Test annotation whose groups intersect
  // the expected groups (or when no groups are expected at all).
  private boolean checkTestAnnotationForTestGroups(Annotation annotation) {
    if (testNGAnnotationInspector.isTestAnnotation(annotation)) {
      String[] testGroupsOnAnnotation =
          testNGAnnotationInspector.getTestGroupsFromAnnotation(annotation);
      for (String group : testGroupsOnAnnotation) {
        if (testAnnotationGroupIsInExpectedTestGroup(group) || expectedTestGroupIsEmpty()) {
          return true;
        }
      }
    }
    return false;
  }

  private boolean expectedTestGroupIsEmpty() {
    return validTestGroups.isEmpty();
  }

  private boolean testAnnotationGroupIsInExpectedTestGroup(String group) {
    return validTestGroups.contains(group);
  }

  // Formats one report section: a header line followed by one line per method.
  private String reportMethods(String header, List<Method> methods) {
    StringBuilder result = new StringBuilder();
    result.append("* ").append(header).append("\n");
    for (Method method : methods) {
      result.append(" - ").append(method.getName()).append("\n");
    }
    return result.toString();
  }

  // A method conforms when it carries a @Test annotation in an expected group.
  // A method with no @Test annotation at all conforms only in
  // useOnlyAnnotatedMethods mode (unannotated methods are then ignored).
  private boolean methodConformsToSpecification(Method method) {
    Annotation[] annotations = method.getAnnotations();
    boolean hasTestAnnotations = false;
    for (Annotation annotation : annotations) {
      if (testNGAnnotationInspector.isTestAnnotation(annotation)) {
        hasTestAnnotations = true;
        if (expectedTestGroupIsEmpty()) {
          return true;
        } else {
          String[] testAnnotationGroups =
              testNGAnnotationInspector.getTestGroupsFromAnnotation(annotation);
          if (testAnnotationGroups.length == 0) {
            return false;
          }
          for (String testGroup : testAnnotationGroups) {
            if (testAnnotationGroupIsInExpectedTestGroup(testGroup)) {
              return true;
            }
          }
        }
      }
    }
    return !hasTestAnnotations && useOnlyAnnotatedMethods;
  }
}
package com.andrewkeeton.divide.and.conquer.card.shuffler;

import static junit.framework.Assert.*;

import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;

/**
 * A pile (or deck) of keyed cards supporting bucket-style sorting into
 * sub-piles, complete deck sorting via DEAL / PICKUP / BOTTOM moves, and
 * cryptographically seeded shuffling.
 *
 * <p>Each {@code Card} carries an integer key; a pile owns the contiguous,
 * inclusive key range [{@code mMinKey}, {@code mMaxKey}].
 */
public class PileOfCards<T> {

    // Provides fresh entropy for each shuffle pass of shuffle(Random, int, int).
    private static SecureRandom mRandomSeeder = new SecureRandom();
    // Default RNG used by the no-arg shuffle().
    private static SecureRandom mRandomShuffler = new SecureRandom();

    public ArrayList<Card<T>> mCards;
    public int mMinKey, mMaxKey;

    /** Creates an empty pile with an unspecified (zero) key range. */
    public PileOfCards() {
        mCards = new ArrayList<Card<T>>();
    }

    /**
     * Creates a pile with one card per key in [minKey, maxKey].
     *
     * @param values per-card payloads in key order, or null for valueless cards;
     *               when non-null, its size must equal the key-range size
     * @param minKey smallest key (inclusive)
     * @param maxKey largest key (inclusive)
     */
    public PileOfCards(List<T> values, int minKey, int maxKey) {
        int numCards = maxKey - minKey + 1;
        if (values != null) {
            assertEquals("values.size() == numCards", numCards, values.size());
        }
        mCards = new ArrayList<Card<T>>(numCards);
        mMinKey = minKey;
        mMaxKey = maxKey;
        for (int i = 0; i < numCards; i++) {
            if (values != null) {
                mCards.add(new Card<T>(minKey + i, values.get(i)));
            } else {
                mCards.add(new Card<T>(minKey + i, null));
            }
        }
    }

    /**
     * Sorts the pile of cards into separate piles (a la bucket sort) and returns the new piles.
     * The list of moves is added to the moves list.
     *
     * @param numOutPiles The number of piles to sort into.
     * @param moves Pile numbers are added to this list as the pile is sorted.
     */
    public ArrayList<PileOfCards<T>> sortIntoPiles(int numOutPiles, ArrayList<Move> moves) {
        ArrayList<PileOfCards<T>> outPiles = new ArrayList<PileOfCards<T>>(numOutPiles);
        PileOfCards<T> inPile = this;

        // Pile capacity is derived from the key range, not from mCards.size().
        int numCards = inPile.mMaxKey - inPile.mMinKey + 1;
        int baseNumCardsPerOutPile = numCards / numOutPiles;
        int remainingNumCards = numCards % numOutPiles;
        int minKey = inPile.mMinKey;

        // Distribute the key ranges amongst the out piles.
        for (int i = 0; i < numOutPiles; i++) {
            PileOfCards<T> outPile = new PileOfCards<T>();
            outPile.mMinKey = minKey;
            outPile.mMaxKey = minKey + baseNumCardsPerOutPile - 1;
            // Distribute one remaining card to each out pile while there are still remaining cards.
            if (remainingNumCards > 0) {
                outPile.mMaxKey++;
                remainingNumCards--;
            }
            minKey = outPile.mMaxKey + 1;
            outPiles.add(outPile);
        }

        // Move the cards from the in pile into the appropriate out pile.
        // Cards are inserted at index 0, i.e. dealt face-down onto each pile.
        for (Card<T> card : inPile.mCards) {
            int pileNum = 0;
            for (PileOfCards<T> outPile : outPiles) {
                if (card.mKey >= outPile.mMinKey && card.mKey <= outPile.mMaxKey) {
                    outPile.mCards.add(0, card);
                    moves.add(new Move(Move.MoveType.DEAL, pileNum));
                    break;
                }
                pileNum++;
            }
        }

        return outPiles;
    }

    /**
     * Sorts this deck (pile) completely using a series of deals, pickups, and "bottoms."
     *
     * @param numOutPiles Number of piles to deal into.
     * @return A list of moves that describe how this pile was sorted.
     */
    public ArrayList<Move> sortDeckCompletely(int numOutPiles) {
        int deckStartingSize = this.size();

        // The starting deck (pile) will soon split into a deck consisting of many piles.
        ArrayList<PileOfCards<T>> deck = new ArrayList<PileOfCards<T>>();
        deck.add(this);

        // Upper bound on moves: ceil(log_numOutPiles(n)) full passes over n cards.
        final int maxNumMoves =
                (int) Math.ceil(logOfBase(numOutPiles, deckStartingSize)) * deckStartingSize;
        int numMovesLeft = maxNumMoves;
        ArrayList<Move> moves = new ArrayList<Move>(maxNumMoves);

        PileOfCards<T> deckPile = null;
        while (true) {
            // Check for piles of one.
            int numCardsMovedToBottom = 0;
            while (true) {
                if (numMovesLeft == 0) {
                    break;
                }
                deckPile = deck.remove(0);
                if (deckPile.size() != 1) {
                    break;
                }
                // Piles of one don't need to be sorted - just move them to the bottom.
                numMovesLeft--;
                numCardsMovedToBottom++;
                deck.add(deckPile);
            }
            if (numCardsMovedToBottom > 0) {
                moves.add(new Move(Move.MoveType.BOTTOM, numCardsMovedToBottom));
            }
            if (numMovesLeft == 0) {
                break;
            }
            assertTrue("deckPile.size() > 1", deckPile.size() > 1);

            // Sort piles from the deck that have more than one card into new piles.
            // Then pick up those piles and put them on the bottom of the deck.
            numMovesLeft -= deckPile.size();
            ArrayList<PileOfCards<T>> piles = deckPile.sortIntoPiles(numOutPiles, moves);
            int sizeOfPiles = 0;
            for (PileOfCards<T> pile : piles) {
                if (pile.size() > 0) {
                    sizeOfPiles += pile.size();
                    deck.add(pile);
                }
            }
            moves.add(new Move(Move.MoveType.PICKUP, sizeOfPiles));

            assertTrue("numMovesLeft >= 0", numMovesLeft >= 0);
            if (numMovesLeft == 0) {
                break;
            }
        }

        // Make this pile look like the deck of one-card piles.
        mCards.clear();
        Card<T> cardPrev = null;
        for (PileOfCards<T> pile : deck) {
            assertTrue("pile.size() == 1", pile.size() == 1);
            Card<T> card = pile.mCards.get(0);
            if (cardPrev != null) {
                assertTrue("card.mKey > cardPrev.mKey", card.mKey > cardPrev.mKey);
            }
            // BUG FIX: track the previous card unconditionally. The original only
            // assigned cardPrev inside the (cardPrev != null) branch, so cardPrev
            // never became non-null and the ascending-key assertion above was dead.
            cardPrev = card;
            mCards.add(card);
        }

        assertTrue("this.size() == deckStartingSize", this.size() == deckStartingSize);
        return moves;
    }

    /**
     * Overwrites the payload of each card, in order.
     *
     * @param values new payloads; must have exactly one entry per card
     */
    public void setCardValues(List<T> values) {
        assertTrue("values.size() == mCards.size()", values.size() == mCards.size());
        for (int i = 0; i < values.size(); i++) {
            mCards.get(i).mValue = values.get(i);
        }
    }

    /** Shuffles with the default RNG and enough passes for a 999-card deck. */
    public void shuffle() {
        shuffle(mRandomShuffler, 8, 134); // (2^[64 bits])^134 > 999!
    }

    /** Deterministic shuffle from a fixed seed (useful for reproducible tests). */
    public void shuffle(long seed) {
        Collections.shuffle(mCards, new Random(seed));
    }

    /**
     * Shuffles repeatedly, reseeding the given RNG from SecureRandom entropy
     * before each pass so the total state space exceeds a single seed's 64 bits.
     *
     * @param random RNG driving each Collections.shuffle pass
     * @param numSeedBytes entropy bytes drawn per pass (folded into a long seed)
     * @param numShuffles number of reseed-and-shuffle passes
     */
    public void shuffle(Random random, int numSeedBytes, int numShuffles) {
        for (int i = 0; i < numShuffles; i++) {
            byte seedBytes[] = mRandomSeeder.generateSeed(numSeedBytes);
            long seed = 0;
            // Pack the seed bytes little-endian into a long (mask avoids sign extension).
            for (int j = 0; j < seedBytes.length; j++) {
                seed |= (((long) seedBytes[j]) & 0xFF) << (j * Byte.SIZE);
            }
            random.setSeed(seed);
            Collections.shuffle(mCards, random);
        }
    }

    /** Returns the number of cards currently in this pile. */
    public int size() {
        return mCards.size();
    }

    /** Comma-separated rendering of every card; empty string for an empty pile. */
    @Override
    public String toString() {
        StringBuilder str = new StringBuilder();
        for (Card<T> card : mCards) {
            str.append(card.toString());
            str.append(", ");
        }
        // BUG FIX: only strip the trailing ", " when at least one card was appended.
        // The original called setLength(str.length() - 2) unconditionally, which
        // throws StringIndexOutOfBoundsException (setLength(-2)) on an empty pile.
        if (str.length() >= 2) {
            str.setLength(str.length() - 2);
        }
        return str.toString();
    }

    /** Compact rendering: every card concatenated with no separator. */
    public String toStringSmall() {
        StringBuilder str = new StringBuilder();
        for (Card<T> card : mCards) {
            str.append(card.toString());
        }
        return str.toString();
    }

    /** Returns log base {@code base} of {@code num}. */
    public static double logOfBase(double base, double num) {
        return Math.log(num) / Math.log(base);
    }

    /** Renders the bytes as an upper-case hex string, two characters per byte. */
    public static String bytesToHex(byte[] bytes) {
        final char[] hexArray =
                {'0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F'};
        char[] hexChars = new char[bytes.length * 2];
        int v;
        for ( int j = 0; j < bytes.length; j++ ) {
            v = bytes[j] & 0xFF;
            hexChars[j * 2] = hexArray[v >>> 4];
            hexChars[j * 2 + 1] = hexArray[v & 0x0F];
        }
        return new String(hexChars);
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.geo; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; import org.elasticsearch.geometry.Line; import org.elasticsearch.geometry.LinearRing; import org.elasticsearch.geometry.MultiLine; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.geometry.MultiPolygon; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.geometry.utils.GeographyValidator; import java.io.IOException; import java.text.ParseException; import java.util.Arrays; import java.util.Collections; /** * Tests for {@code GeoJSONShapeParser} */ public class GeoJsonParserTests extends BaseGeoParsingTestCase { @Override public void testParsePoint() throws IOException { 
XContentBuilder pointGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Point") .startArray("coordinates").value(100.0).value(0.0).endArray() .endObject(); Point expected = new Point(100.0, 0.0); assertGeometryEquals(expected, pointGeoJson); } @Override public void testParseLineString() throws IOException { XContentBuilder lineGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "LineString") .startArray("coordinates") .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .endObject(); Line expected = new Line(new double[] { 100.0, 101.0}, new double[] {0.0, 1.0}); try (XContentParser parser = createParser(lineGeoJson)) { parser.nextToken(); assertEquals(expected, new GeoJson(false, false, new GeographyValidator(true)).fromXContent(parser)); } } @Override public void testParseMultiLineString() throws IOException { XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "MultiLineString") .startArray("coordinates") .startArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .startArray() .startArray().value(102.0).value(2.0).endArray() .startArray().value(103.0).value(3.0).endArray() .endArray() .endArray() .endObject(); MultiLine expected = new MultiLine(Arrays.asList( new Line(new double[] { 100.0, 101.0}, new double[] {0.0, 1.0}), new Line(new double[] { 102.0, 103.0}, new double[] {2.0, 3.0}) )); assertGeometryEquals(expected, multilinesGeoJson); } public void testParseCircle() throws IOException { XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "circle") .startArray("coordinates").value(100.0).value(0.0).endArray() .field("radius", "200m") .endObject(); Circle expected = new Circle(100.0, 0.0, 200); assertGeometryEquals(expected, multilinesGeoJson); } public void testParseMultiDimensionShapes() throws IOException { // 
multi dimension point XContentBuilder pointGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Point") .startArray("coordinates").value(100.0).value(0.0).value(15.0).value(18.0).endArray() .endObject(); try (XContentParser parser = createParser(pointGeoJson)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(false, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } // multi dimension linestring XContentBuilder lineGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "LineString") .startArray("coordinates") .startArray().value(100.0).value(0.0).value(15.0).endArray() .startArray().value(101.0).value(1.0).value(18.0).value(19.0).endArray() .endArray() .endObject(); try (XContentParser parser = createParser(lineGeoJson)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(false, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @Override public void testParseEnvelope() throws IOException { // test #1: envelope with expected coordinate order (TopLeft, BottomRight) XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", randomBoolean() ? "envelope" : "bbox") .startArray("coordinates") .startArray().value(-50).value(30).endArray() .startArray().value(50).value(-30).endArray() .endArray() .endObject(); Rectangle expected = new Rectangle(-50, 50, 30, -30); assertGeometryEquals(expected, multilinesGeoJson); // test #2: envelope that spans dateline multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", randomBoolean() ? 
"envelope" : "bbox") .startArray("coordinates") .startArray().value(50).value(30).endArray() .startArray().value(-50).value(-30).endArray() .endArray() .endObject(); expected = new Rectangle(50, -50, 30, -30); assertGeometryEquals(expected, multilinesGeoJson); // test #3: "envelope" (actually a triangle) with invalid number of coordinates (TopRight, BottomLeft, BottomRight) multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", randomBoolean() ? "envelope" : "bbox") .startArray("coordinates") .startArray().value(50).value(30).endArray() .startArray().value(-50).value(-30).endArray() .startArray().value(50).value(-39).endArray() .endArray() .endObject(); try (XContentParser parser = createParser(multilinesGeoJson)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(false, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } // test #4: "envelope" with empty coordinates multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", randomBoolean() ? 
"envelope" : "bbox") .startArray("coordinates") .endArray() .endObject(); try (XContentParser parser = createParser(multilinesGeoJson)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(false, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @Override public void testParsePolygon() throws IOException { XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(100.0).value(1.0).endArray() .endArray() .endArray() .endObject(); Polygon p = new Polygon( new LinearRing( new double[] {100d, 101d, 101d, 100d, 100d}, new double[] {1d, 1d, 0d, 0d, 1d} )); assertGeometryEquals(p, polygonGeoJson); } public void testParse3DPolygon() throws IOException { XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(100.0).value(1.0).value(10.0).endArray() .startArray().value(101.0).value(1.0).value(10.0).endArray() .startArray().value(101.0).value(0.0).value(10.0).endArray() .startArray().value(100.0).value(0.0).value(10.0).endArray() .startArray().value(100.0).value(1.0).value(10.0).endArray() .endArray() .endArray() .endObject(); Polygon expected = new Polygon(new LinearRing( new double[]{100.0, 101.0, 101.0, 100.0, 100.0}, new double[]{1.0, 1.0, 0.0, 0.0, 1.0}, new double[]{10.0, 10.0, 10.0, 10.0, 10.0} )); try (XContentParser parser = createParser(polygonGeoJson)) { parser.nextToken(); assertEquals(expected, new GeoJson(true, false, new GeographyValidator(true)).fromXContent(parser)); } } public void testInvalidDimensionalPolygon() throws IOException { XContentBuilder polygonGeoJson = 
XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(100.0).value(1.0).value(10.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(101.0).value(0.0).value(10.0).endArray() .startArray().value(100.0).value(0.0).value(10.0).endArray() .startArray().value(100.0).value(1.0).value(10.0).endArray() .endArray() .endArray() .endObject(); try (XContentParser parser = createParser(polygonGeoJson)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(true)).fromXContent(parser)); assertNull(parser.nextToken()); } } public void testParseInvalidPoint() throws IOException { // test case 1: create an invalid point object with multipoint data format XContentBuilder invalidPoint1 = XContentFactory.jsonBuilder() .startObject() .field("type", "point") .startArray("coordinates") .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject(); try (XContentParser parser = createParser(invalidPoint1)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } // test case 2: create an invalid point object with an empty number of coordinates XContentBuilder invalidPoint2 = XContentFactory.jsonBuilder() .startObject() .field("type", "point") .startArray("coordinates") .endArray() .endObject(); try (XContentParser parser = createParser(invalidPoint2)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } public void testParseInvalidMultipoint() throws IOException { // test case 1: create an invalid multipoint object with single coordinate XContentBuilder invalidMultipoint1 = XContentFactory.jsonBuilder() .startObject() .field("type", "multipoint") 
.startArray("coordinates").value(-74.011).value(40.753).endArray() .endObject(); try (XContentParser parser = createParser(invalidMultipoint1)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } // test case 2: create an invalid multipoint object with null coordinate XContentBuilder invalidMultipoint2 = XContentFactory.jsonBuilder() .startObject() .field("type", "multipoint") .startArray("coordinates") .endArray() .endObject(); try (XContentParser parser = createParser(invalidMultipoint2)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } // test case 3: create a valid formatted multipoint object with invalid number (0) of coordinates XContentBuilder invalidMultipoint3 = XContentFactory.jsonBuilder() .startObject() .field("type", "multipoint") .startArray("coordinates") .startArray().endArray() .endArray() .endObject(); try (XContentParser parser = createParser(invalidMultipoint3)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } public void testParseInvalidDimensionalMultiPolygon() throws IOException { // test invalid multipolygon (an "accidental" polygon with inner rings outside outer ring) String multiPolygonGeoJson = Strings.toString(XContentFactory.jsonBuilder() .startObject() .field("type", "MultiPolygon") .startArray("coordinates") .startArray()//first poly (without holes) .startArray() .startArray().value(102.0).value(2.0).endArray() .startArray().value(103.0).value(2.0).endArray() .startArray().value(103.0).value(3.0).endArray() .startArray().value(102.0).value(3.0).endArray() .startArray().value(102.0).value(2.0).endArray() .endArray() .endArray() 
.startArray()//second poly (with hole) .startArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(100.0).value(0.0).endArray() .endArray() .startArray()//hole .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).value(10.0).endArray() .startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endArray() .endObject()); try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } public void testParseInvalidPolygon() throws IOException { /* * The following 3 test cases ensure proper error handling of invalid polygons * per the GeoJSON specification */ // test case 1: create an invalid polygon with only 2 points String invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().value(-74.011).value(40.753).endArray() .startArray().value(-75.022).value(41.783).endArray() .endArray() .endArray() .endObject()); try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { parser.nextToken(); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } // test case 2: create an invalid polygon with only 1 point invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().value(-74.011).value(40.753).endArray() .endArray() .endArray() 
            // (continues the invalid-"polygon" test begun earlier in the file: each case
            // feeds a malformed document to the parser and expects an XContentParseException)
            .endObject());
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertNull(parser.nextToken());
        }

        // test case 3: create an invalid polygon with 0 points
        invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().endArray()
            .endArray()
            .endArray()
            .endObject());
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertNull(parser.nextToken());
        }

        // test case 4: create an invalid polygon with null value points
        invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().nullValue().nullValue().endArray()
            .endArray()
            .endArray()
            .endObject());
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertNull(parser.nextToken());
        }

        // test case 5: create an invalid polygon with 1 invalid LinearRing
        invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .nullValue().nullValue()
            .endArray()
            .endObject());
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertNull(parser.nextToken());
        }

        // test case 6: create an invalid polygon with 0 LinearRings
        invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates").endArray()
            .endObject());
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertNull(parser.nextToken());
        }

        // test case 7: create an invalid polygon whose only ring is a single coordinate pair
        // (original comment duplicated "0 LinearRings" from case 6)
        invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray().value(-74.011).value(40.753).endArray()
            .endArray()
            .endObject());
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

    // Polygon with one hole: an outer shell plus a single inner ring.
    public void testParsePolygonWithHole() throws IOException {
        XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(100.0).value(1.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .startArray().value(101.0).value(0.0).endArray()
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(100.0).value(1.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(100.2).value(0.8).endArray()
            .startArray().value(100.2).value(0.2).endArray()
            .startArray().value(100.8).value(0.2).endArray()
            .startArray().value(100.8).value(0.8).endArray()
            .startArray().value(100.2).value(0.8).endArray()
            .endArray()
            .endArray()
            .endObject();

        LinearRing hole = new LinearRing(
            new double[] {100.2d, 100.2d, 100.8d, 100.8d, 100.2d},
            new double[] {0.8d, 0.2d, 0.2d, 0.8d, 0.8d});
        Polygon p = new Polygon(new LinearRing(
            new double[] {100d, 101d, 101d, 100d, 100d}, new double[] {1d, 1d, 0d, 0d, 1d}),
            Collections.singletonList(hole));
        assertGeometryEquals(p, polygonGeoJson);
    }

    @Override
    public void testParseMultiPoint() throws IOException {
        XContentBuilder multiPointGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "MultiPoint")
            .startArray("coordinates")
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .endArray()
            .endObject();
        assertGeometryEquals(new MultiPoint(Arrays.asList(
            new Point(100, 0),
            new Point(101, 1))), multiPointGeoJson);
    }

    @Override
    public void testParseMultiPolygon() throws IOException {
        // two polygons; one without hole, one with hole
        XContentBuilder multiPolygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "MultiPolygon")
            .startArray("coordinates")
            .startArray()//first poly (without holes)
            .startArray()
            .startArray().value(102.0).value(2.0).endArray()
            .startArray().value(103.0).value(2.0).endArray()
            .startArray().value(103.0).value(3.0).endArray()
            .startArray().value(102.0).value(3.0).endArray()
            .startArray().value(102.0).value(2.0).endArray()
            .endArray()
            .endArray()
            .startArray()//second poly (with hole)
            .startArray()
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .startArray().value(100.0).value(1.0).endArray()
            .startArray().value(100.0).value(0.0).endArray()
            .endArray()
            .startArray()//hole
            .startArray().value(100.2).value(0.8).endArray()
            .startArray().value(100.2).value(0.2).endArray()
            .startArray().value(100.8).value(0.2).endArray()
            .startArray().value(100.8).value(0.8).endArray()
            .startArray().value(100.2).value(0.8).endArray()
            .endArray()
            .endArray()
            .endArray()
            .endObject();

        LinearRing hole = new LinearRing(
            new double[] {100.2d, 100.2d, 100.8d, 100.8d, 100.2d},
            new double[] {0.8d, 0.2d, 0.2d, 0.8d, 0.8d});
        MultiPolygon polygons = new MultiPolygon(Arrays.asList(
            new Polygon(new LinearRing(
                new double[] {102d, 103d, 103d, 102d, 102d}, new double[] {2d, 2d, 3d, 3d, 2d})),
            new Polygon(new LinearRing(
                new double[] {100d, 101d, 101d, 100d, 100d}, new double[] {0d, 0d, 1d, 1d, 0d}),
                Collections.singletonList(hole))));
        assertGeometryEquals(polygons, multiPolygonGeoJson);
    }

    @Override
    public void testParseGeometryCollection() throws IOException {
        XContentBuilder geometryCollectionGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "GeometryCollection")
            .startArray("geometries")
            .startObject()
            .field("type", "LineString")
            .startArray("coordinates")
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .endArray()
            .endObject()
            .startObject()
            .field("type", "Point")
            .startArray("coordinates").value(102.0).value(2.0).endArray()
            .endObject()
            .startObject()
            .field("type", "Polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .endArray()
            .endArray()
            .endObject()
            .endArray()
            .endObject();

        GeometryCollection<Geometry> geometryExpected = new GeometryCollection<>(Arrays.asList(
            new Line(new double[] {100d, 101d}, new double[] {0d, 1d}),
            new Point(102d, 2d),
            new Polygon(new LinearRing(
                new double[] {-177, 176, 172, 176, -177, -177}, new double[] {10, 15, 0, -15, -10, 10}
            ))
        ));
        assertGeometryEquals(geometryExpected, geometryCollectionGeoJson);
    }

    // Unknown members ("crs", "bbox", "bubu", nested objects) must be ignored;
    // only the top-level "type" and "coordinates" drive parsing.
    public void testThatParserExtractsCorrectTypeAndCoordinatesFromArbitraryJson() throws IOException, ParseException {
        XContentBuilder pointGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("crs")
            .field("type", "name")
            .startObject("properties")
            .field("name", "urn:ogc:def:crs:OGC:1.3:CRS84")
            .endObject()
            .endObject()
            .field("bbox", "foobar")
            .field("type", "point")
            .field("bubu", "foobar")
            .startArray("coordinates").value(100.0).value(0.0).endArray()
            .startObject("nested").startArray("coordinates").value(200.0).value(0.0).endArray().endObject()
            .startObject("lala").field("type", "NotAPoint").endObject()
            .endObject();
        Point expectedPt = new Point(100, 0);
        assertGeometryEquals(expectedPt, pointGeoJson, false);
    }

    public void testParseOrientationOption() throws IOException {
        // test 1: valid ccw (right handed system) poly not crossing dateline (with 'right' field)
        XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .field("orientation", randomFrom("ccw", "right"))
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .startArray().value(174.0).value(10.0).endArray()
            .startArray().value(-172.0).value(-8.0).endArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject();

        Polygon expected = new Polygon(
            new LinearRing(new double[]{176.0, -177.0, -177.0, 176.0, 172.0, 176.0},
                new double[]{15.0, 10.0, -10.0, -15.0, 0.0, 15.0}),
            Collections.singletonList(
                new LinearRing(new double[]{-172.0, 174.0, -172.0, -172.0},
                    new double[]{8.0, 10.0, -8.0, 8.0})
            ));
        assertGeometryEquals(expected, polygonGeoJson);

        // test 2: valid cw poly
        polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .field("orientation", randomFrom("cw", "left"))
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .startArray().value(174.0).value(10.0).endArray()
            .startArray().value(-172.0).value(-8.0).endArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject();

        expected = new Polygon(
            new LinearRing(new double[]{176.0, 172.0, 176.0, -177.0, -177.0, 176.0},
                new double[]{15.0, 0.0, -15.0, -10.0, 10.0, 15.0}),
            Collections.singletonList(
                new LinearRing(new double[]{-172.0, -172.0, 174.0, -172.0},
                    new double[]{8.0, -8.0, 10.0, 8.0})
            ));
        assertGeometryEquals(expected, polygonGeoJson);
    }

    public void testParseInvalidShapes() throws IOException {
        // single dimensions point
        XContentBuilder tooLittlePointGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Point")
            .startArray("coordinates").value(10.0).endArray()
            .endObject();
        try (XContentParser parser = createParser(tooLittlePointGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertNull(parser.nextToken());
        }

        // zero dimensions point
        XContentBuilder emptyPointGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Point")
            .startObject("coordinates").field("foo", "bar").endObject()
            .endObject();
        try (XContentParser parser = createParser(emptyPointGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

    public void testParseInvalidGeometryCollectionShapes() throws IOException {
        // single dimensions point
        XContentBuilder invalidPoints = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("foo")
            .field("type", "geometrycollection")
            .startArray("geometries")
            .startObject()
            .field("type", "polygon")
            .startArray("coordinates")
            .startArray().value("46.6022226498514").value("24.7237442867977").endArray()
            .startArray().value("46.6031857243798").value("24.722968774929").endArray()
            .endArray() // coordinates
            .endObject()
            .endArray() // geometries
            .endObject()
            .endObject();
        try (XContentParser parser = createParser(invalidPoints)) {
            parser.nextToken(); // foo
            parser.nextToken(); // start object
            parser.nextToken(); // start object
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser));
            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // end of the document
            assertNull(parser.nextToken()); // no more elements afterwards
        }
    }
}
package ngo.music.soundcloudplayer.api; import org.apache.http.ConnectionReuseStrategy; import org.apache.http.Header; import org.apache.http.HeaderElement; import org.apache.http.HttpEntity; import org.apache.http.HttpException; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.HttpResponseInterceptor; import org.apache.http.HttpStatus; import org.apache.http.NameValuePair; import org.apache.http.auth.AUTH; import org.apache.http.auth.AuthScope; import org.apache.http.client.AuthenticationHandler; import org.apache.http.client.HttpClient; import org.apache.http.client.HttpRequestRetryHandler; import org.apache.http.client.RedirectHandler; import org.apache.http.client.RequestDirector; import org.apache.http.client.UserTokenHandler; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.params.HttpClientParams; import org.apache.http.client.protocol.ClientContext; import org.apache.http.conn.ClientConnectionManager; import org.apache.http.conn.ConnectionKeepAliveStrategy; import org.apache.http.conn.params.ConnManagerPNames; import org.apache.http.conn.params.ConnManagerParams; import org.apache.http.conn.params.ConnPerRoute; import org.apache.http.conn.params.ConnPerRouteBean; import org.apache.http.conn.params.ConnRoutePNames; import org.apache.http.conn.routing.HttpRoute; import org.apache.http.conn.routing.HttpRoutePlanner; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.scheme.SocketFactory; import org.apache.http.conn.ssl.SSLSocketFactory; import 
org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.DefaultRequestDirector;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.message.BasicHeader;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import org.apache.http.protocol.BasicHttpProcessor;
import org.apache.http.protocol.HttpContext;
import org.apache.http.protocol.HttpProcessor;
import org.apache.http.protocol.HttpRequestExecutor;
import org.json.JSONException;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.net.URI;
import java.util.Arrays;

/**
 * Interface with SoundCloud, using OAuth2.
 * This API wrapper makes a few assumptions - namely:
 * <ul>
 *     <li>Server responses are always requested in JSON format</li>
 *     <li>Refresh-token handling is transparent to the client application (you should not need to
 *         call <code>refreshToken()</code> manually)
 *     </li>
 *     <li>You use <a href="http://hc.apache.org/httpcomponents-client-ga/">Apache HttpClient</a></li>
 * </ul>
 * Example usage:
 * <code>
 * <pre>
 *   ApiWrapper wrapper = new ApiWrapper("client_id", "client_secret", null, null, Env.SANDBOX);
 *   wrapper.login("login", "password");
 *   HttpResponse response = wrapper.get(Request.to("/tracks"));
 * </pre>
 * </code>
 * @see <a href="http://developers.soundcloud.com/docs">Using the SoundCloud API</a>
 */
public class ApiWrapper implements CloudAPI, Serializable {
    public static final String DEFAULT_CONTENT_TYPE = "application/json";

    private static final long serialVersionUID = 3662083416905771921L;
    /** sentinel used instead of null so token checks never NPE */
    private static final Token EMPTY_TOKEN = new Token(null, null);

    /** The current environment, only live possible for now */
    public final Env env = Env.LIVE;

    private Token mToken;
    private final String mClientId, mClientSecret;
    private final URI mRedirectUri;

    transient private HttpClient httpClient;
    transient private TokenListener listener;

    private String mDefaultContentType;
    private String mDefaultAcceptEncoding;

    public static final int BUFFER_SIZE = 8192;
    /** Connection timeout */
    public static final int TIMEOUT = 20 * 1000;
    /** Keepalive timeout */
    public static final long KEEPALIVE_TIMEOUT = 20 * 1000;
    /* maximum number of connections allowed */
    public static final int MAX_TOTAL_CONNECTIONS = 10;
    /* spam response code from API */
    public static final int STATUS_CODE_SPAM_WARNING = 429;

    /** debug request details to stderr */
    public boolean debugRequests;

    /**
     * Constructs a new ApiWrapper instance.
     *
     * @param clientId     the application client id
     * @param clientSecret the application client secret
     * @param redirectUri  the registered redirect url, or null
     * @param token        a valid token, or null if not known
     * @see <a href="http://developers.soundcloud.com/docs#authentication">API authentication documentation</a>
     */
    public ApiWrapper(String clientId,
                      String clientSecret,
                      URI redirectUri,
                      Token token) {
        mClientId = clientId;
        mClientSecret = clientSecret;
        mRedirectUri = redirectUri;
        mToken = token == null ? EMPTY_TOKEN : token;
    }

    @Override
    public Token login(String username, String password, String... scopes) throws IOException {
        if (username == null || password == null) {
            throw new IllegalArgumentException("username or password is null");
        }
        final Request request = addScope(Request.to(Endpoints.TOKEN).with(
            GRANT_TYPE, PASSWORD,
            CLIENT_ID, mClientId,
            CLIENT_SECRET, mClientSecret,
            USERNAME, username,
            PASSWORD, password), scopes);
        mToken = requestToken(request);
        return mToken;
    }

    @Override
    public Token authorizationCode(String code, String... scopes) throws IOException {
        if (code == null) {
            throw new IllegalArgumentException("code is null");
        }
        final Request request = addScope(Request.to(Endpoints.TOKEN).with(
            GRANT_TYPE, AUTHORIZATION_CODE,
            CLIENT_ID, mClientId,
            CLIENT_SECRET, mClientSecret,
            REDIRECT_URI, mRedirectUri,
            CODE, code), scopes);
        mToken = requestToken(request);
        return mToken;
    }

    @Override
    public Token clientCredentials(String... scopes) throws IOException {
        final Request req = addScope(Request.to(Endpoints.TOKEN).with(
            GRANT_TYPE, CLIENT_CREDENTIALS,
            CLIENT_ID, mClientId,
            CLIENT_SECRET, mClientSecret), scopes);
        final Token token = requestToken(req);
        // verify that every requested scope was actually granted
        if (scopes != null) {
            for (String scope : scopes) {
                if (!token.scoped(scope)) {
                    throw new InvalidTokenException(-1, "Could not obtain requested scope '" + scope +
                        "' (got: '" + token.scope + "')");
                }
            }
        }
        return token;
    }

    @Override
    public Token extensionGrantType(String grantType, String... scopes) throws IOException {
        final Request req = addScope(Request.to(Endpoints.TOKEN).with(
            GRANT_TYPE, grantType,
            CLIENT_ID, mClientId,
            CLIENT_SECRET, mClientSecret), scopes);
        mToken = requestToken(req);
        return mToken;
    }

    @Override
    public Token refreshToken() throws IOException {
        if (mToken == null || mToken.refresh == null) throw new IllegalStateException("no refresh token available");
        mToken = requestToken(Request.to(Endpoints.TOKEN).with(
            GRANT_TYPE, REFRESH_TOKEN,
            CLIENT_ID, mClientId,
            CLIENT_SECRET, mClientSecret,
            REFRESH_TOKEN, mToken.refresh));
        return mToken;
    }

    @Override
    public Token invalidateToken() {
        if (mToken != null) {
            // give the listener a chance to supply a replacement token
            Token alternative = listener == null ? null : listener.onTokenInvalid(mToken);
            mToken.invalidate();
            if (alternative != null) {
                mToken = alternative;
                return mToken;
            } else {
                return null;
            }
        } else {
            return null;
        }
    }

    @Override
    public URI authorizationCodeUrl(String... options) {
        final Request req = Request.to(options.length == 0 ? Endpoints.CONNECT : options[0]).with(
            REDIRECT_URI, mRedirectUri,
            CLIENT_ID, mClientId,
            RESPONSE_TYPE, CODE);
        // FIX: leftover debug output ("REQUES = ...") used to be printed to stdout
        // unconditionally; it is now gated on debugRequests like logRequest().
        if (debugRequests) System.err.println("authorizationCodeUrl request = " + req);
        if (options.length > 1) req.add(SCOPE, options[1]);
        if (options.length > 2) req.add(DISPLAY, options[2]);
        if (options.length > 3) req.add(STATE, options[3]);
        return getURI(req, false, true);
    }

    /**
     * Constructs URI path for a given resource.
     * @param request the resource to access
     * @param api     api or web
     * @param secure  whether to use SSL or not
     * @return a valid URI
     */
    public URI getURI(Request request, boolean api, boolean secure) {
        final URI uri = api ? env.getResourceURI(secure) : env.getAuthResourceURI(secure);
        return uri.resolve(request.toUrl());
    }

    /**
     * User-Agent to identify ourselves with - defaults to USER_AGENT
     * @return the agent to use
     * @see CloudAPI#USER_AGENT
     */
    public String getUserAgent() {
        return USER_AGENT;
    }

    /**
     * Request an OAuth2 token from SoundCloud
     * @param request the token request
     * @return the token
     * @throws java.io.IOException network error
     * @throws CloudAPI.InvalidTokenException unauthorized
     * @throws CloudAPI.ApiResponseException  http error
     */
    public Token requestToken(Request request) throws IOException {
        HttpResponse response = safeExecute(env.sslResourceHost, request.buildRequest(HttpPost.class));
        final int status = response.getStatusLine().getStatusCode();
        String error;
        try {
            if (status == HttpStatus.SC_OK) {
                final Token token = new Token(Http.getJSON(response));
                if (listener != null) listener.onTokenRefreshed(token);
                return token;
            } else {
                error = Http.getJSON(response).getString("error");
            }
        } catch (IOException ignored) {
            // best effort: fall back to the exception message as error text
            error = ignored.getMessage();
        } catch (JSONException ignored) {
            error = ignored.getMessage();
        }
        throw status == HttpStatus.SC_UNAUTHORIZED ?
            new InvalidTokenException(status, error) :
            new ApiResponseException(response, error);
    }

    /**
     * @return the default HttpParams
     * @see <a href="http://developer.android.com/reference/android/net/http/AndroidHttpClient.html#newInstance(java.lang.String, android.content.Context)">
     *     android.net.http.AndroidHttpClient#newInstance(String, Context)</a>
     */
    protected HttpParams getParams() {
        final HttpParams params = new BasicHttpParams();
        HttpConnectionParams.setConnectionTimeout(params, TIMEOUT);
        HttpConnectionParams.setSoTimeout(params, TIMEOUT);
        HttpConnectionParams.setSocketBufferSize(params, BUFFER_SIZE);
        ConnManagerParams.setMaxTotalConnections(params, MAX_TOTAL_CONNECTIONS);

        // Turn off stale checking. Our connections break all the time anyway,
        // and it's not worth it to pay the penalty of checking every time.
        HttpConnectionParams.setStaleCheckingEnabled(params, false);

        // fix contributed by Bjorn Roche XXX check if still needed
        params.setBooleanParameter("http.protocol.expect-continue", false);
        params.setParameter(ConnManagerPNames.MAX_CONNECTIONS_PER_ROUTE, new ConnPerRoute() {
            @Override
            public int getMaxForRoute(HttpRoute httpRoute) {
                if (env.isApiHost(httpRoute.getTargetHost())) {
                    // there will be a lot of concurrent request to the API host
                    return MAX_TOTAL_CONNECTIONS;
                } else {
                    return ConnPerRouteBean.DEFAULT_MAX_CONNECTIONS_PER_ROUTE;
                }
            }
        });

        // apply system proxy settings
        final String proxyHost = System.getProperty("http.proxyHost");
        final String proxyPort = System.getProperty("http.proxyPort");
        if (proxyHost != null) {
            int port = 80;
            try {
                port = Integer.parseInt(proxyPort);
            } catch (NumberFormatException ignored) {
                // fall back to port 80 when http.proxyPort is absent or malformed
            }
            params.setParameter(ConnRoutePNames.DEFAULT_PROXY, new HttpHost(proxyHost, port));
        }
        return params;
    }

    /**
     * @param proxy the proxy to use for the wrapper, or null to clear the current one.
     */
    public void setProxy(URI proxy) {
        final HttpHost host;
        if (proxy != null) {
            Scheme scheme = getHttpClient()
                .getConnectionManager()
                .getSchemeRegistry()
                .getScheme(proxy.getScheme());
            host = new HttpHost(proxy.getHost(), scheme.resolvePort(proxy.getPort()), scheme.getName());
        } else {
            host = null;
        }
        getHttpClient().getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, host);
    }

    public URI getProxy() {
        Object proxy = getHttpClient().getParams().getParameter(ConnRoutePNames.DEFAULT_PROXY);
        if (proxy instanceof HttpHost) {
            return URI.create(((HttpHost) proxy).toURI());
        } else {
            return null;
        }
    }

    public boolean isProxySet() {
        return getProxy() != null;
    }

    /**
     * @return SocketFactory used by the underlying HttpClient
     */
    protected SocketFactory getSocketFactory() {
        return PlainSocketFactory.getSocketFactory();
    }

    /**
     * @return SSL SocketFactory used by the underlying HttpClient
     */
    protected SSLSocketFactory getSSLSocketFactory() {
        return SSLSocketFactory.getSocketFactory();
    }

    /** @return The HttpClient instance used to make the calls (lazily created, not thread-safe) */
    public HttpClient getHttpClient() {
        if (httpClient == null) {
            final HttpParams params = getParams();
            HttpClientParams.setRedirecting(params, false);
            HttpProtocolParams.setUserAgent(params, getUserAgent());

            final SchemeRegistry registry = new SchemeRegistry();
            registry.register(new Scheme("http", getSocketFactory(), 80));
            final SSLSocketFactory sslFactory = getSSLSocketFactory();
            registry.register(new Scheme("https", sslFactory, 443));
            httpClient = new DefaultHttpClient(new ThreadSafeClientConnManager(params, registry), params) {
                {
                    setKeepAliveStrategy(new ConnectionKeepAliveStrategy() {
                        @Override
                        public long getKeepAliveDuration(HttpResponse httpResponse, HttpContext httpContext) {
                            return KEEPALIVE_TIMEOUT;
                        }
                    });

                    getCredentialsProvider().setCredentials(
                        new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, CloudAPI.REALM, OAUTH_SCHEME),
                        OAuth2Scheme.EmptyCredentials.INSTANCE);
                    getAuthSchemes().register(CloudAPI.OAUTH_SCHEME, new OAuth2Scheme.Factory(ApiWrapper.this));

                    // transparently decompress gzip-encoded responses
                    addResponseInterceptor(new HttpResponseInterceptor() {
                        @Override
                        public void process(HttpResponse response, HttpContext context)
                                throws HttpException, IOException {
                            if (response == null || response.getEntity() == null) return;

                            HttpEntity entity = response.getEntity();
                            Header header = entity.getContentEncoding();
                            if (header != null) {
                                for (HeaderElement codec : header.getElements()) {
                                    if (codec.getName().equalsIgnoreCase("gzip")) {
                                        response.setEntity(new GzipDecompressingEntity(entity));
                                        break;
                                    }
                                }
                            }
                        }
                    });
                }

                @Override
                protected HttpContext createHttpContext() {
                    HttpContext ctxt = super.createHttpContext();
                    ctxt.setAttribute(ClientContext.AUTH_SCHEME_PREF,
                        Arrays.asList(CloudAPI.OAUTH_SCHEME, "digest", "basic"));
                    return ctxt;
                }

                @Override
                protected BasicHttpProcessor createHttpProcessor() {
                    BasicHttpProcessor processor = super.createHttpProcessor();
                    processor.addInterceptor(new OAuth2HttpRequestInterceptor());
                    return processor;
                }

                // for testability only
                @Override
                protected RequestDirector createClientRequestDirector(HttpRequestExecutor requestExec,
                                                                      ClientConnectionManager conman,
                                                                      ConnectionReuseStrategy reustrat,
                                                                      ConnectionKeepAliveStrategy kastrat,
                                                                      HttpRoutePlanner rouplan,
                                                                      HttpProcessor httpProcessor,
                                                                      HttpRequestRetryHandler retryHandler,
                                                                      RedirectHandler redirectHandler,
                                                                      AuthenticationHandler targetAuthHandler,
                                                                      AuthenticationHandler proxyAuthHandler,
                                                                      UserTokenHandler stateHandler,
                                                                      HttpParams params) {
                    return getRequestDirector(requestExec, conman, reustrat, kastrat, rouplan, httpProcessor,
                        retryHandler, redirectHandler, targetAuthHandler, proxyAuthHandler, stateHandler, params);
                }
            };
        }
        return httpClient;
    }

    @Override
    public long resolve(String url) throws IOException {
        HttpResponse resp = get(Request.to(Endpoints.RESOLVE).with("url", url));
        if (resp.getStatusLine().getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY) {
            Header location = resp.getFirstHeader("Location");
            if (location != null && location.getValue() != null) {
                final String path = URI.create(location.getValue()).getPath();
                if (path != null && path.contains("/")) {
                    try {
                        final String id = path.substring(path.lastIndexOf("/") + 1);
                        // BUGFIX: this method returns long but previously used Integer.parseInt,
                        // which overflows (throws NumberFormatException) for ids > Integer.MAX_VALUE.
                        return Long.parseLong(id);
                    } catch (NumberFormatException e) {
                        throw new ResolverException(e, resp);
                    }
                } else {
                    throw new ResolverException("Invalid string:" + path, resp);
                }
            } else {
                throw new ResolverException("No location header", resp);
            }
        } else {
            throw new ResolverException("Invalid status code", resp);
        }
    }

    @Override
    public Stream resolveStreamUrl(final String url, boolean skipLogging) throws IOException {
        HttpResponse resp = safeExecute(null, addHeaders(Request.to(url).buildRequest(HttpHead.class)));
        if (resp.getStatusLine().getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY) {
            Header location = resp.getFirstHeader("Location");
            if (location != null && location.getValue() != null) {
                final String headRedirect = location.getValue();
                resp = safeExecute(null, new HttpHead(headRedirect));
                if (resp.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                    Stream stream = new Stream(url, headRedirect, resp);

                    // need to do another GET request to have a URL ready for client usage
                    Request req = Request.to(url);
                    if (skipLogging) {
                        // skip logging
                        req.with("skip_logging", "1");
                    }
                    // BUGFIX: build the GET from req — the original rebuilt Request.to(url)
                    // here, silently dropping the skip_logging parameter added above.
                    resp = safeExecute(null, addHeaders(req.buildRequest(HttpGet.class)));
                    if (resp.getStatusLine().getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY) {
                        return stream.withNewStreamUrl(resp.getFirstHeader("Location").getValue());
                    } else {
                        throw new ResolverException("Unexpected response code", resp);
                    }
                } else {
                    throw new ResolverException("Unexpected response code", resp);
                }
            } else {
                throw new ResolverException("Location header not set", resp);
            }
        } else {
            throw new ResolverException("Unexpected response code", resp);
        }
    }

    @Override
    public HttpResponse head(Request request) throws IOException {
        return execute(request, HttpHead.class);
    }

    @Override
    public HttpResponse get(Request request) throws IOException {
        return execute(request, HttpGet.class);
    }

    @Override
    public HttpResponse put(Request request) throws IOException {
        return execute(request, HttpPut.class);
    }

    @Override
    public HttpResponse post(Request request) throws IOException {
        return execute(request, HttpPost.class);
    }

    @Override
    public HttpResponse delete(Request request) throws IOException {
        return execute(request, HttpDelete.class);
    }

    @Override
    public Token getToken() {
        return mToken;
    }

    @Override
    public void setToken(Token newToken) {
        mToken = newToken == null ? EMPTY_TOKEN : newToken;
    }

    @Override
    public synchronized void setTokenListener(TokenListener listener) {
        this.listener = listener;
    }

    /**
     * Execute an API request, adds the necessary headers.
     * @param request the HTTP request
     * @return the HTTP response
     * @throws java.io.IOException network error etc.
     */
    public HttpResponse execute(HttpUriRequest request) throws IOException {
        return safeExecute(env.sslResourceHost, addHeaders(request));
    }

    public HttpResponse safeExecute(HttpHost target, HttpUriRequest request) throws IOException {
        if (target == null) {
            target = determineTarget(request);
        }
        try {
            return getHttpClient().execute(target, request);
        } catch (NullPointerException e) {
            // this is a workaround for a broken httpclient version,
            // cf. http://code.google.com/p/android/issues/detail?id=5255
            // NPE in DefaultRequestDirector.java:456
            if (!request.isAborted() && request.getParams().isParameterFalse("npe-retried")) {
                request.getParams().setBooleanParameter("npe-retried", true);
                return safeExecute(target, request);
            } else {
                request.abort();
                throw new BrokenHttpClientException(e);
            }
        } catch (IllegalArgumentException e) {
            // more brokenness
            // cf. http://code.google.com/p/android/issues/detail?id=2690
            request.abort();
            throw new BrokenHttpClientException(e);
        } catch (ArrayIndexOutOfBoundsException e) {
            // Caused by: java.lang.ArrayIndexOutOfBoundsException: length=7; index=-9
            //   org.apache.harmony.security.asn1.DerInputStream.readBitString(DerInputStream.java:72))
            //   org.apache.harmony.security.asn1.ASN1BitString.decode(ASN1BitString.java:64)
            //   ...
            //   org.apache.http.conn.ssl.SSLSocketFactory.createSocket(SSLSocketFactory.java:375)
            request.abort();
            throw new BrokenHttpClientException(e);
        }
    }

    protected HttpResponse execute(Request req, Class<? extends HttpRequestBase> reqType) throws IOException {
        Request defaults = ApiWrapper.defaultParams.get();
        if (defaults != null && !defaults.getParams().isEmpty()) {
            // FIX: copy the request once, then merge — the original re-copied
            // the request on every iteration of the merge loop.
            req = new Request(req);
            for (NameValuePair nvp : defaults) {
                req.add(nvp.getName(), nvp.getValue());
            }
        }
        logRequest(reqType, req);
        return execute(addClientIdIfNecessary(req).buildRequest(reqType));
    }

    protected Request addClientIdIfNecessary(Request req) {
        return req.getParams().containsKey(CLIENT_ID) ? req : new Request(req).add(CLIENT_ID, mClientId);
    }

    protected void logRequest(Class<? extends HttpRequestBase> reqType, Request request) {
        if (debugRequests) System.err.println(reqType.getSimpleName() + " " + request);
    }

    protected HttpHost determineTarget(HttpUriRequest request) {
        // A null target may be acceptable if there is a default target.
        // Otherwise, the null target is detected in the director.
        URI requestURI = request.getURI();
        if (requestURI.isAbsolute()) {
            return new HttpHost(requestURI.getHost(), requestURI.getPort(), requestURI.getScheme());
        } else {
            return null;
        }
    }

    /**
     * serialize the wrapper to a File
     * @param f target
     * @throws java.io.IOException IO problems
     */
    public void toFile(File f) throws IOException {
        ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(f));
        try {
            oos.writeObject(this);
        } finally {
            // FIX: close the stream even when writeObject throws (consistent with fromFile)
            oos.close();
        }
    }

    public String getDefaultContentType() {
        return (mDefaultContentType == null) ? DEFAULT_CONTENT_TYPE : mDefaultContentType;
    }

    public void setDefaultContentType(String contentType) {
        mDefaultContentType = contentType;
    }

    public String getDefaultAcceptEncoding() {
        return mDefaultAcceptEncoding;
    }

    public void setDefaultAcceptEncoding(String encoding) {
        mDefaultAcceptEncoding = encoding;
    }

    /* package */ static Request addScope(Request request, String[] scopes) {
        if (scopes != null && scopes.length > 0) {
            // join scopes with a single space, as required by the token endpoint
            StringBuilder scope = new StringBuilder();
            for (int i = 0; i < scopes.length; i++) {
                scope.append(scopes[i]);
                if (i < scopes.length - 1) scope.append(" ");
            }
            request.add(SCOPE, scope.toString());
        }
        return request;
    }

    /**
     * Read wrapper from a file
     * @param f the file
     * @return the wrapper
     * @throws IOException IO problems
     * @throws ClassNotFoundException class not found
     */
    public static ApiWrapper fromFile(File f) throws IOException, ClassNotFoundException {
        ObjectInputStream ois = new ObjectInputStream(new FileInputStream(f));
        try {
            return (ApiWrapper) ois.readObject();
        } finally {
            ois.close();
        }
    }

    /** Creates an OAuth2 header for the given token */
    public static Header createOAuthHeader(Token token) {
        return new BasicHeader(AUTH.WWW_AUTH_RESP, "OAuth " +
            (token == null || !token.valid() ? "invalidated" : token.access));
    }

    /** Adds an OAuth2 header to a given request */
    protected HttpUriRequest addAuthHeader(HttpUriRequest request) {
        if (!request.containsHeader(AUTH.WWW_AUTH_RESP)) {
            if (mToken != EMPTY_TOKEN) {
                request.addHeader(createOAuthHeader(mToken));
            }
        }
        return request;
    }

    /** Forces JSON */
    protected HttpUriRequest addAcceptHeader(HttpUriRequest request) {
        if (!request.containsHeader("Accept")) {
            request.addHeader("Accept", getDefaultContentType());
        }
        return request;
    }

    /** Adds all required headers to the request */
    protected HttpUriRequest addHeaders(HttpUriRequest req) {
        return addAcceptHeader(addAuthHeader(addEncodingHeader(req)));
    }

    protected HttpUriRequest addEncodingHeader(HttpUriRequest req) {
        if (getDefaultAcceptEncoding() != null) {
            req.addHeader("Accept-Encoding", mDefaultAcceptEncoding);
        }
        return req;
    }

    /** This method mainly exists to make the wrapper more testable. oh, apache's insanity. */
    protected RequestDirector getRequestDirector(HttpRequestExecutor requestExec,
                                                 ClientConnectionManager conman,
                                                 ConnectionReuseStrategy reustrat,
                                                 ConnectionKeepAliveStrategy kastrat,
                                                 HttpRoutePlanner rouplan,
                                                 HttpProcessor httpProcessor,
                                                 HttpRequestRetryHandler retryHandler,
                                                 RedirectHandler redirectHandler,
                                                 AuthenticationHandler targetAuthHandler,
                                                 AuthenticationHandler proxyAuthHandler,
                                                 UserTokenHandler stateHandler,
                                                 HttpParams params) {
        return new DefaultRequestDirector(requestExec, conman, reustrat, kastrat, rouplan,
            httpProcessor, retryHandler, redirectHandler, targetAuthHandler, proxyAuthHandler,
            stateHandler, params);
    }

    /** thread-local default parameters merged into every request issued on this thread */
    private static final ThreadLocal<Request> defaultParams = new ThreadLocal<Request>() {
        @Override
        protected Request initialValue() {
            return new Request();
        }
    };

    /**
     * Adds a default parameter which will get added to all requests in this thread.
     * Use this method carefully since it might lead to unexpected side-effects.
     * @param name  the name of the parameter
     * @param value the value of the parameter.
     */
    public static void setDefaultParameter(String name, String value) {
        defaultParams.get().set(name, value);
    }

    /**
     * Clears the default parameters.
     */
    public static void clearDefaultParameters() {
        defaultParams.remove();
    }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.flex.forks.batik.bridge; import java.awt.RenderingHints; import org.apache.flex.forks.batik.css.engine.CSSEngineEvent; import org.apache.flex.forks.batik.css.engine.SVGCSSEngine; import org.apache.flex.forks.batik.gvt.GraphicsNode; import org.apache.flex.forks.batik.gvt.ShapeNode; import org.apache.flex.forks.batik.gvt.ShapePainter; import org.w3c.dom.Element; /** * The base bridge class for shapes. Subclasses bridge <tt>ShapeNode</tt>. * * @author <a href="mailto:tkormann@apache.org">Thierry Kormann</a> * @version $Id: SVGShapeElementBridge.java 475477 2006-11-15 22:44:28Z cam $ */ public abstract class SVGShapeElementBridge extends AbstractGraphicsNodeBridge { /** * Constructs a new bridge for SVG shapes. */ protected SVGShapeElementBridge() {} /** * Creates a graphics node using the specified BridgeContext and * for the specified element. 
* * @param ctx the bridge context to use * @param e the element that describes the graphics node to build * @return a graphics node that represents the specified element */ public GraphicsNode createGraphicsNode(BridgeContext ctx, Element e) { ShapeNode shapeNode = (ShapeNode)super.createGraphicsNode(ctx, e); if (shapeNode == null) { return null; } associateSVGContext(ctx, e, shapeNode); // delegates to subclasses the shape construction buildShape(ctx, e, shapeNode); // 'shape-rendering' and 'color-rendering' RenderingHints hints = null; hints = CSSUtilities.convertColorRendering(e, hints); hints = CSSUtilities.convertShapeRendering(e, hints); if (hints != null) shapeNode.setRenderingHints(hints); return shapeNode; } /** * Creates a <tt>ShapeNode</tt>. */ protected GraphicsNode instantiateGraphicsNode() { return new ShapeNode(); } /** * Builds using the specified BridgeContext and element, the * specified graphics node. * * @param ctx the bridge context to use * @param e the element that describes the graphics node to build * @param node the graphics node to build */ public void buildGraphicsNode(BridgeContext ctx, Element e, GraphicsNode node) { ShapeNode shapeNode = (ShapeNode)node; shapeNode.setShapePainter(createShapePainter(ctx, e, shapeNode)); super.buildGraphicsNode(ctx, e, node); } /** * Creates the shape painter associated to the specified element. * This implementation creates a shape painter considering the * various fill and stroke properties. 
* * @param ctx the bridge context to use * @param e the element that describes the shape painter to use * @param shapeNode the shape node that is interested in its shape painter */ protected ShapePainter createShapePainter(BridgeContext ctx, Element e, ShapeNode shapeNode) { // 'fill' // 'fill-opacity' // 'stroke' // 'stroke-opacity', // 'stroke-width' // 'stroke-linecap' // 'stroke-linejoin' // 'stroke-miterlimit' // 'stroke-dasharray' // 'stroke-dashoffset' return PaintServer.convertFillAndStroke(e, shapeNode, ctx); } /** * Initializes the specified ShapeNode's shape defined by the * specified Element and using the specified bridge context. * * @param ctx the bridge context to use * @param e the element that describes the shape node to build * @param node the shape node to initialize */ protected abstract void buildShape(BridgeContext ctx, Element e, ShapeNode node); /** * Returns false as shapes are not a container. */ public boolean isComposite() { return false; } // BridgeUpdateHandler implementation ////////////////////////////////// /** * Invoked when the geometry of an graphical element has changed. */ protected void handleGeometryChanged() { super.handleGeometryChanged(); ShapeNode shapeNode = (ShapeNode)node; shapeNode.setShapePainter(createShapePainter(ctx, e, shapeNode)); } /** * This flag bit indicates if a new shape painter has already been created. * Avoid creating one ShapePainter per CSS property change */ protected boolean hasNewShapePainter; /** * Invoked when CSS properties have changed on an element. * * @param evt the CSSEngine event that describes the update */ public void handleCSSEngineEvent(CSSEngineEvent evt) { hasNewShapePainter = false; super.handleCSSEngineEvent(evt); } /** * Invoked for each CSS property that has changed. 
*/ protected void handleCSSPropertyChanged(int property) { switch(property) { case SVGCSSEngine.FILL_INDEX: case SVGCSSEngine.FILL_OPACITY_INDEX: case SVGCSSEngine.STROKE_INDEX: case SVGCSSEngine.STROKE_OPACITY_INDEX: // Opportunity to just 'update' the existing shape painters... case SVGCSSEngine.STROKE_WIDTH_INDEX: case SVGCSSEngine.STROKE_LINECAP_INDEX: case SVGCSSEngine.STROKE_LINEJOIN_INDEX: case SVGCSSEngine.STROKE_MITERLIMIT_INDEX: case SVGCSSEngine.STROKE_DASHARRAY_INDEX: case SVGCSSEngine.STROKE_DASHOFFSET_INDEX: { if (!hasNewShapePainter) { hasNewShapePainter = true; ShapeNode shapeNode = (ShapeNode)node; shapeNode.setShapePainter(createShapePainter(ctx, e, shapeNode)); } break; } case SVGCSSEngine.SHAPE_RENDERING_INDEX: { RenderingHints hints = node.getRenderingHints(); hints = CSSUtilities.convertShapeRendering(e, hints); if (hints != null) { node.setRenderingHints(hints); } break; } case SVGCSSEngine.COLOR_RENDERING_INDEX: { RenderingHints hints = node.getRenderingHints(); hints = CSSUtilities.convertColorRendering(e, hints); if (hints != null) { node.setRenderingHints(hints); } break; } default: super.handleCSSPropertyChanged(property); } } }
package com.thinkbiganalytics.feedmgr.service.feed; /*- * #%L * thinkbig-feed-manager-controller * %% * Copyright (C) 2017 ThinkBig Analytics * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.google.common.base.Stopwatch; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; import com.thinkbiganalytics.datalake.authorization.service.HadoopAuthorizationService; import com.thinkbiganalytics.feedmgr.nifi.CreateFeedBuilder; import com.thinkbiganalytics.feedmgr.nifi.PropertyExpressionResolver; import com.thinkbiganalytics.feedmgr.nifi.TemplateConnectionUtil; import com.thinkbiganalytics.feedmgr.nifi.cache.NifiFlowCache; import com.thinkbiganalytics.feedmgr.rest.model.EntityVersion; import com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata; import com.thinkbiganalytics.feedmgr.rest.model.FeedSummary; import com.thinkbiganalytics.feedmgr.rest.model.FeedVersions; import com.thinkbiganalytics.feedmgr.rest.model.NifiFeed; import com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplate; import com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplateRequest; import com.thinkbiganalytics.feedmgr.rest.model.ReusableTemplateConnectionInfo; import com.thinkbiganalytics.feedmgr.rest.model.UIFeed; import com.thinkbiganalytics.feedmgr.rest.model.UserField; import com.thinkbiganalytics.feedmgr.rest.model.UserProperty; import com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl; import 
com.thinkbiganalytics.feedmgr.service.UserPropertyTransform; import com.thinkbiganalytics.feedmgr.service.feed.datasource.DerivedDatasourceFactory; import com.thinkbiganalytics.feedmgr.service.security.SecurityService; import com.thinkbiganalytics.feedmgr.service.template.FeedManagerTemplateService; import com.thinkbiganalytics.feedmgr.service.template.NiFiTemplateCache; import com.thinkbiganalytics.feedmgr.service.template.RegisteredTemplateService; import com.thinkbiganalytics.feedmgr.sla.ServiceLevelAgreementService; import com.thinkbiganalytics.metadata.api.MetadataAccess; import com.thinkbiganalytics.metadata.api.category.Category; import com.thinkbiganalytics.metadata.api.category.CategoryProvider; import com.thinkbiganalytics.metadata.api.category.security.CategoryAccessControl; import com.thinkbiganalytics.metadata.api.datasource.Datasource; import com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider; import com.thinkbiganalytics.metadata.api.datasource.DerivedDatasource; import com.thinkbiganalytics.metadata.api.event.MetadataChange; import com.thinkbiganalytics.metadata.api.event.MetadataEventListener; import com.thinkbiganalytics.metadata.api.event.MetadataEventService; import com.thinkbiganalytics.metadata.api.event.feed.FeedChange; import com.thinkbiganalytics.metadata.api.event.feed.FeedChangeEvent; import com.thinkbiganalytics.metadata.api.event.feed.FeedPropertyChangeEvent; import com.thinkbiganalytics.metadata.api.extension.UserFieldDescriptor; import com.thinkbiganalytics.metadata.api.feed.Feed; import com.thinkbiganalytics.metadata.api.feed.FeedDestination; import com.thinkbiganalytics.metadata.api.feed.FeedProperties; import com.thinkbiganalytics.metadata.api.feed.FeedProvider; import com.thinkbiganalytics.metadata.api.feed.FeedSource; import com.thinkbiganalytics.metadata.api.feed.OpsManagerFeedProvider; import com.thinkbiganalytics.metadata.api.feed.security.FeedAccessControl; import 
com.thinkbiganalytics.metadata.api.security.HadoopSecurityGroup; import com.thinkbiganalytics.metadata.api.template.FeedManagerTemplate; import com.thinkbiganalytics.metadata.api.template.FeedManagerTemplateProvider; import com.thinkbiganalytics.metadata.modeshape.MetadataRepositoryException; import com.thinkbiganalytics.metadata.rest.model.sla.Obligation; import com.thinkbiganalytics.metadata.sla.api.ObligationGroup; import com.thinkbiganalytics.metadata.sla.spi.ServiceLevelAgreementBuilder; import com.thinkbiganalytics.metadata.sla.spi.ServiceLevelAgreementProvider; import com.thinkbiganalytics.nifi.feedmgr.FeedRollbackException; import com.thinkbiganalytics.nifi.feedmgr.InputOutputPort; import com.thinkbiganalytics.nifi.rest.NiFiObjectCache; import com.thinkbiganalytics.nifi.rest.client.LegacyNifiRestClient; import com.thinkbiganalytics.nifi.rest.model.NiFiPropertyDescriptorTransform; import com.thinkbiganalytics.nifi.rest.model.NifiProcessGroup; import com.thinkbiganalytics.nifi.rest.model.NifiProperty; import com.thinkbiganalytics.nifi.rest.support.NifiPropertyUtil; import com.thinkbiganalytics.policy.precondition.DependentFeedPrecondition; import com.thinkbiganalytics.policy.precondition.Precondition; import com.thinkbiganalytics.policy.precondition.transform.PreconditionPolicyTransformer; import com.thinkbiganalytics.policy.rest.model.FieldRuleProperty; import com.thinkbiganalytics.policy.rest.model.PreconditionRule; import com.thinkbiganalytics.rest.model.LabelValue; import com.thinkbiganalytics.security.AccessController; import com.thinkbiganalytics.security.action.Action; import com.thinkbiganalytics.support.FeedNameUtil; import org.apache.commons.collections.ListUtils; import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import 
org.springframework.beans.factory.annotation.Value; import org.springframework.dao.DataAccessException; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.security.core.context.SecurityContextHolder; import java.io.Serializable; import java.security.Principal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.inject.Inject; import javax.ws.rs.NotFoundException; public class DefaultFeedManagerFeedService implements FeedManagerFeedService { private static final Logger log = LoggerFactory.getLogger(DefaultFeedManagerFeedService.class); private static final Pageable PAGE_ALL = new PageRequest(0, Integer.MAX_VALUE); /** * Event listener for precondition events */ private final MetadataEventListener<FeedPropertyChangeEvent> feedPropertyChangeListener = new FeedPropertyChangeDispatcher(); @Inject FeedManagerTemplateProvider templateProvider; @Inject FeedManagerTemplateService templateRestProvider; @Inject FeedManagerPreconditionService feedPreconditionModelTransform; @Inject FeedModelTransform feedModelTransform; @Inject ServiceLevelAgreementProvider slaProvider; @Inject ServiceLevelAgreementService serviceLevelAgreementService; @Inject OpsManagerFeedProvider opsManagerFeedProvider; @Inject private DatasourceProvider datasourceProvider; /** * Metadata event service */ @Inject private AccessController accessController; @Inject private MetadataEventService metadataEventService; @Inject private NiFiPropertyDescriptorTransform 
propertyDescriptorTransform; @Inject private DerivedDatasourceFactory derivedDatasourceFactory; // use autowired instead of Inject to allow null values. @Autowired(required = false) @Qualifier("hadoopAuthorizationService") private HadoopAuthorizationService hadoopAuthorizationService; @Inject private SecurityService securityService; @Inject protected CategoryProvider categoryProvider; @Inject protected FeedProvider feedProvider; @Inject protected MetadataAccess metadataAccess; @Inject private FeedManagerTemplateService feedManagerTemplateService; @Inject private RegisteredTemplateService registeredTemplateService; @Inject PropertyExpressionResolver propertyExpressionResolver; @Inject NifiFlowCache nifiFlowCache; @Inject private NiFiTemplateCache niFiTemplateCache; @Inject private LegacyNifiRestClient nifiRestClient; @Inject private FeedHiveTableService feedHiveTableService; @Inject private TemplateConnectionUtil templateConnectionUtil; @Value("${nifi.remove.inactive.versioned.feeds:true}") private boolean removeInactiveNifiVersionedFeedFlows; @Value("${nifi.auto.align:true}") private boolean nifiAutoFeedsAlignAfterSave; @Inject private NiFiObjectCache niFiObjectCache; /** * Adds listeners for transferring events. */ @PostConstruct public void addEventListener() { metadataEventService.addListener(feedPropertyChangeListener); } /** * Removes listeners and stops transferring events. */ @PreDestroy public void removeEventListener() { metadataEventService.removeListener(feedPropertyChangeListener); } @Override public boolean checkFeedPermission(String id, Action action, Action... 
more) { if (accessController.isEntityAccessControlled()) { return metadataAccess.read(() -> { Feed.ID domainId = feedProvider.resolveId(id); Feed domainFeed = feedProvider.findById(domainId); if (domainFeed != null) { domainFeed.getAllowedActions().checkPermission(action, more); return true; } else { return false; } }); } else { return true; } } @Override public FeedMetadata getFeedByName(final String categoryName, final String feedName) { FeedMetadata feedMetadata = metadataAccess.read(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); Feed domainFeed = feedProvider.findBySystemName(categoryName, feedName); if (domainFeed != null) { return feedModelTransform.domainToFeedMetadata(domainFeed); } return null; }); return feedMetadata; } @Override public FeedMetadata getFeedById(final String id) { return metadataAccess.read(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); return getFeedById(id, false); }); } @Override public FeedMetadata getFeedById(final String id, final boolean refreshTargetTableSchema) { return metadataAccess.read(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); FeedMetadata feedMetadata = null; Feed.ID domainId = feedProvider.resolveId(id); Feed domainFeed = feedProvider.findById(domainId); if (domainFeed != null) { feedMetadata = feedModelTransform.domainToFeedMetadata(domainFeed); } if (refreshTargetTableSchema && feedMetadata != null) { //commented out for now as some issues were found with feeds with TEXTFILE as their output //this will attempt to sync the schema stored in modeshape with that in Hive // feedModelTransform.refreshTableSchemaFromHive(feedMetadata); } return feedMetadata; }); } @Override public FeedVersions getFeedVersions(String feedId, boolean includeContent) { return metadataAccess.read(() -> { 
this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); Feed.ID domainId = feedProvider.resolveId(feedId); return feedProvider.findVersions(domainId, includeContent) .map(list -> feedModelTransform.domainToFeedVersions(list, domainId)) .orElse((FeedVersions) null); }); } @Override public Optional<EntityVersion> getFeedVersion(String feedId, String versionId, boolean includeContent) { return metadataAccess.read(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); Feed.ID domainFeedId = feedProvider.resolveId(feedId); com.thinkbiganalytics.metadata.api.versioning.EntityVersion.ID domainVersionId = feedProvider.resolveVersion(versionId); return feedProvider.findVersion(domainFeedId, domainVersionId, includeContent) .map(version -> feedModelTransform.domainToFeedVersion(version)); }); } @Override public Collection<FeedMetadata> getFeeds() { return getFeeds(PAGE_ALL, null).getContent(); } public Page<FeedMetadata> getFeeds(Pageable pageable, String filter) { return metadataAccess.read(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); Page<Feed> domainFeeds = feedProvider.findPage(pageable, filter); return domainFeeds.map(d -> feedModelTransform.domainToFeedMetadata(d)); }); } @Override public Collection<? 
extends UIFeed> getFeeds(boolean verbose) { if (verbose) { return getFeeds(); } else { return getFeedSummaryData(); } } @Override public Page<UIFeed> getFeeds(boolean verbose, Pageable pageable, String filter) { if (verbose) { return getFeeds(pageable, filter).map(UIFeed.class::cast); } else { return getFeedSummaryData(pageable, filter).map(UIFeed.class::cast); } } @Override public List<FeedSummary> getFeedSummaryData() { return getFeedSummaryData(PAGE_ALL, null).getContent().stream() .map(FeedSummary.class::cast) .collect(Collectors.toList()); } public Page<FeedSummary> getFeedSummaryData(Pageable pageable, String filter) { return metadataAccess.read(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); Page<Feed> domainFeeds = feedProvider.findPage(pageable, filter); return domainFeeds.map(d -> feedModelTransform.domainToFeedSummary(d)); }); } @Override public List<FeedSummary> getFeedSummaryForCategory(final String categoryId) { return metadataAccess.read(() -> { List<FeedSummary> summaryList = new ArrayList<>(); boolean hasPermission = this.accessController.hasPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); if (hasPermission) { Category.ID categoryDomainId = categoryProvider.resolveId(categoryId); List<? 
extends Feed> domainFeeds = feedProvider.findByCategoryId(categoryDomainId); if (domainFeeds != null && !domainFeeds.isEmpty()) { List<FeedMetadata> feeds = feedModelTransform.domainToFeedMetadata(domainFeeds); for (FeedMetadata feed : feeds) { summaryList.add(new FeedSummary(feed)); } } } return summaryList; }); } @Override public List<FeedMetadata> getFeedsWithTemplate(final String registeredTemplateId) { return metadataAccess.read(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); List<FeedMetadata> feedMetadatas = null; FeedManagerTemplate.ID templateDomainId = templateProvider.resolveId(registeredTemplateId); List<? extends Feed> domainFeeds = feedProvider.findByTemplateId(templateDomainId); if (domainFeeds != null) { feedMetadatas = feedModelTransform.domainToFeedMetadata(domainFeeds); } return feedMetadatas; }); } @Override public Feed.ID resolveFeed(@Nonnull Serializable fid) { return metadataAccess.read(() -> feedProvider.resolveFeed(fid)); } /** * Create/Update a Feed in NiFi. Save the metadata to Kylo meta store. 
* * @param feedMetadata the feed metadata * @return an object indicating if the feed creation was successful or not */ public NifiFeed createFeed(final FeedMetadata feedMetadata) { //functional access to be able to create a feed this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_FEEDS); if (feedMetadata.getState() == null) { if (feedMetadata.isActive()) { feedMetadata.setState(Feed.State.ENABLED.name()); } else { feedMetadata.setState(Feed.State.DISABLED.name()); } } if (StringUtils.isBlank(feedMetadata.getId())) { feedMetadata.setIsNew(true); } //Read all the feeds as System Service account to ensure the feed name is unique if (feedMetadata.isNew()) { metadataAccess.read(() -> { Feed existing = feedProvider.findBySystemName(feedMetadata.getCategory().getSystemName(), feedMetadata.getSystemFeedName()); if (existing != null) { throw new DuplicateFeedNameException(feedMetadata.getCategoryName(), feedMetadata.getFeedName()); } }, MetadataAccess.SERVICE); } NifiFeed feed = createAndSaveFeed(feedMetadata); //register the audit for the update event if (feed.isSuccess() && !feedMetadata.isNew()) { Feed.State state = Feed.State.valueOf(feedMetadata.getState()); Feed.ID id = feedProvider.resolveId(feedMetadata.getId()); notifyFeedStateChange(feedMetadata, id, state, MetadataChange.ChangeType.UPDATE); } else if (feed.isSuccess() && feedMetadata.isNew()) { //update the access control feedMetadata.toRoleMembershipChangeList().stream().forEach(roleMembershipChange -> securityService.changeFeedRoleMemberships(feed.getFeedMetadata().getId(), roleMembershipChange)); } return feed; } /** * Create/Update a Feed in NiFi. Save the metadata to Kylo meta store. 
* * @param feedMetadata the feed metadata * @return an object indicating if the feed creation was successful or not */ private NifiFeed createAndSaveFeed(FeedMetadata feedMetadata) { Stopwatch stopwatch = Stopwatch.createStarted(); NifiFeed feed = null; if (StringUtils.isBlank(feedMetadata.getId())) { feedMetadata.setIsNew(true); //If the feed is New we need to ensure the user has CREATE_FEED entity permission if (accessController.isEntityAccessControlled()) { metadataAccess.read(() -> { //ensure the user has rights to create feeds under the category Category domainCategory = categoryProvider.findById(categoryProvider.resolveId(feedMetadata.getCategory().getId())); if (domainCategory == null) { //throw exception throw new MetadataRepositoryException("Unable to find the category " + feedMetadata.getCategory().getSystemName()); } domainCategory.getAllowedActions().checkPermission(CategoryAccessControl.CREATE_FEED); //ensure the user has rights to create feeds using the template FeedManagerTemplate domainTemplate = templateProvider.findById(templateProvider.resolveId(feedMetadata.getTemplateId())); if (domainTemplate == null) { throw new MetadataRepositoryException("Unable to find the template " + feedMetadata.getTemplateId()); } // domainTemplate.getAllowedActions().checkPermission(TemplateAccessControl.CREATE_FEED); }); } } else if (accessController.isEntityAccessControlled()) { metadataAccess.read(() -> { //perform explict entity access check here as we dont want to modify the NiFi flow unless user has access to edit the feed Feed.ID domainId = feedProvider.resolveId(feedMetadata.getId()); Feed domainFeed = feedProvider.findById(domainId); if (domainFeed != null) { domainFeed.getAllowedActions().checkPermission(FeedAccessControl.EDIT_DETAILS); } else { throw new NotFoundException("Feed not found for id " + feedMetadata.getId()); } }); } //replace expressions with values if (feedMetadata.getTable() != null) { feedMetadata.getTable().updateMetadataFieldValues(); } if 
(feedMetadata.getProperties() == null) { feedMetadata.setProperties(new ArrayList<NifiProperty>()); } //store ref to the originalFeedProperties before resolving and merging with the template List<NifiProperty> originalFeedProperties = feedMetadata.getProperties(); //get all the properties for the metadata RegisteredTemplate registeredTemplate = registeredTemplateService.findRegisteredTemplate( new RegisteredTemplateRequest.Builder().templateId(feedMetadata.getTemplateId()).templateName(feedMetadata.getTemplateName()).isFeedEdit(true).includeSensitiveProperties(true) .build()); //copy the registered template properties it a new list so it doest get updated List<NifiProperty> templateProperties =registeredTemplate.getProperties().stream().map(nifiProperty -> new NifiProperty(nifiProperty)).collect(Collectors.toList()); //update the template properties with the feedMetadata properties List<NifiProperty> matchedProperties = NifiPropertyUtil .matchAndSetPropertyByProcessorName(templateProperties, feedMetadata.getProperties(), NifiPropertyUtil.PROPERTY_MATCH_AND_UPDATE_MODE.UPDATE_ALL_PROPERTIES); registeredTemplate.setProperties(templateProperties); feedMetadata.setProperties(registeredTemplate.getProperties()); feedMetadata.setRegisteredTemplate(registeredTemplate); //resolve any ${metadata.} properties List<NifiProperty> resolvedProperties = propertyExpressionResolver.resolvePropertyExpressions(feedMetadata); //decrypt the metadata feedModelTransform.decryptSensitivePropertyValues(feedMetadata); FeedMetadata.STATE state = FeedMetadata.STATE.NEW; try { state = FeedMetadata.STATE.valueOf(feedMetadata.getState()); } catch (Exception e) { //if the string isnt valid, disregard as it will end up disabling the feed. 
} boolean enabled = (FeedMetadata.STATE.NEW.equals(state) && feedMetadata.isActive()) || FeedMetadata.STATE.ENABLED.equals(state); // flag to indicate to enable the feed later //if this is the first time for this feed and it is set to be enabled, mark it to be enabled after we commit to the JCR store boolean enableLater = false; if (enabled && feedMetadata.isNew()) { enableLater = true; enabled = false; feedMetadata.setState(FeedMetadata.STATE.DISABLED.name()); } CreateFeedBuilder feedBuilder = CreateFeedBuilder .newFeed(nifiRestClient, nifiFlowCache, feedMetadata, registeredTemplate.getNifiTemplateId(), propertyExpressionResolver, propertyDescriptorTransform, niFiObjectCache, templateConnectionUtil) .enabled(enabled) .removeInactiveVersionedProcessGroup(removeInactiveNifiVersionedFeedFlows) .autoAlign(nifiAutoFeedsAlignAfterSave) .withNiFiTemplateCache(niFiTemplateCache); if (registeredTemplate.isReusableTemplate()) { feedBuilder.setReusableTemplate(true); feedMetadata.setIsReusableFeed(true); } else { feedBuilder.inputProcessorType(feedMetadata.getInputProcessorType()) .feedSchedule(feedMetadata.getSchedule()).properties(feedMetadata.getProperties()); if (registeredTemplate.usesReusableTemplate()) { for (ReusableTemplateConnectionInfo connection : registeredTemplate.getReusableTemplateConnections()) { feedBuilder.addInputOutputPort(new InputOutputPort(connection.getReusableTemplateInputPortName(), connection.getFeedOutputPortName())); } } } stopwatch.stop(); log.debug("Time to prepare data for saving feed in NiFi: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); stopwatch.start(); NifiProcessGroup entity = feedBuilder.build(); stopwatch.stop(); log.debug("Time to save feed in NiFi: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); feed = new NifiFeed(feedMetadata, entity); //set the original feedProperties back to the feed feedMetadata.setProperties(originalFeedProperties); //encrypt the metadata properties 
feedModelTransform.encryptSensitivePropertyValues(feedMetadata); if (entity.isSuccess()) { feedMetadata.setNifiProcessGroupId(entity.getProcessGroupEntity().getId()); try { stopwatch.start(); saveFeed(feedMetadata); feed.setEnableAfterSave(enableLater); feed.setSuccess(true); stopwatch.stop(); log.debug("Time to saveFeed in Kylo: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); stopwatch.start(); feedBuilder.checkAndRemoveVersionedProcessGroup(); } catch (Exception e) { feed.setSuccess(false); feed.addErrorMessage(e); } } else { feed.setSuccess(false); } if (!feed.isSuccess()) { if (!entity.isRolledBack()) { try { feedBuilder.rollback(); } catch (FeedRollbackException rollbackException) { log.error("Error rolling back feed {}. {} ", feedMetadata.getCategoryAndFeedName(), rollbackException.getMessage()); feed.addErrorMessage("Error occurred in rolling back the Feed."); } entity.setRolledBack(true); } } return feed; } private void saveFeed(final FeedMetadata feed) { metadataAccess.commit(() -> { Stopwatch stopwatch = Stopwatch.createStarted(); List<? 
extends HadoopSecurityGroup> previousSavedSecurityGroups = null; // Store the old security groups before saving because we need to compare afterward if (!feed.isNew()) { Feed previousStateBeforeSaving = feedProvider.findById(feedProvider.resolveId(feed.getId())); Map<String, String> userProperties = previousStateBeforeSaving.getUserProperties(); previousSavedSecurityGroups = previousStateBeforeSaving.getSecurityGroups(); } //if this is the first time saving this feed create a new one Feed domainFeed = feedModelTransform.feedToDomain(feed); if (domainFeed.getState() == null) { domainFeed.setState(Feed.State.ENABLED); } stopwatch.stop(); log.debug("Time to transform the feed to a domain object for saving: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); //initially save the feed if (feed.isNew()) { stopwatch.start(); domainFeed = feedProvider.update(domainFeed); stopwatch.stop(); log.debug("Time to save the New feed: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); } final String domainId = domainFeed.getId().toString(); final String feedName = FeedNameUtil.fullName(domainFeed.getCategory().getSystemName(), domainFeed.getName()); // Build preconditions stopwatch.start(); assignFeedDependencies(feed, domainFeed); stopwatch.stop(); log.debug("Time to assignFeedDependencies: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); //Assign the datasources stopwatch.start(); assignFeedDatasources(feed, domainFeed); stopwatch.stop(); log.debug("Time to assignFeedDatasources: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); stopwatch.start(); boolean isStream = feed.getRegisteredTemplate() != null ? feed.getRegisteredTemplate().isStream() : false; Long timeBetweenBatchJobs = feed.getRegisteredTemplate() != null ? 
feed.getRegisteredTemplate().getTimeBetweenStartingBatchJobs() : 0L; //sync the feed information to ops manager metadataAccess.commit(() -> opsManagerFeedProvider.save(opsManagerFeedProvider.resolveId(domainId), feedName, isStream, timeBetweenBatchJobs)); stopwatch.stop(); log.debug("Time to sync feed data with Operations Manager: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); // Update hadoop security group polices if the groups changed if (!feed.isNew() && !ListUtils.isEqualList(previousSavedSecurityGroups, domainFeed.getSecurityGroups())) { stopwatch.start(); List<? extends HadoopSecurityGroup> securityGroups = domainFeed.getSecurityGroups(); List<String> groupsAsCommaList = securityGroups.stream().map(group -> group.getName()).collect(Collectors.toList()); hadoopAuthorizationService.updateSecurityGroupsForAllPolicies(feed.getSystemCategoryName(), feed.getSystemFeedName(), groupsAsCommaList, domainFeed.getProperties()); stopwatch.stop(); log.debug("Time to update hadoop security groups: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); } // Update Hive metastore stopwatch.start(); final boolean hasHiveDestination = domainFeed.getDestinations().stream() .map(FeedDestination::getDatasource) .filter(DerivedDatasource.class::isInstance) .map(DerivedDatasource.class::cast) .anyMatch(datasource -> "HiveDatasource".equals(datasource.getDatasourceType())); if (hasHiveDestination) { try { feedHiveTableService.updateColumnDescriptions(feed); } catch (final DataAccessException e) { log.warn("Failed to update column descriptions for feed: {}", feed.getCategoryAndFeedDisplayName(), e); } } stopwatch.stop(); log.debug("Time to update hive metastore: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.reset(); // Update Kylo metastore stopwatch.start(); domainFeed = feedProvider.update(domainFeed); stopwatch.stop(); log.debug("Time to call feedProvider.update: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); 
stopwatch.reset(); }, (e) -> { if (feed.isNew() && StringUtils.isNotBlank(feed.getId())) { //Rollback ops Manager insert if it is newly created metadataAccess.commit(() -> { opsManagerFeedProvider.delete(opsManagerFeedProvider.resolveId(feed.getId())); }); } }); } /** * Looks for the Feed Preconditions and assigns the Feed Dependencies */ private void assignFeedDependencies(FeedMetadata feed, Feed domainFeed) { final Feed.ID domainFeedId = domainFeed.getId(); List<PreconditionRule> preconditions = feed.getSchedule().getPreconditions(); if (preconditions != null) { PreconditionPolicyTransformer transformer = new PreconditionPolicyTransformer(preconditions); transformer.applyFeedNameToCurrentFeedProperties(feed.getCategory().getSystemName(), feed.getSystemFeedName()); List<com.thinkbiganalytics.metadata.rest.model.sla.ObligationGroup> transformedPreconditions = transformer.getPreconditionObligationGroups(); ServiceLevelAgreementBuilder preconditionBuilder = feedProvider.buildPrecondition(domainFeed.getId()).name("Precondition for feed " + feed.getCategoryAndFeedName() + " (" + domainFeed.getId() + ")"); for (com.thinkbiganalytics.metadata.rest.model.sla.ObligationGroup precondition : transformedPreconditions) { for (Obligation group : precondition.getObligations()) { preconditionBuilder.obligationGroupBuilder(ObligationGroup.Condition.valueOf(precondition.getCondition())).obligationBuilder().metric(group.getMetrics()).build(); } } preconditionBuilder.build(); //add in the lineage dependency relationships //will the feed exist in the jcr store here if it is new?? 
//store the existing list of dependent feeds to track and delete those that dont match Set<Feed.ID> oldDependentFeedIds = new HashSet<Feed.ID>(); Set<Feed.ID> newDependentFeedIds = new HashSet<Feed.ID>(); List<Feed> dependentFeeds = domainFeed.getDependentFeeds(); if (dependentFeeds != null && !dependentFeeds.isEmpty()) { dependentFeeds.stream().forEach(dependentFeed -> { oldDependentFeedIds.add(dependentFeed.getId()); }); } //find those preconditions that are marked as dependent feed types List<Precondition> preconditionPolicies = transformer.getPreconditionPolicies(); preconditionPolicies.stream().filter(precondition -> precondition instanceof DependentFeedPrecondition).forEach(dependentFeedPrecondition -> { DependentFeedPrecondition feedPrecondition = (DependentFeedPrecondition) dependentFeedPrecondition; List<String> dependentFeedNames = feedPrecondition.getDependentFeedNames(); if (dependentFeedNames != null && !dependentFeedNames.isEmpty()) { //find the feed for (String dependentFeedName : dependentFeedNames) { Feed dependentFeed = feedProvider.findBySystemName(dependentFeedName); if (dependentFeed != null) { Feed.ID newDependentFeedId = dependentFeed.getId(); newDependentFeedIds.add(newDependentFeedId); //add and persist it if it doesnt already exist if (!oldDependentFeedIds.contains(newDependentFeedId)) { feedProvider.addDependent(domainFeedId, dependentFeed.getId()); } } } } }); //delete any of those dependent feed ids from the oldDependentFeeds that are not part of the newDependentFeedIds oldDependentFeedIds.stream().filter(oldFeedId -> !newDependentFeedIds.contains(oldFeedId)) .forEach(dependentFeedToDelete -> feedProvider.removeDependent(domainFeedId, dependentFeedToDelete)); } } /** * Update a given feeds datasources clearing its sources/destinations before revaluating the data * * @param feedId the id of the feed rest model to update */ public void updateFeedDatasources(String feedId) { metadataAccess.commit(() -> { 
feedProvider.removeFeedDestinations(feedProvider.resolveId(feedId)); feedProvider.removeFeedSources(feedProvider.resolveId(feedId)); }); metadataAccess.commit(() -> { Feed domainFeed = feedProvider.findById(feedProvider.resolveId(feedId)); FeedMetadata feed = feedModelTransform.domainToFeedMetadata(domainFeed); assignFeedDatasources(feed, domainFeed); }); } /** * Iterate all of the feeds, clear all sources/destinations and reassign * Note this will be an expensive call */ public void updateAllFeedsDatasources() { metadataAccess.commit(() -> { feedProvider.findAll().stream().forEach(domainFeed -> { domainFeed.clearSourcesAndDestinations(); }); }); metadataAccess.commit(() -> { feedProvider.findAll().stream().forEach(domainFeed -> { FeedMetadata feed = feedModelTransform.domainToFeedMetadata(domainFeed); assignFeedDatasources(feed, domainFeed); }); }); } /** * Assign the feed sources/destinations * * @param feed the feed rest model * @param domainFeed the domain feed */ private void assignFeedDatasources(FeedMetadata feed, Feed domainFeed) { final Feed.ID domainFeedId = domainFeed.getId(); Set<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> sources = new HashSet<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID>(); Set<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> destinations = new HashSet<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID>(); String uniqueName = FeedNameUtil.fullName(feed.getCategory().getSystemName(), feed.getSystemFeedName()); RegisteredTemplate template = feed.getRegisteredTemplate(); if (template == null) { //fetch it for checks template = templateRestProvider.getRegisteredTemplate(feed.getTemplateId()); } //find Definition registration derivedDatasourceFactory.populateDatasources(feed, template, sources, destinations); //remove the older sources only if they have changed if (domainFeed.getSources() != null) { Set<Datasource.ID> existingSourceIds = ((List<FeedSource>) 
domainFeed.getSources()).stream().filter(source -> source.getDatasource() != null).map(source1 -> source1.getDatasource().getId()).collect(Collectors.toSet()); if (!sources.containsAll(existingSourceIds) || (sources.size() != existingSourceIds.size())) { //remove older sources //cant do it here for some reason.. need to do it in a separate transaction feedProvider.removeFeedSources(domainFeedId); } } sources.stream().forEach(sourceId -> feedProvider.ensureFeedSource(domainFeedId, sourceId)); destinations.stream().forEach(sourceId -> feedProvider.ensureFeedDestination(domainFeedId, sourceId)); } @Override public void deleteFeed(@Nonnull final String feedId) { metadataAccess.commit(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS); Feed.ID feedIdentifier = feedProvider.resolveFeed(feedId); Feed feed = feedProvider.getFeed(feedIdentifier); //unschedule any SLAs serviceLevelAgreementService.removeAndUnscheduleAgreementsForFeed(feedIdentifier,feed.getQualifiedName()); feedProvider.deleteFeed(feed.getId()); opsManagerFeedProvider.delete(opsManagerFeedProvider.resolveId(feedId)); return true; }); } @Override public void enableFeedCleanup(@Nonnull String feedId) { metadataAccess.commit(() -> { final Feed.ID id = feedProvider.resolveFeed(feedId); return feedProvider.mergeFeedProperties(id, ImmutableMap.of(FeedProperties.CLEANUP_ENABLED, "true")); }); } private boolean enableFeed(final Feed.ID feedId) { return metadataAccess.commit(() -> { boolean enabled = feedProvider.enableFeed(feedId); Feed domainFeed = feedProvider.findById(feedId); if (domainFeed != null) { domainFeed.setState(Feed.State.ENABLED); feedProvider.update(domainFeed); if (enabled) { FeedMetadata feedMetadata = feedModelTransform.domainToFeedMetadata(domainFeed); notifyFeedStateChange(feedMetadata, feedId, Feed.State.ENABLED, MetadataChange.ChangeType.UPDATE); } } return enabled; }); } // @Transactional(transactionManager = 
"metadataTransactionManager") private boolean disableFeed(final Feed.ID feedId) { return metadataAccess.commit(() -> { boolean disabled = feedProvider.disableFeed(feedId); Feed domainFeed = feedProvider.findById(feedId); if (domainFeed != null) { domainFeed.setState(Feed.State.DISABLED); feedProvider.update(domainFeed); if (disabled) { FeedMetadata feedMetadata = feedModelTransform.domainToFeedMetadata(domainFeed); notifyFeedStateChange(feedMetadata, feedId, Feed.State.DISABLED, MetadataChange.ChangeType.UPDATE); } } return disabled; }); } public FeedSummary enableFeed(final String feedId) { return metadataAccess.commit(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_FEEDS); if (StringUtils.isNotBlank(feedId)) { FeedMetadata feedMetadata = getFeedById(feedId); Feed.ID domainId = feedProvider.resolveFeed(feedId); boolean enabled = enableFeed(domainId); //re fetch it if (enabled) { feedMetadata.setState(Feed.State.ENABLED.name()); serviceLevelAgreementService.enableServiceLevelAgreementSchedule(domainId); } FeedSummary feedSummary = new FeedSummary(feedMetadata); //start any Slas return feedSummary; } return null; }); } public FeedSummary disableFeed(final String feedId) { return metadataAccess.commit(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_FEEDS); if (StringUtils.isNotBlank(feedId)) { FeedMetadata feedMetadata = getFeedById(feedId); Feed.ID domainId = feedProvider.resolveFeed(feedId); boolean disabled = disableFeed(domainId); //re fetch it if (disabled) { feedMetadata.setState(Feed.State.DISABLED.name()); serviceLevelAgreementService.disableServiceLevelAgreementSchedule(domainId); } FeedSummary feedSummary = new FeedSummary(feedMetadata); return feedSummary; } return null; }); } @Override /** * Applies new LableValue array to the FieldProperty.selectableValues {label = Category.Display Feed Name, value=category.system_feed_name} */ public void 
applyFeedSelectOptions(List<FieldRuleProperty> properties) { if (properties != null && !properties.isEmpty()) { List<FeedSummary> feedSummaries = getFeedSummaryData(); List<LabelValue> feedSelection = new ArrayList<>(); for (FeedSummary feedSummary : feedSummaries) { boolean isDisabled = feedSummary.getState() == Feed.State.DISABLED.name(); boolean canEditDetails = accessController.isEntityAccessControlled() ? feedSummary.hasAction(FeedAccessControl.EDIT_DETAILS.getSystemName()) : accessController.hasPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_FEEDS); Map<String, Object> labelValueProperties = new HashMap<>(); labelValueProperties.put("feed:disabled", isDisabled); labelValueProperties.put("feed:editDetails", canEditDetails); feedSelection.add(new LabelValue(feedSummary.getCategoryAndFeedDisplayName() + (isDisabled ? " (DISABLED) " : ""), feedSummary.getCategoryAndFeedSystemName(), isDisabled ? "This feed is currently disabled" : "", labelValueProperties)); } feedSelection.sort(Comparator.comparing(LabelValue::getLabel, String.CASE_INSENSITIVE_ORDER)); for (FieldRuleProperty property : properties) { property.setSelectableValues(feedSelection); if (property.getValues() == null) { property.setValues(new ArrayList<>()); // reset the intial values to be an empty arraylist } } } } @Nonnull @Override public Set<UserField> getUserFields() { return metadataAccess.read(() -> { boolean hasPermission = this.accessController.hasPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); return hasPermission ? 
UserPropertyTransform.toUserFields(feedProvider.getUserFields()) : Collections.emptySet(); }); } @Override public void setUserFields(@Nonnull final Set<UserField> userFields) { boolean hasPermission = this.accessController.hasPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS); if (hasPermission) { feedProvider.setUserFields(UserPropertyTransform.toUserFieldDescriptors(userFields)); } } @Nonnull @Override public Optional<Set<UserProperty>> getUserFields(@Nonnull final String categoryId) { return metadataAccess.read(() -> { this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS); final Optional<Set<UserFieldDescriptor>> categoryUserFields = categoryProvider.getFeedUserFields(categoryProvider.resolveId(categoryId)); final Set<UserFieldDescriptor> globalUserFields = feedProvider.getUserFields(); if (categoryUserFields.isPresent()) { return Optional.of(UserPropertyTransform.toUserProperties(Collections.emptyMap(), Sets.union(globalUserFields, categoryUserFields.get()))); } else { return Optional.empty(); } }); } private class FeedPropertyChangeDispatcher implements MetadataEventListener<FeedPropertyChangeEvent> { @Override public void notify(@Nonnull final FeedPropertyChangeEvent metadataEvent) { Properties oldProperties = metadataEvent.getData().getNifiPropertiesToDelete(); metadataAccess.commit(() -> { Feed feed = feedProvider.getFeed(feedProvider.resolveFeed(metadataEvent.getData().getFeedId())); oldProperties.forEach((k, v) -> { feed.removeProperty((String) k); }); }, MetadataAccess.SERVICE); } } /** * update the audit information for feed state changes * * @param feedId the feed id * @param state the new state * @param changeType the event type */ private void notifyFeedStateChange(FeedMetadata feedMetadata, Feed.ID feedId, Feed.State state, MetadataChange.ChangeType changeType) { final Principal principal = SecurityContextHolder.getContext().getAuthentication() != null ? 
SecurityContextHolder.getContext().getAuthentication() : null; String feedName = feedMetadata != null ? feedMetadata.getCategoryAndFeedName() : ""; FeedChange change = new FeedChange(changeType, feedName, feedName, feedId, state); FeedChangeEvent event = new FeedChangeEvent(change, DateTime.now(), principal); metadataEventService.notify(event); } }
/*
 * Knetik Platform API Documentation latest
 * This is the spec for the Knetik API. Use this in conjunction with the documentation found at https://knetikcloud.com.
 *
 * OpenAPI spec version: latest
 * Contact: support@knetik.com
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */


package com.knetikcloud.model;

import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.knetikcloud.model.Order;
import com.knetikcloud.model.QuestionResource;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;

/**
 * PageResourceQuestionResource
 * <p>
 * Generated JSON model for one page of {@code QuestionResource} results. Mirrors a Spring-Data
 * style page: the {@code content} slice plus paging bookkeeping (page number, size, totals,
 * first/last flags and sort orders). All fields are nullable and map to snake_case JSON names
 * via {@link JsonProperty}. Regenerate via swagger-codegen rather than editing by hand.
 */
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2018-03-14T12:03:43.231-04:00")
public class PageResourceQuestionResource {

  // The items on this page
  @JsonProperty("content")
  private List<QuestionResource> content = null;

  // True when this is the first page
  @JsonProperty("first")
  private Boolean first = null;

  // True when this is the last page
  @JsonProperty("last")
  private Boolean last = null;

  // Zero-based page index
  @JsonProperty("number")
  private Integer number = null;

  // Count of items actually present on this page
  @JsonProperty("number_of_elements")
  private Integer numberOfElements = null;

  // Requested page size
  @JsonProperty("size")
  private Integer size = null;

  // Sort orders that produced this page
  @JsonProperty("sort")
  private List<Order> sort = null;

  // Total matching items across all pages
  @JsonProperty("total_elements")
  private Long totalElements = null;

  // Total number of pages
  @JsonProperty("total_pages")
  private Integer totalPages = null;

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource content(List<QuestionResource> content) {
    this.content = content;
    return this;
  }

  // Appends one item, lazily creating the backing list
  public PageResourceQuestionResource addContentItem(QuestionResource contentItem) {
    if (this.content == null) {
      this.content = new ArrayList<QuestionResource>();
    }
    this.content.add(contentItem);
    return this;
  }

   /**
   * Get content
   * @return content
  **/
  @ApiModelProperty(value = "")
  public List<QuestionResource> getContent() {
    return content;
  }

  public void setContent(List<QuestionResource> content) {
    this.content = content;
  }

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource first(Boolean first) {
    this.first = first;
    return this;
  }

   /**
   * Get first
   * @return first
  **/
  @ApiModelProperty(value = "")
  public Boolean isFirst() {
    return first;
  }

  public void setFirst(Boolean first) {
    this.first = first;
  }

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource last(Boolean last) {
    this.last = last;
    return this;
  }

   /**
   * Get last
   * @return last
  **/
  @ApiModelProperty(value = "")
  public Boolean isLast() {
    return last;
  }

  public void setLast(Boolean last) {
    this.last = last;
  }

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource number(Integer number) {
    this.number = number;
    return this;
  }

   /**
   * Get number
   * @return number
  **/
  @ApiModelProperty(value = "")
  public Integer getNumber() {
    return number;
  }

  public void setNumber(Integer number) {
    this.number = number;
  }

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource numberOfElements(Integer numberOfElements) {
    this.numberOfElements = numberOfElements;
    return this;
  }

   /**
   * Get numberOfElements
   * @return numberOfElements
  **/
  @ApiModelProperty(value = "")
  public Integer getNumberOfElements() {
    return numberOfElements;
  }

  public void setNumberOfElements(Integer numberOfElements) {
    this.numberOfElements = numberOfElements;
  }

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource size(Integer size) {
    this.size = size;
    return this;
  }

   /**
   * Get size
   * @return size
  **/
  @ApiModelProperty(value = "")
  public Integer getSize() {
    return size;
  }

  public void setSize(Integer size) {
    this.size = size;
  }

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource sort(List<Order> sort) {
    this.sort = sort;
    return this;
  }

  // Appends one sort order, lazily creating the backing list
  public PageResourceQuestionResource addSortItem(Order sortItem) {
    if (this.sort == null) {
      this.sort = new ArrayList<Order>();
    }
    this.sort.add(sortItem);
    return this;
  }

   /**
   * Get sort
   * @return sort
  **/
  @ApiModelProperty(value = "")
  public List<Order> getSort() {
    return sort;
  }

  public void setSort(List<Order> sort) {
    this.sort = sort;
  }

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource totalElements(Long totalElements) {
    this.totalElements = totalElements;
    return this;
  }

   /**
   * Get totalElements
   * @return totalElements
  **/
  @ApiModelProperty(value = "")
  public Long getTotalElements() {
    return totalElements;
  }

  public void setTotalElements(Long totalElements) {
    this.totalElements = totalElements;
  }

  // Fluent setter: returns this for chaining
  public PageResourceQuestionResource totalPages(Integer totalPages) {
    this.totalPages = totalPages;
    return this;
  }

   /**
   * Get totalPages
   * @return totalPages
  **/
  @ApiModelProperty(value = "")
  public Integer getTotalPages() {
    return totalPages;
  }

  public void setTotalPages(Integer totalPages) {
    this.totalPages = totalPages;
  }

  // Field-by-field equality over all nine properties
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    PageResourceQuestionResource pageResourceQuestionResource = (PageResourceQuestionResource) o;
    return Objects.equals(this.content, pageResourceQuestionResource.content) &&
        Objects.equals(this.first, pageResourceQuestionResource.first) &&
        Objects.equals(this.last, pageResourceQuestionResource.last) &&
        Objects.equals(this.number, pageResourceQuestionResource.number) &&
        Objects.equals(this.numberOfElements, pageResourceQuestionResource.numberOfElements) &&
        Objects.equals(this.size, pageResourceQuestionResource.size) &&
        Objects.equals(this.sort, pageResourceQuestionResource.sort) &&
        Objects.equals(this.totalElements, pageResourceQuestionResource.totalElements) &&
        Objects.equals(this.totalPages, pageResourceQuestionResource.totalPages);
  }

  // Consistent with equals(): hashes the same nine properties
  @Override
  public int hashCode() {
    return Objects.hash(content, first, last, number, numberOfElements, size, sort, totalElements, totalPages);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class PageResourceQuestionResource {\n");
    sb.append("    content: ").append(toIndentedString(content)).append("\n");
    sb.append("    first: ").append(toIndentedString(first)).append("\n");
    sb.append("    last: ").append(toIndentedString(last)).append("\n");
    sb.append("    number: ").append(toIndentedString(number)).append("\n");
    sb.append("    numberOfElements: ").append(toIndentedString(numberOfElements)).append("\n");
    sb.append("    size: ").append(toIndentedString(size)).append("\n");
    sb.append("    sort: ").append(toIndentedString(sort)).append("\n");
    sb.append("    totalElements: ").append(toIndentedString(totalElements)).append("\n");
    sb.append("    totalPages: ").append(toIndentedString(totalPages)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }

}
package com.adaptris.core.http.jetty.retry;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileFilter;
import java.util.Collections;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.DirectoryFileFilter;
import org.junit.AfterClass;
import org.junit.Test;
import com.adaptris.core.AdaptrisMessage;
import com.adaptris.core.CoreConstants;
import com.adaptris.core.DefaultMessageFactory;
import com.adaptris.core.fs.FsHelper;
import com.adaptris.core.lms.FileBackedMessageFactory;
import com.adaptris.core.stubs.TempFileUtils;
import com.adaptris.core.util.LifecycleHelper;
import com.adaptris.interlok.InterlokException;
import com.adaptris.interlok.cloud.RemoteBlob;
import com.adaptris.interlok.junit.scaffolding.BaseCase;

/**
 * Unit tests for {@code FilesystemRetryStore}: writing messages to the retry directory,
 * rebuilding them for retry, metadata access, reporting and deletion, plus the failure
 * paths driven by an unusable base URL.
 */
public class FilesystemRetryStoreTest {

  // On Windows since TEST_BASE_URL will contain file://localhost/c:/
  // This gets magically URL encoded... so we can't assume that spaces will
  // make things fail, so for an invalid URL we must make
  // sure that we never have a drive letter.
  public static final String INVALID_URL = "file://localhost/./ spaces / not / valid / in / url";

  // Config key resolving to the base directory URL used by the tests.
  public static final String TEST_BASE_URL = "retry.baseUrl";

  @AfterClass
  public static void afterAll() throws Exception {
    // Clean up the whole retry directory created by the tests.
    FileUtils.deleteQuietly(FsHelper.toFile(BaseCase.getConfiguration(TEST_BASE_URL)));
  }

  @Test
  public void testWrite_PayloadMessage() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      store.write(msg);
      File retryDir = FsHelper.toFile(BaseCase.getConfiguration(TEST_BASE_URL));
      File msgDir = new File(retryDir, msg.getUniqueId());
      assertTrue(retryDir.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY).length >= 1);
      assertTrue(msgDir.exists());
      // payload + metadata files
      assertEquals(2, msgDir.listFiles().length);
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test
  public void testWrite_PayloadMetadataException() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      msg.addObjectHeader(CoreConstants.OBJ_METADATA_EXCEPTION, new Exception());
      store.write(msg);
      File retryDir = FsHelper.toFile(BaseCase.getConfiguration(TEST_BASE_URL));
      File msgDir = new File(retryDir, msg.getUniqueId());
      assertTrue(retryDir.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY).length >= 1);
      assertTrue(msgDir.exists());
      // payload + metadata + captured exception
      assertEquals(3, msgDir.listFiles().length);
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test
  public void testWrite_FileBacked() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new FileBackedMessageFactory().newMessage("hello");
      store.write(msg);
      File dir = FsHelper.toFile(BaseCase.getConfiguration(TEST_BASE_URL));
      assertTrue(dir.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY).length >= 1);
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test(expected = InterlokException.class)
  public void testWrite_Exception() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(INVALID_URL);
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      store.write(msg);
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test
  public void testBuildForRetry() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      store.write(msg);
      AdaptrisMessage retry = store.buildForRetry(msg.getUniqueId());
      assertEquals(msg.getUniqueId(), retry.getUniqueId());
      assertEquals(msg.getMessageHeaders(), retry.getMessageHeaders());
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test
  public void testBuildForRetry_FileBacked() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      store.write(msg);
      // rebuild using an explicit metadata map + a file-backed factory
      // (removed an unused duplicate getMetadata() local from the original test)
      AdaptrisMessage retry =
          store.buildForRetry(msg.getUniqueId(), store.getMetadata(msg.getUniqueId()), new FileBackedMessageFactory());
      assertEquals(msg.getUniqueId(), retry.getUniqueId());
      assertEquals(msg.getMessageHeaders(), retry.getMessageHeaders());
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test(expected = InterlokException.class)
  public void testBuildForRetry_Exception() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(INVALID_URL);
    try {
      LifecycleHelper.initAndStart(store);
      // typed emptyMap() instead of the raw Collections.EMPTY_MAP constant
      store.buildForRetry("xxx", Collections.emptyMap());
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test
  public void testGetMetadata() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      store.write(msg);
      Map<String, String> metadata = store.getMetadata(msg.getUniqueId());
      assertEquals(msg.getMessageHeaders(), metadata);
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test(expected = InterlokException.class)
  public void testGetMetadata_Exception() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      // message id never written -> expect failure
      store.getMetadata("xxx");
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test
  public void testReport() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      store.write(msg);
      assertTrue(store.report().iterator().hasNext());
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test(expected = InterlokException.class)
  public void testReport_Exception() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(INVALID_URL);
    try {
      LifecycleHelper.initAndStart(store);
      // removed an unused message created by the original test; report() itself should throw
      store.report();
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test
  public void testDelete() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      store.write(msg);
      assertTrue(store.delete(msg.getUniqueId()));
      File retryDir = FsHelper.toFile(BaseCase.getConfiguration(TEST_BASE_URL));
      File msgDir = new File(retryDir, msg.getUniqueId());
      assertFalse(msgDir.exists());
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test(expected = InterlokException.class)
  public void testDelete_Exception() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(INVALID_URL);
    try {
      LifecycleHelper.initAndStart(store);
      store.delete("XXXX");
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }

  @Test
  public void testCreateForReport() throws Exception {
    FilesystemRetryStore store = new FilesystemRetryStore().withBaseUrl(BaseCase.getConfiguration(TEST_BASE_URL));
    try {
      LifecycleHelper.initAndStart(store);
      AdaptrisMessage msg = new DefaultMessageFactory().newMessage("hello");
      store.write(msg);
      File retryStoreDir = FsHelper.toFile(BaseCase.getConfiguration(TEST_BASE_URL));
      File storedMsgDir = new File(retryStoreDir, msg.getUniqueId());
      RemoteBlob blob = FilesystemRetryStore.createForReport(storedMsgDir);
      assertNotNull(blob);
      assertEquals("hello".length(), blob.getSize());
      // directories that are not stored messages (or null) yield no blob
      File randomDir = TempFileUtils.createTrackedDir(store);
      assertNull(FilesystemRetryStore.createForReport(randomDir));
      assertNull(FilesystemRetryStore.createForReport(null));
    } finally {
      LifecycleHelper.stopAndClose(store);
    }
  }
}
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.jooq;

import java.util.concurrent.Executor;

import javax.sql.DataSource;

import org.jooq.DSLContext;
import org.jooq.ExecuteListener;
import org.jooq.ExecuteListenerProvider;
import org.jooq.ExecutorProvider;
import org.jooq.Record;
import org.jooq.RecordListener;
import org.jooq.RecordListenerProvider;
import org.jooq.RecordMapper;
import org.jooq.RecordMapperProvider;
import org.jooq.RecordType;
import org.jooq.RecordUnmapper;
import org.jooq.RecordUnmapperProvider;
import org.jooq.SQLDialect;
import org.jooq.TransactionListener;
import org.jooq.TransactionListenerProvider;
import org.jooq.TransactionalRunnable;
import org.jooq.VisitListener;
import org.jooq.VisitListenerProvider;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;

/**
 * Tests for {@link JooqAutoConfiguration}.
 *
 * @author Andreas Ahlenstorf
 * @author Phillip Webb
 * @author Andy Wilkinson
 * @author Stephane Nicoll
 * @author Dmytro Nosan
 */
public class JooqAutoConfigurationTests {

	private ApplicationContextRunner contextRunner = new ApplicationContextRunner()
			.withConfiguration(AutoConfigurations.of(JooqAutoConfiguration.class))
			.withPropertyValues("spring.datasource.name:jooqtest");

	// NOTE(review): this rule is currently unused by any test method; kept to avoid
	// changing the public surface of the class.
	@Rule
	public ExpectedException thrown = ExpectedException.none();

	@Test
	public void noDataSource() {
		this.contextRunner
				.run((context) -> assertThat(context.getBeansOfType(DSLContext.class))
						.isEmpty());
	}

	@Test
	public void jooqWithoutTx() {
		this.contextRunner.withUserConfiguration(JooqDataSourceConfiguration.class)
				.run((context) -> {
					assertThat(context).doesNotHaveBean(PlatformTransactionManager.class);
					assertThat(context).doesNotHaveBean(SpringTransactionProvider.class);
					DSLContext dsl = context.getBean(DSLContext.class);
					dsl.execute("create table jooqtest (name varchar(255) primary key);");
					dsl.transaction(new AssertFetch(dsl,
							"select count(*) as total from jooqtest;", "0"));
					dsl.transaction(new ExecuteSql(dsl,
							"insert into jooqtest (name) values ('foo');"));
					dsl.transaction(new AssertFetch(dsl,
							"select count(*) as total from jooqtest;", "1"));
					try {
						// second statement violates the primary key ('foo' already present)
						dsl.transaction(new ExecuteSql(dsl,
								"insert into jooqtest (name) values ('bar');",
								"insert into jooqtest (name) values ('foo');"));
						// BUGFIX: message said "An DataIntegrityViolationException" (grammar),
						// inconsistent with the sibling test below.
						fail("A DataIntegrityViolationException should have been thrown.");
					}
					catch (DataIntegrityViolationException ex) {
						// Ignore
					}
					// no transaction manager -> the successful 'bar' insert is not rolled back
					dsl.transaction(new AssertFetch(dsl,
							"select count(*) as total from jooqtest;", "2"));
				});
	}

	@Test
	public void jooqWithTx() {
		this.contextRunner.withUserConfiguration(JooqDataSourceConfiguration.class,
				TxManagerConfiguration.class).run((context) -> {
					assertThat(context).hasSingleBean(PlatformTransactionManager.class);
					DSLContext dsl = context.getBean(DSLContext.class);
					assertThat(dsl.configuration().dialect())
							.isEqualTo(SQLDialect.HSQLDB);
					dsl.execute(
							"create table jooqtest_tx (name varchar(255) primary key);");
					dsl.transaction(new AssertFetch(dsl,
							"select count(*) as total from jooqtest_tx;", "0"));
					dsl.transaction(new ExecuteSql(dsl,
							"insert into jooqtest_tx (name) values ('foo');"));
					dsl.transaction(new AssertFetch(dsl,
							"select count(*) as total from jooqtest_tx;", "1"));
					try {
						// BUGFIX: originally inserted into "jooqtest", a table created only by
						// jooqWithoutTx(), making the expected constraint violation depend on
						// test execution order. Use this test's own table so the duplicate
						// 'foo' reliably violates the primary key.
						dsl.transaction(new ExecuteSql(dsl,
								"insert into jooqtest_tx (name) values ('bar');",
								"insert into jooqtest_tx (name) values ('foo');"));
						fail("A DataIntegrityViolationException should have been thrown.");
					}
					catch (DataIntegrityViolationException ex) {
						// Ignore
					}
					// transaction manager present -> the failed transaction is rolled back
					dsl.transaction(new AssertFetch(dsl,
							"select count(*) as total from jooqtest_tx;", "1"));
				});
	}

	@Test
	public void customProvidersArePickedUp() {
		this.contextRunner.withUserConfiguration(JooqDataSourceConfiguration.class,
				TxManagerConfiguration.class, TestRecordMapperProvider.class,
				TestRecordUnmapperProvider.class, TestRecordListenerProvider.class,
				TestExecuteListenerProvider.class, TestVisitListenerProvider.class,
				TestTransactionListenerProvider.class, TestExecutorProvider.class)
				.run((context) -> {
					DSLContext dsl = context.getBean(DSLContext.class);
					assertThat(dsl.configuration().recordMapperProvider().getClass())
							.isEqualTo(TestRecordMapperProvider.class);
					assertThat(dsl.configuration().recordUnmapperProvider().getClass())
							.isEqualTo(TestRecordUnmapperProvider.class);
					assertThat(dsl.configuration().executorProvider().getClass())
							.isEqualTo(TestExecutorProvider.class);
					assertThat(dsl.configuration().recordListenerProviders().length)
							.isEqualTo(1);
					// auto-configuration contributes one execute listener of its own
					assertThat(dsl.configuration().executeListenerProviders().length)
							.isEqualTo(2);
					assertThat(dsl.configuration().visitListenerProviders().length)
							.isEqualTo(1);
					assertThat(dsl.configuration().transactionListenerProviders().length)
							.isEqualTo(1);
				});
	}

	@Test
	public void relaxedBindingOfSqlDialect() {
		this.contextRunner.withUserConfiguration(JooqDataSourceConfiguration.class)
				.withPropertyValues("spring.jooq.sql-dialect:PoSTGrES")
				.run((context) -> assertThat(
						context.getBean(org.jooq.Configuration.class).dialect())
								.isEqualTo(SQLDialect.POSTGRES));
	}

	/**
	 * Runs a query inside a jOOQ transaction and asserts its first cell as a string.
	 */
	private static class AssertFetch implements TransactionalRunnable {

		private final DSLContext dsl;

		private final String sql;

		private final String expected;

		AssertFetch(DSLContext dsl, String sql, String expected) {
			this.dsl = dsl;
			this.sql = sql;
			this.expected = expected;
		}

		@Override
		public void run(org.jooq.Configuration configuration) {
			assertThat(this.dsl.fetch(this.sql).getValue(0, 0).toString())
					.isEqualTo(this.expected);
		}

	}

	/**
	 * Executes the given SQL statements, in order, inside a jOOQ transaction.
	 */
	private static class ExecuteSql implements TransactionalRunnable {

		private final DSLContext dsl;

		private final String[] sql;

		ExecuteSql(DSLContext dsl, String... sql) {
			this.dsl = dsl;
			this.sql = sql;
		}

		@Override
		public void run(org.jooq.Configuration configuration) {
			for (String statement : this.sql) {
				this.dsl.execute(statement);
			}
		}

	}

	@Configuration
	protected static class JooqDataSourceConfiguration {

		@Bean
		public DataSource jooqDataSource() {
			return DataSourceBuilder.create().url("jdbc:hsqldb:mem:jooqtest")
					.username("sa").build();
		}

	}

	@Configuration
	protected static class TxManagerConfiguration {

		@Bean
		public PlatformTransactionManager transactionManager(DataSource dataSource) {
			return new DataSourceTransactionManager(dataSource);
		}

	}

	protected static class TestRecordMapperProvider implements RecordMapperProvider {

		@Override
		public <R extends Record, E> RecordMapper<R, E> provide(RecordType<R> recordType,
				Class<? extends E> aClass) {
			return null;
		}

	}

	protected static class TestRecordUnmapperProvider implements RecordUnmapperProvider {

		@Override
		public <E, R extends Record> RecordUnmapper<E, R> provide(
				Class<? extends E> aClass, RecordType<R> recordType) {
			return null;
		}

	}

	protected static class TestRecordListenerProvider implements RecordListenerProvider {

		@Override
		public RecordListener provide() {
			return null;
		}

	}

	protected static class TestExecuteListenerProvider
			implements ExecuteListenerProvider {

		@Override
		public ExecuteListener provide() {
			return null;
		}

	}

	protected static class TestVisitListenerProvider implements VisitListenerProvider {

		@Override
		public VisitListener provide() {
			return null;
		}

	}

	protected static class TestTransactionListenerProvider
			implements TransactionListenerProvider {

		@Override
		public TransactionListener provide() {
			return null;
		}

	}

	protected static class TestExecutorProvider implements ExecutorProvider {

		@Override
		public Executor provide() {
			return null;
		}

	}

}
package com.sdp.capabilities.apocalymbics;

import java.util.ArrayList;

import org.json.JSONArray;

import com.bmge.framework.Game;
import com.bmge.framework.Screen;
import com.bmge.zombiegame.SampleGame;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.util.Log;

/**
 * RPC facade between the SDP multiplayer server and the local game client.
 * The server invokes the public methods below (see the "THESE METHODS ARE
 * CALLED FROM THE SERVER" section) to drive the client through its screens:
 * country selection, game selection, gameplay, winner and play-again screens.
 *
 * NOTE(review): {@code game} and {@code applicationContext} are static
 * mutable state shared across all instances; presumably the SDP framework
 * may instantiate this class per call — confirm before adding per-instance
 * state. The method names and signatures appear to form the server's RPC
 * contract, so they must not be renamed.
 */
public class Apocalymbics {

    private static final String TAG = Apocalymbics.class.getSimpleName();

    /*************************************************************************
     * SDP stuff
     *************************************************************************/

    // Application context injected by the hosting SDP framework; required
    // before testLaunch() can start the game activity.
    private static Context applicationContext;

    public void setApplicationContext(Context applicationContext) {
        Apocalymbics.applicationContext = applicationContext;
    }

    /*************************************************************************/

    // The currently running game activity, registered via setGame(). All the
    // server-called methods below delegate to game.getCurrentScreen(); if no
    // game has been launched yet they either log the failure (the try/catch
    // methods) or throw a NullPointerException (the unguarded ones).
    static Game game;

    /**
     * (Re)launches the sample game activity. Any previously running game
     * activity is finished first so that only one instance exists at a time.
     */
    public void testLaunch() {
        if(game != null) {
            Log.d(TAG, "game was NOT null -> finish()");
            // Game is implemented by an Activity subclass elsewhere in the
            // project, hence this cast — TODO confirm against setGame() callers.
            ((Activity) game).finish();
            Log.d(TAG, "finish() called!");
        }
        game = null;
        Intent testIntent = new Intent(applicationContext, SampleGame.class);
        // Required because the activity is started from a non-activity context.
        testIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        applicationContext.startActivity(testIntent);
    }

    /** THESE METHODS ARE CALLED FROM THE SERVER **/

    /**
     * The first method that is called from the server. Passes this client's
     * player number to the current screen so the game can initialise itself.
     *
     * @param playerNumber number of this player
     * @return always 0 (RPC convention; failures are only logged)
     */
    public int launchApplication(Integer playerNumber) {
        try {
            game.getCurrentScreen().setInitialInfo(playerNumber);
        } catch (Exception e) {
            // NOTE(review): logs e.toString() only — the stack trace is lost.
            Log.d("RemoteMethodListener/launchApplication", e.toString());
        }
        return 0;
    }

    /**
     * Makes the player on this client the admin (who gets to choose the game).
     */
    public int makeAdminPlayer() {
        game.getCurrentScreen().makeAdminPlayer();
        return 0;
    }

    /**
     * Makes the player on this client a normal (non-admin) player.
     */
    public int makeNormalPlayer() {
        game.getCurrentScreen().makeNormalPlayer();
        return 0;
    }

    /**
     * Returns the selected country of the client. If no country is selected or
     * the player is not in the country selection screen, the value
     * {@link Screen#UNINITIALIZED} is returned.
     *
     * @return number of the chosen country, or {@code Screen.UNINITIALIZED}
     */
    public Integer getCountrySelection() {
        Log.d("RemoteMethodListener", "getCountrySelection() entry");

        // In case player is not in country selection screen, return so that
        // no turn for country selection is given to the player
        if (!game.getCurrentScreen().isInCountrySelectionScreen()) {
            return Integer.valueOf(Screen.UNINITIALIZED);
        }

        // Give player his selection turn. If the player already has the turn,
        // this call doesn't have an effect
        game.getCurrentScreen().giveCountrySelectionTurn();

        int countryNumber = game.getCurrentScreen().getCountrySelection();
        //return countryNumber;
        return Integer.valueOf(countryNumber);
    }

    /**
     * Updates the currently selected countries by other players.
     *
     * @param countrySelections an array which contains currently selected countries
     * @param playerNumbers     player numbers, parallel to {@code countrySelections}
     * @return always 0 (RPC convention; failures are only logged)
     */
    public int updatePlayerInfo(JSONArray countrySelections, JSONArray playerNumbers) {
        try {
            /*
            JSONArray countrySelections = JSONmethodcallParameters
                    .getJSONArray("countrySelections");
            JSONArray playerNumbers = JSONmethodcallParameters
                    .getJSONArray("playerNumbers");
            */
            ArrayList<Screen.Player> inGamePlayers = new ArrayList<Screen.Player>();
            int amountOfSelections = countrySelections.length();
            // Build one Player record per (country, playerNumber) pair.
            for (int player = 0; player < amountOfSelections; ++player) {
                Object countrySelectionObject = countrySelections.get(player);
                Object playerNumberObject = playerNumbers.get(player);
                Integer countryNumber = (Integer) countrySelectionObject;
                Integer playerNumber = (Integer) playerNumberObject;

                Screen.Player newPlayer = new Screen.Player();
                newPlayer.country = countryNumber;
                newPlayer.number = playerNumber;
                newPlayer.allThrows = new ArrayList<Integer>();
                newPlayer.wantsToContinue = true;
                inGamePlayers.add(newPlayer);
            }
            game.getCurrentScreen().updateInGamePlayers(inGamePlayers);
        } catch (Exception e) {
            Log.d("RemoteMethodListener/updatePlayerInfo", e.toString());
        }
        return 0;
    }

    /**
     * Changes the screen to game selection.
     */
    public int moveToGameSelectionScreen() {
        game.getCurrentScreen().moveToGameSelectionScreen();
        return 0;
    }

    /**
     * Inquires the selected game. This call is done only for the admin.
     *
     * @return number of the selected game
     */
    public Integer getGameSelectionFromAdmin() {
        int gameNumber = game.getCurrentScreen().getGameSelectionFromAdmin();
        //return gameNumber;
        return Integer.valueOf(gameNumber);
    }

    /**
     * Informs the client of the game that the admin selected.
     */
    public int updateGameSelection(Integer selectedGame) {
        try {
            //int selectedGame = JSONmethodcallParameters.getInt("selectedGame");
            game.getCurrentScreen().updateGameSelection(selectedGame);
        } catch (Exception e) {
            Log.d("RemoteMethodListener/updateGameSelection", e.toString());
        }
        return 0;
    }

    /**
     * Changes the screen to the selected mini game.
     */
    public int moveToGameplayScreen() {
        game.getCurrentScreen().moveToGameplayScreen();
        return 0;
    }

    /**
     * Changes the screen to the winner screen.
     */
    public int moveToWinnerScreen() {
        game.getCurrentScreen().moveToWinnerScreen();
        return 0;
    }

    /**
     * Returns the final placement of the player. The server uses it to
     * determine which player gets the admin status next.
     */
    public Integer getPlayerPlacement() {
        int playerPlacement = game.getCurrentScreen().getThisPlayerPlacement();
        //return playerPlacement;
        return Integer.valueOf(playerPlacement);
    }

    /**
     * Moves the player to the play again screen.
     */
    public int moveToPlayAgainScreen() {
        game.getCurrentScreen().moveToPlayAgainScreen();
        return 0;
    }

    /**
     * Has the player decided to continue or exit in the play again screen.
     *
     * @return has the player decided to continue or exit the game
     */
    public Boolean hasDecided() {
        boolean hasDecided = game.getCurrentScreen().hasDecided();
        return Boolean.valueOf(hasDecided);
    }

    /**
     * Has the player decided to continue playing.
     *
     * NOTE(review): this currently just echoes its argument back; the query
     * against the screen is commented out. Presumably the server already
     * knows the value — verify this is intentional.
     *
     * @return has the player decided to continue playing
     */
    public Boolean wantsToContinue(Boolean wantsToContinue) {
        //boolean wantsToContinue = game.getCurrentScreen().wantsToContinue();
        return Boolean.valueOf(wantsToContinue);
    }

    /**
     * Moves the player to the main menu screen (no other players anymore who
     * want to continue playing).
     */
    public int moveToMainMenuScreen() {
        game.getCurrentScreen().moveToMainMenuScreen();
        return 0;
    }

    /***************************************
     * SKULL THROW GAME SPECIFIC METHODS
     ***************************************/

    /**
     * Starts the given round of the skull throw game.
     */
    public int startRound(Integer roundNumber) {
        try {
            //int roundNumber = JSONmethodcallParameters.getInt("roundNumber");
            game.getCurrentScreen().startRound(roundNumber);
        } catch (Exception e) {
            Log.d("RemoteMethodListener/startRound", e.toString());
        }
        return 0;
    }

    /**
     * Tells this client whose turn it is to throw.
     */
    public int updateGameplayTurn(Integer playerNumber) {
        try {
            //int playerNumber = JSONmethodcallParameters.getInt("playerNumber");
            game.getCurrentScreen().updateGameplayTurn(playerNumber);
        } catch (Exception e) {
            Log.d("RemoteMethodListener/updateGameplayTurn", e.toString());
        }
        return 0;
    }

    /**
     * Returns the throw angle chosen by this player.
     */
    public Integer getPlayerThrowAngle() {
        int throwAngle = game.getCurrentScreen().getPlayerThrowAngle();
        //return throwAngle;
        return Integer.valueOf(throwAngle);
    }

    /**
     * Replays another player's throw on this client.
     */
    public int showPlayerThrow(Integer playerNumber, Integer throwAngle) {
        try {
            //int playerNumber = JSONmethodcallParameters.getInt("playerNumber");
            //int throwAngle = JSONmethodcallParameters.getInt("throwAngle");
            game.getCurrentScreen().showPlayerThrow(playerNumber, throwAngle);
        } catch (Exception e) {
            Log.d("RemoteMethodListener/showPlayerThrow", e.toString());
        }
        return 0;
    }

    /**
     * Is the current screen ready for the next server-driven step.
     */
    public Boolean isReady() {
        boolean isReady = game.getCurrentScreen().isReady();
        return Boolean.valueOf(isReady);
    }

    /**
     * Registers the running game activity; called by the game on startup.
     */
    public static void setGame(Game androidGame) {
        game = androidGame;
    }
}
package ca.krasnay.panelized;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import org.apache.wicket.AttributeModifier;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.attributes.AjaxRequestAttributes;
import org.apache.wicket.ajax.attributes.AjaxRequestAttributes.EventPropagation;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.behavior.AttributeAppender;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.link.BookmarkablePageLink;
import org.apache.wicket.markup.html.link.PopupSettings;
import org.apache.wicket.markup.html.panel.EmptyPanel;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.markup.repeater.RepeatingView;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IComponentAssignedModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;

/**
 * A drop-down menu in the Bootstrap style, consisting of a link to trigger
 * the menu and a list of links that are the menu items.
 *
 * Page links and actions can be added as menu items, and are subject to access
 * control checks.
 *
 * Note that if this panel is in a button group with other controls, we have to
 * attach it to a &lt;wicket:container&gt; instead of a real element. In this case
 * the menu is not AJAX-refreshable. You need to refresh a parent component instead.
 *
 * @author <a href="mailto:john@krasnay.ca">John Krasnay</a>
 */
public class DropDownMenuPanel extends Panel {

    /** Menu entry that links to an arbitrary external URL. */
    private static class ExternalLinkAction implements Serializable {

        private IModel<String> url;

        private IModel<String> linkTextModel;

        private ExternalLinkAction(IModel<String> url, IModel<String> linkTextModel) {
            this.url = url;
            this.linkTextModel = linkTextModel;
        }
    }

    /** Menu entry that links to a bookmarkable Wicket page. */
    private static class PageLinkAction implements Serializable {

        private PageRef pageRef;

        private IModel<String> linkTextModel;

        private PopupSettings popupSettings;

        private PageLinkAction(PageRef pageRef, IModel<String> linkTextModel, PopupSettings popupSettings) {
            this.pageRef = pageRef;
            this.linkTextModel = linkTextModel;
            this.popupSettings = popupSettings;
        }
    }

    // Marker for separator entries in the actions list.
    // Note we used to just use a String instance here, but it
    // broke in Wicket 1.5 serialization.
    private enum DummyAction {
        SEPARATOR
    }

    // Decides whether an action or page link may appear in the menu.
    // NOTE(review): when this is null, addAction()/addPageLink() silently drop
    // the entry (the null check precedes the canAccess call) — confirm intended.
    private AccessController accessController;

    /**
     * List of actions to be added to the menu. Each item in this list can
     * be one of the following:
     *
     * - a NamedAjaxAction
     * - the SEPARATOR_ACTION constant
     * - a PageLinkAction
     * - an ExternalLinkAction
     * - a DropDownMenuPanel representing a sub-menu
     *
     * We use this list to re-generate the menu each time it is rendered, to
     * take into account changing visibility, enablement, and permissions.
     *
     */
    private List<Object> actions = new ArrayList<Object>();

    // Whether the menu should be right-aligned (Bootstrap "pull-right").
    private boolean alignRight;

    // Visual style of the trigger link; see the class switch in the constructor.
    private ToolStyle style = ToolStyle.LINK;

    // Holds the generated menu items; cleared and rebuilt on every render.
    private RepeatingView itemRepeater;

    // Holds panels contributed by panel-bearing actions (e.g. dialogs).
    private RepeatingView panelRepeater;

    // Optional tooltip for the trigger link; read lazily at render time.
    private IModel<String> toolTipModel;

    // Recomputed by rebuildMenu(): true if at least one item was rendered.
    private boolean visible = true;

    /**
     * True if we want to add a separator. In order to avoid doubled-up
     * separators or separators at the end of the menu, addSeparator() just
     * sets this flag. The separator is only really added via checkSeparator()
     * when adding a "real" menu item.
     */
    private boolean pendingSeparator;

    /**
     * Full constructor.
     *
     * @param id               Wicket component id
     * @param iconName         icon for the trigger link (may be null)
     * @param textModel        text for the trigger link (may be null)
     * @param accessController controller used to filter actions and page links
     */
    public DropDownMenuPanel(String id, String iconName, IModel<String> textModel, AccessController accessController) {

        super(id);

        this.accessController = accessController;

        setOutputMarkupId(true);

        WebMarkupContainer link = new WebMarkupContainer("link");
        add(link);

        link.add(new IconLabelPanel("label", iconName, textModel));

        // CSS class of the trigger depends on the mutable style field, so it
        // is computed at render time via a model rather than set once.
        link.add(new AttributeAppender("class", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                switch (style) {
                case BUTTON:
                    return "pnl-Button";
                case HOVER_BUTTON:
                    return "pnl-Button pnl-Button--hover";
                default:
                    return "pnl-Link";
                }
            }
        }, " "));

        link.add(new AttributeModifier("title", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                return toolTipModel != null ? toolTipModel.getObject() : null;
            }
        }));

        WebMarkupContainer menu = new WebMarkupContainer("menu");
        add(menu);

        menu.add(new AttributeAppender("class", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                return alignRight ? "pull-right" : null;
            }
        }, " "));

        menu.add(itemRepeater = new RepeatingView("item"));

        add(panelRepeater = new RepeatingView("panel"));

    }

    /** Convenience constructor: icon-only trigger. */
    public DropDownMenuPanel(String id, String iconName, AccessController acl) {
        this(id, iconName, null, acl);
    }

    /** Convenience constructor: text-only trigger. */
    public DropDownMenuPanel(String id, IModel<String> textModel, AccessController acl) {
        this(id, null, textModel, acl);
    }

    /**
     * Adds an AJAX action to the menu, subject to an access-control check.
     * If the action is itself a Panel it is also added to the panel repeater
     * so it can render its markup (e.g. a confirmation dialog).
     */
    public void addAction(final NamedAjaxAction action) {
        if (accessController != null && accessController.canAccess(action.getClass())) {
            actions.add(action);
            if (action instanceof Panel) {
                panelRepeater.add((Panel) action);
            }
        }
    }

    /**
     * Adds a link to an external URL.
     *
     * NOTE(review): unlike addAction()/addPageLink(), no access-control check
     * is applied here — confirm this is intentional.
     */
    public void addExternalLink(IModel<String> url, String linkText) {
        actions.add(new ExternalLinkAction(url, Model.of(linkText)));
    }

    /**
     * Adds a bookmarkable page link to the menu, subject to an
     * access-control check.
     */
    public void addPageLink(final PageRef pageRef, IModel<String> linkTextModel) {
        addPageLink(pageRef, linkTextModel, null);
    }

    /**
     * Adds a bookmarkable page link to the menu with optional popup settings,
     * subject to an access-control check.
     */
    public void addPageLink(PageRef pageRef, IModel<String> linkTextModel, PopupSettings popupSettings) {
        if (accessController != null && accessController.canAccess(pageRef.getPageClass())) {
            actions.add(new PageLinkAction(pageRef, linkTextModel, popupSettings));
        }
    }

    /**
     * Requests a separator before the next "real" menu item. See
     * {@link #pendingSeparator} for why this is deferred.
     */
    public void addSeparator() {
        actions.add(DummyAction.SEPARATOR);
    }

//    public DropDownMenuPanel addSubMenu(String name) {
//        DropDownMenuPanel subMenu = new DropDownMenuPanel("submenu", name, acl);
//        actions.add(subMenu);
//        return subMenu;
//    }

    /**
     * Emits a separator item if one is pending and at least one real item
     * precedes it; always clears the pending flag. Called by the createLink
     * methods just before they add a real item.
     */
    private void checkSeparator() {

        if (pendingSeparator && itemRepeater.size() > 0) {

            WebMarkupContainer item = new WebMarkupContainer(itemRepeater.newChildId());
            itemRepeater.add(item);

            item.add(new AttributeModifier("class", "pnl-DropDownMenu-separator"));

            // Separator rows have no link; the markup children are still
            // required by the item template, so add them invisibly.
            WebMarkupContainer link = new WebMarkupContainer("link");
            item.add(link);
            link.setVisible(false);
            link.add(new WebMarkupContainer("text"));

            item.add(new EmptyPanel("submenu"));
        }

        pendingSeparator = false;
    }

    /**
     * Clears the menu, removing all actions.
     */
    public void clearMenu() {
        actions.clear();
        itemRepeater.removeAll();
        panelRepeater.removeAll();
    }

    /** Renders a sub-menu entry. */
    private void createLink(DropDownMenuPanel subMenu) {

        visible = true;

        checkSeparator();

        WebMarkupContainer item = new WebMarkupContainer(itemRepeater.newChildId());
        itemRepeater.add(item);

        item.add(new AttributeModifier("class", "dropdown-submenu"));

        item.add(new EmptyPanel("link").setVisible(false));

        item.add(subMenu);

        // Remove class="dropdown-toggle", which interferes with the styling
//        subMenu.label.add(new AttributeModifier("class", ""));

    }

    /** Renders an external-URL entry. */
    private void createLink(ExternalLinkAction action) {

        visible = true;

        checkSeparator();

        WebMarkupContainer item = new WebMarkupContainer(itemRepeater.newChildId());
        itemRepeater.add(item);

        WebMarkupContainer link = new WebMarkupContainer("link");
        item.add(link);
        link.add(new AttributeModifier("href", action.url));

        link.add(new Label("text", action.linkTextModel).setRenderBodyOnly(true));

        item.add(new EmptyPanel("submenu"));
    }

    /**
     * Renders an AJAX action entry, re-checking access control and
     * ConditionalAction visibility/enablement at render time.
     */
    private void createLink(final NamedAjaxAction action) {

        boolean actionVisible = action instanceof ConditionalAction ? ((ConditionalAction) action).isActionVisible() : true;

        if (accessController != null && accessController.canAccess(action.getClass()) && actionVisible) {

            visible = true;

            checkSeparator();

            WebMarkupContainer item = new WebMarkupContainer(itemRepeater.newChildId());
            itemRepeater.add(item);

            AjaxLink<Void> link = new AjaxLink<Void>("link") {

                @Override
                protected void disableLink(ComponentTag tag) {
                    //
                    // NOTE: we set force-disabled here to prevent a data table on the page
                    // from re-enabling us. That's because the data table will disable page-level
                    // menus if any of the rows are checked.
                    //
                    if (tag.getAttributes().containsKey("class")) {
                        tag.put("class", tag.getAttribute("class") + " is-disabled force-disabled");
                    } else {
                        tag.put("class", "is-disabled force-disabled");
                    }
                }

                @Override
                public boolean isEnabled() {
                    if (action instanceof ConditionalAction) {
                        return ((ConditionalAction) action).isActionEnabled();
                    } else {
                        return super.isEnabled();
                    }
                }

                @Override
                public boolean isVisible() {
                    if (action instanceof ConditionalAction) {
                        return ((ConditionalAction) action).isActionVisible();
                    } else {
                        return super.isVisible();
                    }
                }

                @Override
                public void onClick(AjaxRequestTarget target) {
                    action.invoke(target);
                }

                @Override
                protected void updateAjaxAttributes(AjaxRequestAttributes attributes) {
                    // Needed so Bootstrap hides the menu
                    attributes.setEventPropagation(EventPropagation.BUBBLE);
                }
            };

            item.add(link);

            IModel<String> linkTextModel = new AbstractReadOnlyModel<String>() {
                @Override
                public String getObject() {
                    return action.getName(getLocale());
                }
            };

            link.add(new Label("text", linkTextModel).setRenderBodyOnly(true));

            item.add(new EmptyPanel("submenu"));

        }
    }

    /** Renders a bookmarkable page-link entry. */
    private void createLink(final PageLinkAction action) {

        // NOTE(review): the access check here is disabled (condition is
        // hard-coded to true); filtering happens earlier in addPageLink().
        // Confirm whether the commented check should be restored.
        if (true /*action.pageRef.canBeAccessedBy(acl)*/) {

            visible = true;

            checkSeparator();

            WebMarkupContainer item = new WebMarkupContainer(itemRepeater.newChildId());
            itemRepeater.add(item);

            BookmarkablePageLink<Void> link = new BookmarkablePageLink<Void>("link", action.pageRef.getPageClass(), action.pageRef.getPageParameters()) {

                @Override
                protected void disableLink(ComponentTag tag) {
                    tag.put("class", "disabled");
                }

                @Override
                public boolean isEnabled() {
                    return action.pageRef.isEnabled();
                }

            };

            item.add(link);

            link.setPopupSettings(action.popupSettings);

            link.add(new Label("text", action.linkTextModel).setRenderBodyOnly(true));

            item.add(new EmptyPanel("submenu"));
        }
    }

    /**
     * The whole panel is hidden when rebuildMenu() rendered no items.
     */
    @Override
    public boolean isVisible() {
        return visible; //itemRepeater.size() > 0;
    }

    /** Returns a fresh child id for the panel repeater. */
    public String newPanelId() {
        return panelRepeater.newChildId();
    }

    @Override
    protected void onBeforeRender() {
        // Rebuild before the superclass renders children, so visibility,
        // enablement and permissions are evaluated fresh for each render.
        rebuildMenu();
        super.onBeforeRender();
    }

    /**
     * Rebuilds the menu. Called each time the component is rendered.
     */
    private void rebuildMenu() {

        itemRepeater.removeAll();

        pendingSeparator = false;
        visible = false;

        for (Object action : actions) {
            if (action instanceof NamedAjaxAction) {
                createLink((NamedAjaxAction) action);
            } else if (action instanceof PageLinkAction) {
                createLink((PageLinkAction) action);
            } else if (action instanceof ExternalLinkAction) {
                createLink((ExternalLinkAction) action);
            } else if (action instanceof DropDownMenuPanel) {
                createLink((DropDownMenuPanel) action);
            } else if (action == DummyAction.SEPARATOR) {
                pendingSeparator = true;
            } else {
                throw new RuntimeException("Unrecognized action class: " + action.getClass() + ", value '" + action + "'");
            }
        }
    }

    /** Right-aligns the menu. Returns this panel for chaining. */
    public DropDownMenuPanel setAlignRight() {
        alignRight = true;
        return this;
    }

    /** Sets the trigger-link style. Returns this panel for chaining. */
    public DropDownMenuPanel setStyle(ToolStyle style) {
        this.style = style;
        return this;
    }

    /**
     * Sets the tooltip of the trigger link. Component-assigned models (e.g.
     * resource models) are wrapped against this component so they resolve.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public DropDownMenuPanel setToolTip(IModel<String> model) {
        if (model instanceof IComponentAssignedModel) {
            this.toolTipModel = ((IComponentAssignedModel)model).wrapOnAssignment(this);
        } else {
            this.toolTipModel = model;
        }
        return this;
    }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.arrow.vector.schema;

import static java.util.Arrays.asList;
import static org.apache.arrow.vector.schema.VectorLayout.booleanVector;
import static org.apache.arrow.vector.schema.VectorLayout.byteVector;
import static org.apache.arrow.vector.schema.VectorLayout.dataVector;
import static org.apache.arrow.vector.schema.VectorLayout.offsetVector;
import static org.apache.arrow.vector.schema.VectorLayout.typeVector;
import static org.apache.arrow.vector.schema.VectorLayout.validityVector;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.ArrowType.ArrowTypeVisitor;
import org.apache.arrow.vector.types.pojo.ArrowType.Binary;
import org.apache.arrow.vector.types.pojo.ArrowType.Bool;
import org.apache.arrow.vector.types.pojo.ArrowType.Date;
import org.apache.arrow.vector.types.pojo.ArrowType.Decimal;
import org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeList;
import org.apache.arrow.vector.types.pojo.ArrowType.FloatingPoint;
import org.apache.arrow.vector.types.pojo.ArrowType.Int;
import org.apache.arrow.vector.types.pojo.ArrowType.Interval;
import org.apache.arrow.vector.types.pojo.ArrowType.Null;
import org.apache.arrow.vector.types.pojo.ArrowType.Struct;
import org.apache.arrow.vector.types.pojo.ArrowType.Time;
import org.apache.arrow.vector.types.pojo.ArrowType.Timestamp;
import org.apache.arrow.vector.types.pojo.ArrowType.Union;
import org.apache.arrow.vector.types.pojo.ArrowType.Utf8;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.common.base.Preconditions;

/**
 * The layout of vectors for a given type.
 * It defines its own vectors followed by the vectors for the children
 * if it is a nested type (Struct_, List, Union)
 */
public class TypeLayout {

  /**
   * Computes the buffer layout (validity/offset/type/data vectors) used to
   * materialize values of the given Arrow logical type.
   *
   * @param arrowType the logical type
   * @return the vector layout for that type
   * @throws UnsupportedOperationException for unknown union modes,
   *         floating-point precisions or interval units
   */
  public static TypeLayout getTypeLayout(final ArrowType arrowType) {
    TypeLayout layout = arrowType.accept(new ArrowTypeVisitor<TypeLayout>() {

      @Override
      public TypeLayout visit(Int type) {
        return newFixedWidthTypeLayout(dataVector(type.getBitWidth()));
      }

      @Override
      public TypeLayout visit(Union type) {
        List<VectorLayout> vectors;
        switch (type.getMode()) {
          case Dense:
            vectors = asList(
                // TODO: validate this
                validityVector(),
                typeVector(),
                offsetVector() // offset to find the vector
            );
            break;
          case Sparse:
            vectors = asList(
                typeVector() // type of the value at the index or 0 if null
            );
            break;
          default:
            throw new UnsupportedOperationException("Unsupported Union Mode: " + type.getMode());
        }
        return new TypeLayout(vectors);
      }

      @Override
      public TypeLayout visit(Struct type) {
        List<VectorLayout> vectors = asList(
            validityVector()
        );
        return new TypeLayout(vectors);
      }

      @Override
      public TypeLayout visit(Timestamp type) {
        return newFixedWidthTypeLayout(dataVector(64));
      }

      @Override
      public TypeLayout visit(org.apache.arrow.vector.types.pojo.ArrowType.List type) {
        List<VectorLayout> vectors = asList(
            validityVector(),
            offsetVector()
        );
        return new TypeLayout(vectors);
      }

      @Override
      public TypeLayout visit(FixedSizeList type) {
        // No offset vector: element positions are implied by the fixed width.
        List<VectorLayout> vectors = asList(
            validityVector()
        );
        return new TypeLayout(vectors);
      }

      @Override
      public TypeLayout visit(FloatingPoint type) {
        int bitWidth;
        switch (type.getPrecision()) {
          case HALF:
            bitWidth = 16;
            break;
          case SINGLE:
            bitWidth = 32;
            break;
          case DOUBLE:
            bitWidth = 64;
            break;
          default:
            throw new UnsupportedOperationException("Unsupported Precision: " + type.getPrecision());
        }
        return newFixedWidthTypeLayout(dataVector(bitWidth));
      }

      @Override
      public TypeLayout visit(Decimal type) {
        // TODO: check size
        return newFixedWidthTypeLayout(dataVector(64)); // actually depends on the type fields
      }

      @Override
      public TypeLayout visit(Bool type) {
        return newFixedWidthTypeLayout(booleanVector());
      }

      @Override
      public TypeLayout visit(Binary type) {
        return newVariableWidthTypeLayout();
      }

      @Override
      public TypeLayout visit(Utf8 type) {
        return newVariableWidthTypeLayout();
      }

      @Override
      public TypeLayout visit(Null type) {
        // Null type carries no data at all.
        return new TypeLayout(Collections.<VectorLayout>emptyList());
      }

      @Override
      public TypeLayout visit(Date type) {
        return newFixedWidthTypeLayout(dataVector(64));
      }

      @Override
      public TypeLayout visit(Time type) {
        return newFixedWidthTypeLayout(dataVector(type.getBitWidth()));
      }

      @Override
      public TypeLayout visit(Interval type) {
        // TODO: check size
        switch (type.getUnit()) {
          case DAY_TIME:
            return newFixedWidthTypeLayout(dataVector(64));
          case YEAR_MONTH:
            return newFixedWidthTypeLayout(dataVector(64));
          default:
            throw new UnsupportedOperationException("Unknown unit " + type.getUnit());
        }
      }

      // Variable-width types carry validity + offsets + raw bytes.
      private TypeLayout newVariableWidthTypeLayout() {
        return newPrimitiveTypeLayout(validityVector(), offsetVector(), byteVector());
      }

      private TypeLayout newPrimitiveTypeLayout(VectorLayout... vectors) {
        return new TypeLayout(asList(vectors));
      }

      // Fixed-width types carry validity + a single data vector.
      // (Was accidentally public; nothing outside this anonymous visitor
      // can reference it, so it is private like its sibling helpers.)
      private TypeLayout newFixedWidthTypeLayout(VectorLayout dataVector) {
        return newPrimitiveTypeLayout(validityVector(), dataVector);
      }

    });
    return layout;
  }

  private final List<VectorLayout> vectors;

  /**
   * Creates a layout from the given vector list (used by Jackson
   * deserialization as well).
   *
   * @param vectors the buffer layouts, in order; must not be null
   */
  @JsonCreator
  public TypeLayout(@JsonProperty("vectors") List<VectorLayout> vectors) {
    this.vectors = Preconditions.checkNotNull(vectors);
  }

  public TypeLayout(VectorLayout... vectors) {
    this(asList(vectors));
  }

  public List<VectorLayout> getVectors() {
    return vectors;
  }

  /**
   * Returns the vector type of each buffer layout, in order.
   */
  @JsonIgnore
  public List<ArrowVectorType> getVectorTypes() {
    List<ArrowVectorType> types = new ArrayList<>(vectors.size());
    for (VectorLayout vector : vectors) {
      types.add(vector.getType());
    }
    return types;
  }

  @Override
  public String toString() {
    return vectors.toString();
  }

  @Override
  public int hashCode() {
    return vectors.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    TypeLayout other = (TypeLayout) obj;
    return vectors.equals(other.vectors);
  }

}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.xcontent; import com.fasterxml.jackson.core.JsonParseException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.nullValue; public class XContentParserTests extends ESTestCase { public void testReadList() throws IOException { assertThat(readList("{\"foo\": [\"bar\"]}"), contains("bar")); assertThat(readList("{\"foo\": [\"bar\",\"baz\"]}"), contains("bar", "baz")); assertThat(readList("{\"foo\": [1, 2, 3], \"bar\": 4}"), contains(1, 2, 3)); assertThat(readList("{\"foo\": [{\"bar\":1},{\"baz\":2},{\"qux\":3}]}"), hasSize(3)); 
assertThat(readList("{\"foo\": [null]}"), contains(nullValue())); assertThat(readList("{\"foo\": []}"), hasSize(0)); assertThat(readList("{\"foo\": [1]}"), contains(1)); assertThat(readList("{\"foo\": [1,2]}"), contains(1, 2)); assertThat(readList("{\"foo\": [{},{},{},{}]}"), hasSize(4)); } public void testReadListThrowsException() throws IOException { // Calling XContentParser.list() or listOrderedMap() to read a simple // value or object should throw an exception assertReadListThrowsException("{\"foo\": \"bar\"}"); assertReadListThrowsException("{\"foo\": 1, \"bar\": 2}"); assertReadListThrowsException("{\"foo\": {\"bar\":\"baz\"}}"); } @SuppressWarnings("unchecked") private <T> List<T> readList(String source) throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { XContentParser.Token token = parser.nextToken(); assertThat(token, equalTo(XContentParser.Token.START_OBJECT)); token = parser.nextToken(); assertThat(token, equalTo(XContentParser.Token.FIELD_NAME)); assertThat(parser.currentName(), equalTo("foo")); return (List<T>) (randomBoolean() ? 
// NOTE(review): the fragment below completes readList(...), whose opening lines lie
// before this chunk; it randomly reads via the ordered or unordered list variant.
parser.listOrderedMap() : parser.list()); } }

    /**
     * Asserts that parsing {@code source} via readList() fails with an
     * ElasticsearchParseException whose message mentions "Failed to parse list".
     */
    private void assertReadListThrowsException(String source) {
        try {
            readList(source);
            fail("should have thrown a parse exception");
        } catch (Exception e) {
            assertThat(e, instanceOf(ElasticsearchParseException.class));
            assertThat(e.getMessage(), containsString("Failed to parse list"));
        }
    }

    /** Reads string-valued maps of 1, 2 and 0 entries back out of JSON. */
    public void testReadMapStrings() throws IOException {
        Map<String, String> map = readMapStrings("{\"foo\": {\"kbar\":\"vbar\"}}");
        assertThat(map.get("kbar"), equalTo("vbar"));
        assertThat(map.size(), equalTo(1));
        map = readMapStrings("{\"foo\": {\"kbar\":\"vbar\", \"kbaz\":\"vbaz\"}}");
        assertThat(map.get("kbar"), equalTo("vbar"));
        assertThat(map.get("kbaz"), equalTo("vbaz"));
        assertThat(map.size(), equalTo(2));
        map = readMapStrings("{\"foo\": {}}");
        assertThat(map.size(), equalTo(0));
    }

    /**
     * Advances the parser to the object under the "foo" key and reads it as a
     * string-to-string map, randomly using the ordered or unordered variant.
     */
    private Map<String, String> readMapStrings(String source) throws IOException {
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            XContentParser.Token token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.START_OBJECT));
            token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
            assertThat(parser.currentName(), equalTo("foo"));
            token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.START_OBJECT));
            return randomBoolean() ? parser.mapStringsOrdered() : parser.mapStrings();
        }
    }

    @SuppressWarnings("deprecation") // #isBooleanValueLenient() and #booleanValueLenient() are the test subjects
    public void testReadLenientBooleans() throws IOException {
        // allow String, boolean and int representations of lenient booleans
        String falsy = randomFrom("\"off\"", "\"no\"", "\"0\"", "0", "\"false\"", "false");
        String truthy = randomFrom("\"on\"", "\"yes\"", "\"1\"", "1", "\"true\"", "true");
        try (XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"foo\": " + falsy + ", \"bar\": " + truthy + "}")) {
            XContentParser.Token token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.START_OBJECT));
            token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
            assertThat(parser.currentName(), equalTo("foo"));
            token = parser.nextToken();
            // a lenient boolean may arrive as a string, number or boolean token
            assertThat(token, isIn(
                Arrays.asList(XContentParser.Token.VALUE_STRING, XContentParser.Token.VALUE_NUMBER,
                    XContentParser.Token.VALUE_BOOLEAN)));
            assertTrue(parser.isBooleanValueLenient());
            assertFalse(parser.booleanValueLenient());
            token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
            assertThat(parser.currentName(), equalTo("bar"));
            token = parser.nextToken();
            assertThat(token, isIn(
                Arrays.asList(XContentParser.Token.VALUE_STRING, XContentParser.Token.VALUE_NUMBER,
                    XContentParser.Token.VALUE_BOOLEAN)));
            assertTrue(parser.isBooleanValueLenient());
            assertTrue(parser.booleanValueLenient());
        }
    }

    /**
     * The strict boolean accessors must reject values ("off", "1", ...) that only
     * the deprecated lenient parsing accepted.
     */
    public void testReadBooleansFailsForLenientBooleans() throws IOException {
        String falsy = randomFrom("\"off\"", "\"no\"", "\"0\"", "0");
        String truthy = randomFrom("\"on\"", "\"yes\"", "\"1\"", "1");
        try (XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"foo\": " + falsy + ", \"bar\": " + truthy + "}")) {
            XContentParser.Token token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.START_OBJECT));
            token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
            assertThat(parser.currentName(), equalTo("foo"));
            token = parser.nextToken();
            assertThat(token, isIn(Arrays.asList(XContentParser.Token.VALUE_STRING,
                XContentParser.Token.VALUE_NUMBER)));
            assertFalse(parser.isBooleanValue());
            // string values fail our validation; numeric values fail inside Jackson
            if (token.equals(XContentParser.Token.VALUE_STRING)) {
                expectThrows(IllegalArgumentException.class, parser::booleanValue);
            } else {
                expectThrows(JsonParseException.class, parser::booleanValue);
            }
            token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
            assertThat(parser.currentName(), equalTo("bar"));
            token = parser.nextToken();
            assertThat(token, isIn(Arrays.asList(XContentParser.Token.VALUE_STRING,
                XContentParser.Token.VALUE_NUMBER)));
            assertFalse(parser.isBooleanValue());
            if (token.equals(XContentParser.Token.VALUE_STRING)) {
                expectThrows(IllegalArgumentException.class, parser::booleanValue);
            } else {
                expectThrows(JsonParseException.class, parser::booleanValue);
            }
        }
    }

    /** Strict boolean reading: "true"/"false", quoted or not, are accepted. */
    public void testReadBooleans() throws IOException {
        String falsy = randomFrom("\"false\"", "false");
        String truthy = randomFrom("\"true\"", "true");
        try (XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"foo\": " + falsy + ", \"bar\": " + truthy + "}")) {
            XContentParser.Token token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.START_OBJECT));
            token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
            assertThat(parser.currentName(), equalTo("foo"));
            token = parser.nextToken();
            assertThat(token, isIn(Arrays.asList(XContentParser.Token.VALUE_STRING,
                XContentParser.Token.VALUE_BOOLEAN)));
            assertTrue(parser.isBooleanValue());
            assertFalse(parser.booleanValue());
            token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
            assertThat(parser.currentName(), equalTo("bar"));
            token = parser.nextToken();
            assertThat(token, isIn(Arrays.asList(XContentParser.Token.VALUE_STRING,
                XContentParser.Token.VALUE_BOOLEAN)));
            assertTrue(parser.isBooleanValue());
            assertTrue(parser.booleanValue());
        }
    }

    /** An empty JSON array parses to an empty list. */
    public void testEmptyList() throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
            .startArray("some_array")
            .endArray().endObject();
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals("some_array", parser.currentName());
            if (random().nextBoolean()) {
                // sometimes read the start array token, sometimes not
                assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken());
            }
            assertEquals(Collections.emptyList(), parser.list());
        }
    }

    /** A flat array of scalars parses to the matching List, preserving order. */
    public void testSimpleList() throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
            .startArray("some_array")
            .value(1)
            .value(3)
            .value(0)
            .endArray().endObject();
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals("some_array", parser.currentName());
            if (random().nextBoolean()) {
                // sometimes read the start array token, sometimes not
                assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken());
            }
            assertEquals(Arrays.asList(1, 3, 0), parser.list());
        }
    }

    /** Arrays nested inside an array parse to nested lists. */
    public void testNestedList() throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
            .startArray("some_array")
            .startArray().endArray()
            .startArray().value(1).value(3).endArray()
            .startArray().value(2).endArray()
            .endArray().endObject();
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals("some_array", parser.currentName());
            if (random().nextBoolean()) {
                // sometimes read the start array token, sometimes not
                assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken());
            }
            assertEquals(
                Arrays.asList(Collections.<Integer>emptyList(), Arrays.asList(1, 3), Arrays.asList(2)),
                parser.list());
        }
    }

    /** Objects nested inside an array parse to maps inside the list. */
    public void testNestedMapInList() throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
            .startArray("some_array")
            .startObject().field("foo", "bar").endObject()
            .startObject().endObject()
            .endArray().endObject();
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals("some_array", parser.currentName());
            if (random().nextBoolean()) {
                // sometimes read the start array token, sometimes not
                assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken());
            }
            assertEquals(
                Arrays.asList(singletonMap("foo", "bar"), emptyMap()),
                parser.list());
        }
    }
}
package pmurray_at_bigpond_dot_com.arddrive;

import android.Manifest;
import android.bluetooth.BluetoothDevice;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Base64;
import android.view.LayoutInflater;
import android.view.View;
import android.view.Menu;
import android.view.MenuItem;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.SeekBar;
import android.widget.TextView;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static pmurray_at_bigpond_dot_com.arddrive.BluetoothService.*;

/**
 * Main screen. Shows a rolling log of bluetooth-related broadcasts — both the
 * app-local broadcasts emitted by {@code BluetoothService} and the low-level
 * {@link BluetoothDevice} system broadcasts — and lets the user pick a
 * bluetooth device via {@link SelectDevice}.
 */
public class MainActivity extends AppCompatActivity {
    private static final int RETRY_CHOOSE_BLUETOOTH = 0xBEEF + 1;

    /** Cap on the number of broadcast rows retained in the on-screen list. */
    private static final int MAX_LOG_ENTRIES = 20;

    /**
     * Immutable view-model for one received broadcast: device label, short
     * action name, serial counters, byte count, uuid and a printable rendering
     * of the payload / exception text.
     */
    static class MyThing {
        final String mac;
        final String act;
        final int tx, rx, nb;
        final String uuid;
        final String txt;

        MyThing(Intent msg) {
            if (msg.hasExtra(BluetoothDevice.EXTRA_DEVICE)) {
                BluetoothDevice device = msg.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
                // NOTE(review): the field is called "mac" but getName() returns the
                // friendly device name; device.getAddress() may have been intended — confirm.
                mac = device.getName();
            } else {
                mac = "NO DEVICE";
            }

            // Keep only the last dotted segment of the action, e.g. "ACTION_FOUND".
            String s = msg.getAction();
            if (s != null) {
                s = s.substring(s.lastIndexOf('.') + 1);
            }
            act = s;

            // -1 consistently means "extra absent" for the numeric fields.
            tx = msg.hasExtra(EXTRA_SERIAL_TX) ? msg.getIntExtra(EXTRA_SERIAL_TX, -1) : -1;
            rx = msg.hasExtra(EXTRA_SERIAL_RX) ? msg.getIntExtra(EXTRA_SERIAL_RX, -1) : -1;
            uuid = msg.hasExtra(BluetoothDevice.EXTRA_UUID)
                    ? msg.getParcelableExtra(BluetoothDevice.EXTRA_UUID).toString()
                    : null;
            nb = msg.hasExtra(EXTRA_BYTES_N) ? msg.getIntExtra(EXTRA_BYTES_N, -1) : -1;

            StringBuilder sb = new StringBuilder();
            sb.append(' ');
            if (msg.hasExtra(EXTRA_BYTES)) {
                byte[] b = msg.getByteArrayExtra(EXTRA_BYTES);
                // EXTRA_BYTES_N, when present and sane, limits how much of the array is meaningful.
                int bytes = nb == -1 || nb > b.length ? b.length : nb;
                sb.append(" [");
                for (int i = 0; i < bytes; i++) {
                    byte bb = b[i];
                    if (bb >= ' ' && bb < 127) {
                        // printable ASCII is shown as-is
                        sb.append((char) bb);
                    } else {
                        // non-printable bytes rendered as [xx] lowercase hex
                        sb.append('[');
                        byte nibble = (byte) ((bb >> 4) & 0xf);
                        sb.append((char) (nibble + (nibble < 10 ? '0' : 'a' - 10)));
                        nibble = (byte) (bb & 0xf);
                        sb.append((char) (nibble + (nibble < 10 ? '0' : 'a' - 10)));
                        sb.append(']');
                    }
                }
                sb.append("]");
            }
            if (msg.hasExtra(EXTRA_EXCEPTION)) {
                sb.append(msg.getStringExtra(EXTRA_EXCEPTION));
            }
            txt = sb.toString();
        }
    }

    /** Renders {@link MyThing} rows into {@code R.layout.broadcast_list_item}. */
    static class BroadcastsAdapter extends ArrayAdapter<MyThing> {
        public BroadcastsAdapter(Context context, List<MyThing> messages) {
            // NOTE(review): ctor declares device_list_item but getView() always
            // inflates broadcast_list_item; the ctor resource is effectively unused.
            super(context, R.layout.device_list_item, messages);
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            MyThing msg = getItem(position);
            // Reuse the recycled row when available, otherwise inflate a new one.
            if (convertView == null) {
                convertView = LayoutInflater.from(getContext())
                        .inflate(R.layout.broadcast_list_item, parent, false);
            }
            TextView mac = (TextView) convertView.findViewById(R.id.broadcast_mac);
            TextView act = (TextView) convertView.findViewById(R.id.broadcast_action);
            TextView txt = (TextView) convertView.findViewById(R.id.broadcast_text);
            TextView nb = (TextView) convertView.findViewById(R.id.broadcast_nBytes);
            TextView rx = (TextView) convertView.findViewById(R.id.broadcast_rxSerial);
            TextView tx = (TextView) convertView.findViewById(R.id.broadcast_txSerial);
            TextView uuid = (TextView) convertView.findViewById(R.id.broadcast_uuid);
            mac.setText(msg.mac);
            act.setText(msg.act);
            txt.setText(msg.txt);
            // -1 means "extra absent"; show an empty cell rather than "-1".
            nb.setText(msg.nb == -1 ? null : Integer.toString(msg.nb));
            tx.setText(msg.tx == -1 ? null : Integer.toString(msg.tx));
            rx.setText(msg.rx == -1 ? null : Integer.toString(msg.rx));
            uuid.setText(msg.uuid);
            return convertView;
        }
    }

    BroadcastsAdapter broadcastsAdapter;

    /** Appends one broadcast to the list, evicting the oldest rows beyond the cap. */
    private void logBroadcast(Intent intent) {
        while (broadcastsAdapter.getCount() >= MAX_LOG_ENTRIES) {
            broadcastsAdapter.remove(broadcastsAdapter.getItem(0));
        }
        broadcastsAdapter.add(new MyThing(intent));
    }

    /** Receives the app-local broadcasts from BluetoothService. */
    final BroadcastReceiver broadcastReciever = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            logBroadcast(intent);
        }
    };

    /** Receives the system-wide BluetoothDevice broadcasts. */
    final BroadcastReceiver btReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            logBroadcast(intent);
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        // fragment goes here?
        broadcastsAdapter = new BroadcastsAdapter(this, new ArrayList<MyThing>());
        ListView broadcastList = (ListView) findViewById(R.id.broadcastList);
        broadcastList.setAdapter(broadcastsAdapter);
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Local broadcasts from the service plus the raw system bluetooth broadcasts.
        LocalBroadcastManager.getInstance(getApplicationContext())
                .registerReceiver(broadcastReciever, addAllBroadcasts(new IntentFilter()));
        registerReceiver(btReceiver, lowLevelBtFiter());
    }

    @Override
    protected void onStop() {
        LocalBroadcastManager.getInstance(getApplicationContext()).unregisterReceiver(broadcastReciever);
        unregisterReceiver(btReceiver);
        super.onStop();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // The action bar automatically handles Home/Up clicks so long as a
        // parent activity is specified in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_select_device) {
            chooseBluetooth();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Launches the device chooser once the BLUETOOTH permission is held. */
    protected void chooseBluetooth() {
        if (obtainBlueToothPermission(RETRY_CHOOSE_BLUETOOTH)) {
            startActivity(new Intent(this, SelectDevice.class));
        }
    }

    /**
     * Returns true when the BLUETOOTH permission is already granted; otherwise
     * starts the request flow and returns false. {@code callbackCode} identifies
     * the interrupted operation to {@link #retry(int)} once permission arrives.
     */
    protected boolean obtainBlueToothPermission(final int callbackCode) {
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH)
                == PackageManager.PERMISSION_GRANTED) {
            return true;
        }
        if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.BLUETOOTH)) {
            // BUGFIX: a Snackbar supports a single action. The original chained a
            // second setAction("Cancel", null), which *replaced* the "OK" action,
            // so the rationale could never be acknowledged. Dismissing the
            // snackbar is the cancel path.
            Snackbar.make(findViewById(android.R.id.content), R.string.bluetoothRationale,
                    Snackbar.LENGTH_INDEFINITE)
                    .setAction("OK", new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            retry(callbackCode);
                        }
                    })
                    .show();
        } else {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.BLUETOOTH}, callbackCode);
        }
        return false;
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String permissions[],
                                           @NonNull int[] grantResults) {
        // FIX: forward to super so support-library fragments also get the callback.
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        for (int r : grantResults) {
            if (r != PackageManager.PERMISSION_GRANTED) {
                Snackbar.make(findViewById(android.R.id.content), R.string.permissionRefused,
                        Snackbar.LENGTH_LONG).show();
                return;
            }
        }
        retry(requestCode);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // BUGFIX: was missing both @Override and the super call.
        super.onActivityResult(requestCode, resultCode, data);
        if (resultCode == RESULT_OK) {
            retry(requestCode);
        } else {
            Snackbar.make(findViewById(android.R.id.content), R.string.activityUnsuccessful,
                    Snackbar.LENGTH_LONG).show();
        }
    }

    /** Re-runs the operation interrupted by a permission/activity round-trip. */
    private void retry(int requestCode) {
        switch (requestCode) {
            case RETRY_CHOOSE_BLUETOOTH:
                chooseBluetooth();
                break;
        }
    }

    /** Filter matching the low-level system BluetoothDevice broadcasts we log. */
    protected IntentFilter lowLevelBtFiter() {
        IntentFilter f = new IntentFilter();
        f.addAction(BluetoothDevice.ACTION_ACL_CONNECTED);
        f.addAction(BluetoothDevice.ACTION_ACL_DISCONNECT_REQUESTED);
        f.addAction(BluetoothDevice.ACTION_ACL_DISCONNECTED);
        f.addAction(BluetoothDevice.ACTION_BOND_STATE_CHANGED);
        f.addAction(BluetoothDevice.ACTION_CLASS_CHANGED);
        f.addAction(BluetoothDevice.ACTION_FOUND);
        f.addAction(BluetoothDevice.ACTION_NAME_CHANGED);
        f.addAction(BluetoothDevice.ACTION_PAIRING_REQUEST);
        f.addAction(BluetoothDevice.ACTION_UUID);
        return f;
    }
}