repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
broadinstitute/hellbender
src/test/java/org/broadinstitute/hellbender/tools/spark/pipelines/SortSamSparkIntegrationTest.java
7500
package org.broadinstitute.hellbender.tools.spark.pipelines;

import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import org.apache.spark.api.java.JavaRDD;
import org.broadinstitute.barclay.argparser.CommandLineException;
import org.broadinstitute.hellbender.CommandLineProgramTest;
import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import org.broadinstitute.hellbender.engine.GATKPath;
import org.broadinstitute.hellbender.engine.ReadsDataSource;
import org.broadinstitute.hellbender.engine.ReadsPathDataSource;
import org.broadinstitute.hellbender.engine.spark.GATKSparkTool;
import org.broadinstitute.hellbender.engine.spark.SparkContextFactory;
import org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSource;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.testutils.ArgumentsBuilder;
import org.broadinstitute.hellbender.testutils.BaseTest;
import org.broadinstitute.hellbender.testutils.SamAssertionUtils;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.io.File;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Integration tests for SortSamSpark covering SAM/BAM/CRAM inputs and outputs,
 * coordinate and queryname sort orders, sharded output, and rejection of
 * sort orders that are not valid sorting targets.
 */
public final class SortSamSparkIntegrationTest extends CommandLineProgramTest {
    // Test-data file names: the "count_reads" files are queryname sorted,
    // the "count_reads_sorted" files are coordinate sorted.
    public static final String COUNT_READS_SAM = "count_reads.sam";
    public static final String COORDINATE_SAM = "count_reads_sorted.sam";
    public static final String QUERY_NAME_BAM = "count_reads.bam";
    public static final String COORDINATE_BAM = "count_reads_sorted.bam";
    public static final String COORDINATE_CRAM = "count_reads_sorted.cram";
    public static final String QUERY_NAME_CRAM = "count_reads.cram";
    public static final String REF = "count_reads.fasta";
    public static final String CRAM = ".cram";
    public static final String BAM = ".bam";
    public static final String SAM = ".sam";

    /**
     * Rows: input file, expected output file, reference (null when not needed),
     * output extension, requested sort order.
     */
    @DataProvider(name="sortbams")
    public Object[][] sortBAMData() {
        return new Object[][] {
                {COUNT_READS_SAM, COORDINATE_SAM, null, SAM, SAMFileHeader.SortOrder.coordinate},
                {QUERY_NAME_BAM, COORDINATE_BAM, null, BAM, SAMFileHeader.SortOrder.coordinate},
                {QUERY_NAME_CRAM, COORDINATE_CRAM, REF, BAM, SAMFileHeader.SortOrder.coordinate},
                {QUERY_NAME_CRAM, COORDINATE_CRAM, REF, CRAM, SAMFileHeader.SortOrder.coordinate},
                {QUERY_NAME_BAM, COORDINATE_BAM, REF, CRAM, SAMFileHeader.SortOrder.coordinate},

                {COORDINATE_SAM, COUNT_READS_SAM, null, SAM, SAMFileHeader.SortOrder.queryname},
                {COORDINATE_BAM, QUERY_NAME_BAM, null, BAM, SAMFileHeader.SortOrder.queryname},
                {COORDINATE_CRAM, QUERY_NAME_CRAM, REF, BAM, SAMFileHeader.SortOrder.queryname},
                {COORDINATE_CRAM, QUERY_NAME_CRAM, REF, CRAM, SAMFileHeader.SortOrder.queryname},
                {COORDINATE_BAM, QUERY_NAME_BAM, REF, CRAM, SAMFileHeader.SortOrder.queryname},
        };
    }

    /**
     * Runs the tool on a single (non-sharded) output and checks both that the
     * result matches the expected file exactly and that htsjdk agrees the
     * records are in the requested order.
     */
    @Test(dataProvider="sortbams", groups="spark")
    public void testSortBAMs(
            final String inputFileName,
            final String expectedOutputFileName,
            final String referenceFileName,
            final String outputExtension,
            final SAMFileHeader.SortOrder sortOrder) throws Exception {
        final File inputFile = getTestFile(inputFileName);
        final File expectedOutputFile = getTestFile(expectedOutputFileName);
        final File actualOutputFile = createTempFile("sort_sam", outputExtension);
        final File referenceFile = null == referenceFileName ? null : getTestFile(referenceFileName);
        final SamReaderFactory factory = SamReaderFactory.makeDefault();

        final ArgumentsBuilder args = new ArgumentsBuilder();
        args.addInput(inputFile);
        args.addOutput(actualOutputFile);
        if (null != referenceFile) {
            // CRAM inputs/outputs require the reference for both the tool and the reader.
            args.addReference(referenceFile);
            factory.referenceSequence(referenceFile);
        }
        args.add(StandardArgumentDefinitions.SORT_ORDER_LONG_NAME, sortOrder.name());

        this.runCommandLine(args);

        //test files are exactly equal
        SamAssertionUtils.assertSamsEqual(actualOutputFile, expectedOutputFile, ValidationStringency.DEFAULT_STRINGENCY, referenceFile);

        //test sorting matches htsjdk
        try(ReadsDataSource in = new ReadsPathDataSource(actualOutputFile.toPath(), factory)) {
            BaseTest.assertSorted(Utils.stream(in).map(read -> read.convertToSAMRecord(in.getHeader())).iterator(), sortOrder.getComparatorInstance());
        }
    }

    /**
     * Runs the tool with sharded output and asserts the reloaded, concatenated
     * shards are globally sorted.  There is no expected-file comparison here
     * because shard boundaries are not deterministic.
     */
    // This test is disabled until https://github.com/broadinstitute/gatk/issues/5881 is fixed
    @Test(enabled = false, dataProvider="sortbams", groups="spark")
    public void testSortBAMsSharded(
            final String inputFileName,
            final String unused,
            final String referenceFileName,
            final String outputExtension,
            final SAMFileHeader.SortOrder sortOrder) {
        final File inputFile = getTestFile(inputFileName);
        final File actualOutputFile = createTempFile("sort_sam", outputExtension);
        final File referenceFile = null == referenceFileName ? null : getTestFile(referenceFileName);

        final ArgumentsBuilder args = new ArgumentsBuilder();
        args.addInput(inputFile);
        args.addOutput(actualOutputFile);
        if (null != referenceFile) {
            args.addReference(referenceFile);
        }
        args.add(StandardArgumentDefinitions.SORT_ORDER_LONG_NAME, sortOrder.name());
        args.add(GATKSparkTool.SHARDED_OUTPUT_LONG_NAME, true);
        args.add(GATKSparkTool.NUM_REDUCERS_LONG_NAME, "2");

        this.runCommandLine(args);

        // Build the paths once and apply one consistent null check.  (The header
        // lookup previously tested referenceFileName while the reads lookup
        // tested referenceFile, which invited an NPE if the two ever diverged.)
        final GATKPath outputPath = new GATKPath(actualOutputFile.getAbsolutePath());
        final GATKPath referencePath = referenceFile == null ? null : new GATKPath(referenceFile.getAbsolutePath());

        final ReadsSparkSource source = new ReadsSparkSource(SparkContextFactory.getTestSparkContext());
        final JavaRDD<GATKRead> reads = source.getParallelReads(outputPath, referencePath);
        final SAMFileHeader header = source.getHeader(outputPath, referencePath);
        final List<SAMRecord> reloadedReads = reads.collect().stream().map(read -> read.convertToSAMRecord(header)).collect(Collectors.toList());
        BaseTest.assertSorted(reloadedReads.iterator(), sortOrder.getComparatorInstance(), reloadedReads.stream().map(SAMRecord::getSAMString).collect(Collectors.joining("\n")));
    }

    /** Sort orders that are not legal targets for sorting. */
    @DataProvider
    public Object[][] getInvalidSortOrders(){
        return new Object[][]{
                {SAMFileHeader.SortOrder.unknown},
                {SAMFileHeader.SortOrder.unsorted},
                {SAMFileHeader.SortOrder.duplicate}
        };
    }

    /** The tool must reject sort orders that cannot be sorted to. */
    @Test(expectedExceptions = CommandLineException.BadArgumentValue.class, dataProvider = "getInvalidSortOrders")
    public void testBadSortOrders(SAMFileHeader.SortOrder badOrder){
        final File unsortedBam = new File(getTestDataDir(), QUERY_NAME_BAM);
        final ArgumentsBuilder args = new ArgumentsBuilder();
        args.addInput(unsortedBam);
        args.addOutput(createTempFile("sort_bam_spark", BAM));
        args.add(StandardArgumentDefinitions.SORT_ORDER_LONG_NAME, badOrder.toString());

        this.runCommandLine(args);
    }
}
bsd-3-clause
LWJGL-CI/lwjgl3
modules/lwjgl/zstd/src/generated/java/org/lwjgl/util/zstd/LibZstd.java
1182
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.util.zstd;

import org.lwjgl.system.*;

import static org.lwjgl.system.MemoryUtil.*;

/** Initializes the zstd shared library. */
final class LibZstd {

    static {
        // Resolve the platform-specific name of the bundled native library and load it.
        String libName = Platform.mapLibraryNameBundled("lwjgl_zstd");
        Library.loadSystem(System::load, System::loadLibrary, LibZstd.class, "org.lwjgl.zstd", libName);

        // Route the native code's allocations through LWJGL's configured memory allocator.
        MemoryAllocator allocator = getAllocator(Configuration.DEBUG_MEMORY_ALLOCATOR_INTERNAL.get(true));
        setupMalloc(
            allocator.getMalloc(), allocator.getCalloc(), allocator.getRealloc(), allocator.getFree(),
            allocator.getAlignedAlloc(), allocator.getAlignedFree()
        );
    }

    private LibZstd() {
    }

    /** Forces the static initializer above to run; safe to call more than once. */
    static void initialize() {
        // intentionally empty to trigger static initializer
    }

    // Passes the allocator's function pointers down to the native side.
    private static native void setupMalloc(
        long malloc,
        long calloc,
        long realloc,
        long free,
        long aligned_alloc,
        long aligned_free
    );

}
bsd-3-clause
fnussber/ocs
bundle/jsky.app.ot/src/main/java/jsky/app/ot/gemini/editor/offset/AbstractOffsetPosListEditor.java
10734
// Copyright 1997 Association for Universities for Research in Astronomy, Inc.,
// Observatory Control System, Gemini Telescopes Project.
// See the file LICENSE for complete details.
//
// $Id: AbstractOffsetPosListEditor.java 47000 2012-07-26 19:15:10Z swalker $
//
package jsky.app.ot.gemini.editor.offset;

import edu.gemini.pot.sp.ISPObservation;
import edu.gemini.pot.sp.ISPSeqComponent;
import edu.gemini.shared.gui.RotatedButtonUI;
import edu.gemini.shared.gui.text.AbstractDocumentListener;
import edu.gemini.shared.util.immutable.None;
import edu.gemini.shared.util.immutable.Option;
import edu.gemini.shared.util.immutable.Some;
import edu.gemini.spModel.data.IOffsetPosListProvider;
import edu.gemini.spModel.gemini.seqcomp.SeqRepeatOffsetBase;
import edu.gemini.spModel.guide.GuideProbe;
import edu.gemini.spModel.guide.GuideProbeUtil;
import edu.gemini.spModel.obscomp.SPInstObsComp;
import edu.gemini.spModel.target.env.GuideProbeTargets;
import edu.gemini.spModel.target.env.TargetEnvironment;
import edu.gemini.spModel.target.obsComp.TargetObsComp;
import edu.gemini.spModel.target.offset.OffsetPosBase;
import edu.gemini.spModel.target.offset.OffsetPosList;
import edu.gemini.spModel.target.offset.OffsetPosSelection;
import edu.gemini.spModel.telescope.IssPort;
import edu.gemini.spModel.telescope.IssPortProvider;
import jsky.app.ot.OTOptions;
import jsky.app.ot.editor.OtItemEditor;
import jsky.app.ot.util.OtColor;

import javax.swing.BorderFactory;
import javax.swing.JPanel;
import javax.swing.JToggleButton;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.Document;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static jsky.app.ot.util.OtColor.DARKER_BG_GREY;
import static jsky.app.ot.util.OtColor.LIGHT_GREY;

/**
 * This is the editor for Offset Iterator component.  It allows a list of
 * offset positions to be entered and ordered.
 *
 * @see edu.gemini.spModel.target.offset.OffsetPos
 */
public abstract class AbstractOffsetPosListEditor<P extends OffsetPosBase> extends OtItemEditor<ISPSeqComponent, SeqRepeatOffsetBase<P>> {
    // Shared editor configuration supplied by the concrete subclass via init(config).
    private OffsetPosListEditorConfig<P> config;
    private AbstractOffsetPosTableEditor<P> posTableCtrl;
    // The target component whose TARGET_ENV_PROP we are currently listening to;
    // tracked so cleanup() can detach the listener before re-attaching.
    private TargetObsComp oldTargetObsComp;
    // Vertical toggle on the right edge that shows/hides the advanced guiding panel.
    private final JToggleButton advancedButton;
    private final AdvancedGuiderSelectionEditor<P> guiderSelectionEditor;
    private JPanel editorPanel;

    // Refreshes guider columns whenever the target environment changes
    // (e.g. targets added/removed in the TPE).
    private final PropertyChangeListener targetListWatcher = new PropertyChangeListener() {
        public void propertyChange(PropertyChangeEvent evt) {
            updateReferencedGuiders();
        }
    };

    // Pushes edits of the title text field straight into the data object.
    // Temporarily removed (see trackTitleChanges) while the editor itself
    // writes the title, to avoid a feedback loop.
    private final DocumentListener titleTextListener = new AbstractDocumentListener() {
        public void textChanged(DocumentEvent docEvent, String newText) {
            getDataObject().setTitle(newText);
        }
    };

    /**
     * The constructor initializes the user interface.
     */
    protected AbstractOffsetPosListEditor() {
        advancedButton = new JToggleButton("Advanced Guiding Options") {{
            setUI(new RotatedButtonUI(RotatedButtonUI.Orientation.topToBottom));
            setBackground(OtColor.VERY_LIGHT_GREY);
        }};
        guiderSelectionEditor = new AdvancedGuiderSelectionEditor<P>();
    }

    /**
     * Builds the widget hierarchy around the given configuration.  Must be
     * called by subclasses before the editor is shown.
     */
    protected void init(final OffsetPosListEditorConfig<P> config) {
        this.config = config;
        trackTitleChanges(true);
        posTableCtrl = config.getTableEditor();

        // The content panel holds the editors and the advanced guiding
        // selection options (when opened).
        final JPanel content = new JPanel(new BorderLayout(10, 0)) {{
            add(config.getPan(), BorderLayout.CENTER);
        }};

        // The main container panel for everything.  Holds the content panel
        // and the advanced guiding button.
        editorPanel = new JPanel(new GridBagLayout()) {{
            setBorder(BorderFactory.createEmptyBorder(10, 5, 5, 5));
            add(content, new GridBagConstraints() {{
                gridx = 0;
                gridy = 0;
                weightx = 1.0;
                weighty = 1.0;
                fill = BOTH;
                anchor = NORTHWEST;
            }});
            add(advancedButton, new GridBagConstraints() {{
                gridx = 1;
                gridy = 0;
                anchor = NORTH;
                insets = new Insets(0, 2, 0, 0);
            }});
        }};

        // Toggling the button adds/removes the (pre-wrapped) guider selection
        // panel on the east side of the content panel.
        final JPanel wrap = wrap(guiderSelectionEditor.pan);
        advancedButton.addActionListener(new ActionListener() {
            @Override public void actionPerformed(ActionEvent e) {
                final JToggleButton tb = (JToggleButton) e.getSource();
                if (tb.isSelected()) {
                    tb.setBackground(OtColor.LIGHT_ORANGE);
                    content.add(wrap, BorderLayout.EAST);
                } else {
                    tb.setBackground(OtColor.VERY_LIGHT_GREY);
                    content.remove(wrap);
                }
                content.validate();
            }
        });
    }

    // Attaches or detaches the title listener.  Always removes first so the
    // listener is never registered twice.
    private void trackTitleChanges(boolean enabled) {
        final Document doc = config.getPan().getTitleTextField().getDocument();
        doc.removeDocumentListener(titleTextListener);
        if (enabled) {
            doc.addDocumentListener(titleTextListener);
        }
    }

    /**
     * Return the window containing the editor
     */
    public JPanel getWindow() {
        return editorPanel;
    }

    /**
     * Apply any changes made in this editor.
     */
    public void afterApply() {
    }

    // Moves the target-environment listener from the previously edited target
    // component to the current one.
    @Override protected void cleanup() {
        if (oldTargetObsComp != null) {
            oldTargetObsComp.removePropertyChangeListener(TargetObsComp.TARGET_ENV_PROP, targetListWatcher);
        }
        oldTargetObsComp = getContextTargetObsCompDataObject();
        if (oldTargetObsComp != null) {
            oldTargetObsComp.addPropertyChangeListener(TargetObsComp.TARGET_ENV_PROP, targetListWatcher);
        }
    }

    /**
     * Set the data object corresponding to this editor.
     */
    public void init() {
        //noinspection unchecked
        final IOffsetPosListProvider<P> sro = getDataObject();

        // Set the title.  Listener is suspended so setting the text does not
        // write straight back into the data object.
        trackTitleChanges(false);
        config.getPan().getTitleTextField().setText(sro.getTitle());
        trackTitleChanges(true);

        // Get the current offset list and fill in the table widget
        final OffsetPosList<P> opl = getDataObject().getPosList();
        guiderSelectionEditor.setPosList(opl);

        // need to know if this pos list is editable but the init() method is
        // called from OtItemEditor before the editable state is set up, so
        // compute it here to pass along to the editors.
        final boolean editable = OTOptions.areRootAndCurrentObsIfAnyEditable(getProgram(), getContextObservation());
        posTableCtrl.setPositionList(this, getIssPort(), editable);
        config.getPosEditor().setPositionList(this, editable);

        // Remember the selection
        final List<P> selList = OffsetPosSelection.apply(getNode()).selectedPositions(opl);

        config.getPosEditor().setIssPort(getIssPort());
        updateReferencedGuiders(); // restructures the table, losing the selection

        // Make sure that something gets selected, if the table isn't empty
        if ((selList.size() == 0) && (opl.size() > 0)) {
            selList.add(opl.getPositionAt(0));
        }
        OffsetPosSelection.select(opl, selList).commit(getNode());
    }

    // Returns the instrument's ISS port when the context instrument provides
    // one, otherwise the default port.
    private IssPort getIssPort() {
        final SPInstObsComp inst = getContextInstrumentDataObject();
        return (inst instanceof IssPortProvider) ? ((IssPortProvider) inst).getIssPort() : IssPort.DEFAULT;
    }

    /**
     * Recomputes which guide probes are referenced/available/lacking a primary
     * target and pushes that state into the table, position editor, and
     * advanced guider selection editor.
     */
    protected void updateReferencedGuiders() {
        // Figure out which of the possible wfs types are in use and the tags
        // that correspond.
        Option<TargetEnvironment> envOpt = None.instance();
        Set<GuideProbe> referenced = Collections.emptySet();
        final TargetObsComp toc = getContextTargetObsCompDataObject();
        if (toc != null) {
            envOpt = new Some<TargetEnvironment>(toc.getTargetEnvironment());
            referenced = envOpt.getValue().getOrCreatePrimaryGuideGroup().getReferencedGuiders();
        }

        // Make sure that the position links are in sync with the referenced
        // targets.  When editing the offset component and adding targets via
        // the TPE, links will be missing since the target component changes
        // haven't been saved yet.  GuideSync will keep this up-to-date when
        // changes to the target env are saved, but the UI doesn't update if
        // you're editing a position list when the target list is updated via
        // the TPE or some other means.
        // OffsetPosList<P> opl = config.getPosEditor().getPositionList();
        // GuideSync.updatePosList(opl, envOpt, oldEnv);

        final ISPObservation ctxObs = getContextObservation();
        final Set<GuideProbe> available = (ctxObs == null) ? Collections.<GuideProbe>emptySet() : GuideProbeUtil.instance.getAvailableGuiders(getContextObservation());

        // Guiders that are referenced but have no primary guide star selected.
        final Set<GuideProbe> noPrimary = new HashSet<GuideProbe>();
        if (!envOpt.isEmpty()) {
            for (GuideProbeTargets gt : envOpt.getValue().getOrCreatePrimaryGuideGroup()) {
                if (gt.getPrimary().isEmpty()) {
                    noPrimary.add(gt.getGuider());
                }
            }
        }

        // Update the table to show columns for each in-use category.
        guiderSelectionEditor.setAvailableGuiders(available);
        config.getTableEditor().syncGuideState(referenced, available, noPrimary);
        config.getPosEditor().syncGuideState(available, noPrimary);
    }

    // Wraps the tracking details editor with a border and a lighter background
    // so that it shows.
    private static JPanel wrap(final JPanel pan) {
        return new JPanel(new BorderLayout()) {{
            setBackground(LIGHT_GREY);
            setBorder(BorderFactory.createCompoundBorder(
                BorderFactory.createLineBorder(DARKER_BG_GREY),
                BorderFactory.createEmptyBorder(5, 5, 5, 5)
            ));
            add(pan, BorderLayout.CENTER);
            setMinimumSize(new Dimension(200, 0));
            setPreferredSize(new Dimension(200, 0));
        }};
    }
}
bsd-3-clause
hispindia/dhis2-Core
dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/dataset/DefaultDataSetService.java
14703
/*
 * Copyright (c) 2004-2022, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.hisp.dhis.dataset;

import static com.google.common.base.Preconditions.checkNotNull;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.dataapproval.DataApprovalService;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataentryform.DataEntryForm;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.query.QueryParserException;
import org.hisp.dhis.security.Authorities;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.google.common.collect.Lists;

/**
 * Default {@link DataSetService} implementation.  Delegates persistence to
 * {@link DataSetStore} and {@link LockExceptionStore}, and combines approval
 * state, expiry, and lock exceptions to answer lock-status queries.
 *
 * @author Lars Helge Overland
 */
@Service( "org.hisp.dhis.dataset.DataSetService" )
public class DefaultDataSetService
    implements DataSetService
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private final DataSetStore dataSetStore;

    private final LockExceptionStore lockExceptionStore;

    private final DataApprovalService dataApprovalService;

    // Final for consistency with the other constructor-injected dependencies.
    private final CurrentUserService currentUserService;

    public DefaultDataSetService( DataSetStore dataSetStore, LockExceptionStore lockExceptionStore,
        @Lazy DataApprovalService dataApprovalService, CurrentUserService currentUserService )
    {
        checkNotNull( dataSetStore );
        checkNotNull( lockExceptionStore );
        checkNotNull( dataApprovalService );
        checkNotNull( currentUserService );

        this.dataSetStore = dataSetStore;
        this.lockExceptionStore = lockExceptionStore;
        this.dataApprovalService = dataApprovalService;
        this.currentUserService = currentUserService;
    }

    // -------------------------------------------------------------------------
    // DataSet
    // -------------------------------------------------------------------------

    @Override
    @Transactional
    public long addDataSet( DataSet dataSet )
    {
        dataSetStore.save( dataSet );
        return dataSet.getId();
    }

    @Override
    @Transactional
    public void updateDataSet( DataSet dataSet )
    {
        dataSetStore.update( dataSet );
    }

    @Override
    @Transactional
    public void deleteDataSet( DataSet dataSet )
    {
        dataSetStore.delete( dataSet );
    }

    @Override
    @Transactional( readOnly = true )
    public DataSet getDataSet( long id )
    {
        return dataSetStore.get( id );
    }

    @Override
    @Transactional( readOnly = true )
    public DataSet getDataSet( String uid )
    {
        return dataSetStore.getByUid( uid );
    }

    @Override
    @Transactional( readOnly = true )
    public DataSet getDataSetNoAcl( String uid )
    {
        return dataSetStore.getByUidNoAcl( uid );
    }

    @Override
    @Transactional( readOnly = true )
    public List<DataSet> getDataSetsByDataEntryForm( DataEntryForm dataEntryForm )
    {
        return dataSetStore.getDataSetsByDataEntryForm( dataEntryForm );
    }

    @Override
    @Transactional( readOnly = true )
    public List<DataSet> getAllDataSets()
    {
        return dataSetStore.getAll();
    }

    @Override
    @Transactional( readOnly = true )
    public List<DataSet> getDataSetsByPeriodType( PeriodType periodType )
    {
        return dataSetStore.getDataSetsByPeriodType( periodType );
    }

    @Override
    @Transactional( readOnly = true )
    public List<DataSet> getUserDataRead( User user )
    {
        if ( user == null )
        {
            return Lists.newArrayList();
        }

        // Superusers can read everything; otherwise apply data-read sharing.
        return user.isSuper() ? getAllDataSets() : dataSetStore.getDataReadAll( user );
    }

    @Override
    @Transactional( readOnly = true )
    public List<DataSet> getAllDataRead()
    {
        User user = currentUserService.getCurrentUser();

        return getUserDataRead( user );
    }

    @Override
    @Transactional( readOnly = true )
    public List<DataSet> getAllDataWrite()
    {
        User user = currentUserService.getCurrentUser();

        return getUserDataWrite( user );
    }

    @Override
    @Transactional( readOnly = true )
    public List<DataSet> getUserDataWrite( User user )
    {
        if ( user == null )
        {
            return Lists.newArrayList();
        }

        // Superusers can write everything; otherwise apply data-write sharing.
        return user.isSuper() ? getAllDataSets() : dataSetStore.getDataWriteAll( user );
    }

    @Override
    @Transactional( readOnly = true )
    public List<DataSet> getDataSetsNotAssignedToOrganisationUnits()
    {
        return dataSetStore.getDataSetsNotAssignedToOrganisationUnits();
    }

    // -------------------------------------------------------------------------
    // DataSet LockExceptions
    // -------------------------------------------------------------------------

    @Override
    @Transactional
    public long addLockException( LockException lockException )
    {
        lockExceptionStore.save( lockException );

        return lockException.getId();
    }

    @Override
    @Transactional
    public void updateLockException( LockException lockException )
    {
        lockExceptionStore.update( lockException );
    }

    @Override
    @Transactional
    public void deleteLockException( LockException lockException )
    {
        lockExceptionStore.delete( lockException );
    }

    @Override
    @Transactional( readOnly = true )
    public LockException getLockException( long id )
    {
        return lockExceptionStore.get( id );
    }

    @Override
    @Transactional( readOnly = true )
    public int getLockExceptionCount()
    {
        return lockExceptionStore.getCount();
    }

    @Override
    @Transactional( readOnly = true )
    public List<LockException> getAllLockExceptions()
    {
        return lockExceptionStore.getAll();
    }

    @Override
    @Transactional( readOnly = true )
    public List<LockException> getLockExceptionsBetween( int first, int max )
    {
        return lockExceptionStore.getAllOrderedName( first, max );
    }

    @Override
    @Transactional( readOnly = true )
    public List<LockException> getLockExceptionCombinations()
    {
        return lockExceptionStore.getCombinations();
    }

    /**
     * Approval takes precedence over locking: an approved data set is reported
     * as {@link LockStatus#APPROVED} even if it would also be locked.
     */
    @Override
    @Transactional( readOnly = true )
    public LockStatus getLockStatus( User user, DataSet dataSet, Period period, OrganisationUnit organisationUnit,
        CategoryOptionCombo attributeOptionCombo, Date now )
    {
        if ( dataApprovalService.isApproved( dataSet.getWorkflow(), period, organisationUnit, attributeOptionCombo ) )
        {
            return LockStatus.APPROVED;
        }

        if ( isLocked( user, dataSet, period, organisationUnit, now ) )
        {
            return LockStatus.LOCKED;
        }

        return LockStatus.OPEN;
    }

    /**
     * As above, but when {@code useOrgUnitChildren} is set the status is
     * derived from the children of the given unit: the first non-open child
     * status wins.
     */
    @Override
    @Transactional( readOnly = true )
    public LockStatus getLockStatus( User user, DataSet dataSet, Period period, OrganisationUnit organisationUnit,
        CategoryOptionCombo attributeOptionCombo, Date now, boolean useOrgUnitChildren )
    {
        if ( !useOrgUnitChildren )
        {
            return getLockStatus( user, dataSet, period, organisationUnit, attributeOptionCombo, now );
        }

        if ( organisationUnit == null || !organisationUnit.hasChild() )
        {
            return LockStatus.OPEN;
        }

        for ( OrganisationUnit child : organisationUnit.getChildren() )
        {
            LockStatus childLockStatus = getLockStatus( user, dataSet, period, child, attributeOptionCombo, now );

            if ( !childLockStatus.isOpen() )
            {
                return childLockStatus;
            }
        }

        return LockStatus.OPEN;
    }

    /**
     * Lock status for a single data element: expiry applies unless the user
     * holds F_EDIT_EXPIRED or a lock exception exists; approval is checked
     * against the element's approval data set, if any.
     */
    @Override
    @Transactional( readOnly = true )
    public LockStatus getLockStatus( User user, DataElement dataElement, Period period,
        OrganisationUnit organisationUnit, CategoryOptionCombo attributeOptionCombo, Date now )
    {
        if ( user == null || !user.isAuthorized( Authorities.F_EDIT_EXPIRED.getAuthority() ) )
        {
            now = now != null ? now : new Date();

            boolean expired = dataElement.isExpired( period, now );

            if ( expired && lockExceptionStore.getCount( dataElement, period, organisationUnit ) == 0L )
            {
                return LockStatus.LOCKED;
            }
        }

        DataSet dataSet = dataElement.getApprovalDataSet();

        if ( dataSet == null )
        {
            return LockStatus.OPEN;
        }

        if ( dataApprovalService.isApproved( dataSet.getWorkflow(), period, organisationUnit, attributeOptionCombo ) )
        {
            return LockStatus.APPROVED;
        }

        return LockStatus.OPEN;
    }

    @Override
    @Transactional
    public void deleteLockExceptionCombination( DataSet dataSet, Period period )
    {
        lockExceptionStore.deleteCombination( dataSet, period );
    }

    @Override
    @Transactional
    public void deleteLockExceptionCombination( DataSet dataSet, Period period, OrganisationUnit organisationUnit )
    {
        lockExceptionStore.deleteCombination( dataSet, period, organisationUnit );
    }

    @Override
    @Transactional
    public void deleteLockExceptions( OrganisationUnit organisationUnit )
    {
        lockExceptionStore.delete( organisationUnit );
    }

    @Override
    @Transactional( readOnly = true )
    public boolean isLocked( User user, DataSet dataSet, Period period, OrganisationUnit organisationUnit, Date now )
    {
        return dataSet.isLocked( user, period, now )
            && lockExceptionStore.getCount( dataSet, period, organisationUnit ) == 0L;
    }

    /**
     * Applies "property:operator:value" filters (organisationUnit.id,
     * dataSet.id, period) to the full list of lock exceptions.  This is a pure
     * query, hence read-only (previously annotated as a write transaction).
     *
     * @throws QueryParserException when a filter does not have three parts.
     */
    @Override
    @Transactional( readOnly = true )
    public List<LockException> filterLockExceptions( List<String> filters )
    {
        List<LockException> lockExceptions = getAllLockExceptions();
        Set<LockException> returnList = new HashSet<>( lockExceptions );

        for ( String filter : filters )
        {
            String[] split = filter.split( ":" );

            if ( split.length != 3 )
            {
                throw new QueryParserException( "Invalid filter: " + filter );
            }

            if ( "organisationUnit.id".equalsIgnoreCase( split[0] ) )
            {
                returnList.retainAll( getLockExceptionByOrganisationUnit( split[1], split[2], returnList ) );
            }

            if ( "dataSet.id".equalsIgnoreCase( split[0] ) )
            {
                returnList.retainAll( getLockExceptionByDataSet( split[1], split[2], returnList ) );
            }

            if ( "period".equalsIgnoreCase( split[0] ) )
            {
                returnList.retainAll( getLockExceptionByPeriod( split[1], split[2], returnList ) );
            }
        }

        return new ArrayList<>( returnList );
    }

    // Keeps only lock exceptions whose organisation unit UID matches the filter.
    private List<LockException> getLockExceptionByOrganisationUnit( String operator, String orgUnitIds,
        Collection<LockException> lockExceptions )
    {
        List<String> ids = parseIdFromString( orgUnitIds, operator );

        return lockExceptions.stream()
            .filter( lockException -> ids.contains( lockException.getOrganisationUnit().getUid() ) )
            .collect( Collectors.toList() );
    }

    // Keeps only lock exceptions whose data set UID matches the filter.
    private List<LockException> getLockExceptionByDataSet( String operator, String dataSetIds,
        Collection<LockException> lockExceptions )
    {
        List<String> ids = parseIdFromString( dataSetIds, operator );

        return lockExceptions.stream()
            .filter( lockException -> ids.contains( lockException.getDataSet().getUid() ) )
            .collect( Collectors.toList() );
    }

    // Keeps only lock exceptions whose ISO period matches the filter.
    private List<LockException> getLockExceptionByPeriod( String operator, String periods,
        Collection<LockException> lockExceptions )
    {
        List<String> ids = parseIdFromString( periods, operator );

        return lockExceptions.stream()
            .filter( lockException -> ids.contains( lockException.getPeriod().getIsoDate() ) )
            .collect( Collectors.toList() );
    }

    /**
     * Parses "in:[a,b,c]" into a list of ids, or "eq:value" into a singleton
     * list.  Note: an unrecognized operator yields an empty list, which makes
     * the corresponding filter match nothing rather than fail.
     *
     * @throws QueryParserException when the "in" value is not bracketed.
     */
    private List<String> parseIdFromString( String input, String operator )
    {
        List<String> ids = new ArrayList<>();

        if ( "in".equalsIgnoreCase( operator ) )
        {
            if ( input.startsWith( "[" ) && input.endsWith( "]" ) )
            {
                String[] split = input.substring( 1, input.length() - 1 ).split( "," );
                Collections.addAll( ids, split );
            }
            else
            {
                throw new QueryParserException( "Invalid query: " + input );
            }
        }
        else if ( "eq".equalsIgnoreCase( operator ) )
        {
            ids.add( input );
        }

        return ids;
    }
}
bsd-3-clause
Kolbeinsvik/dhis2-android-sdk
core/src/main/java/org/hisp/dhis/android/sdk/core/network/SessionManager.java
2414
/*
 * Copyright (c) 2015, University of Oslo
 *
 * All rights reserved.
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.hisp.dhis.android.sdk.core.network;

import org.hisp.dhis.android.sdk.core.models.ResourceType;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

/**
 * Process-wide registry of which {@link ResourceType}s have been synced during
 * the current session.
 *
 * Thread-safe: the previous implementation used an unsynchronized lazy
 * getInstance() check (which can create two instances under concurrent first
 * access) and an unguarded HashSet.  The instance is now created eagerly —
 * class initialization is guaranteed thread-safe by the JVM — and the set is
 * wrapped with Collections.synchronizedSet.
 */
public final class SessionManager {
    // Eager singleton; construction is trivial, so there is no benefit to lazy init.
    private static final SessionManager INSTANCE = new SessionManager();

    // Resource types marked as synced in this session.
    private final Set<ResourceType> mResources;

    private SessionManager() {
        mResources = Collections.synchronizedSet(new HashSet<ResourceType>());
    }

    /** Returns the single shared instance. */
    public static SessionManager getInstance() {
        return INSTANCE;
    }

    /** Forgets all synced markers (e.g. on logout). */
    public void delete() {
        mResources.clear();
    }

    /** Marks the given resource type as synced for this session. */
    public void setResourceTypeSynced(ResourceType resourceType) {
        mResources.add(resourceType);
    }

    /** Returns true when the given resource type was marked synced this session. */
    public boolean isResourceTypeSynced(ResourceType resourceType) {
        return mResources.contains(resourceType);
    }
}
bsd-3-clause
scheib/chromium
content/public/android/java/src/org/chromium/content/browser/framehost/RenderFrameHostImpl.java
11180
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.content.browser.framehost; import androidx.annotation.Nullable; import org.chromium.base.Callback; import org.chromium.base.UnguessableToken; import org.chromium.base.annotations.CalledByNative; import org.chromium.base.annotations.JNINamespace; import org.chromium.base.annotations.NativeMethods; import org.chromium.blink.mojom.AuthenticatorStatus; import org.chromium.content_public.browser.GlobalRenderFrameHostId; import org.chromium.content_public.browser.LifecycleState; import org.chromium.content_public.browser.PermissionsPolicyFeature; import org.chromium.content_public.browser.RenderFrameHost; import org.chromium.mojo.bindings.Interface; import org.chromium.mojo.bindings.InterfaceRequest; import org.chromium.mojo.system.Pair; import org.chromium.mojo.system.impl.CoreImpl; import org.chromium.url.GURL; import org.chromium.url.Origin; import java.util.Arrays; import java.util.Collections; import java.util.List; /** * The RenderFrameHostImpl Java wrapper to allow communicating with the native RenderFrameHost * object. */ @JNINamespace("content") public class RenderFrameHostImpl implements RenderFrameHost { private long mNativeRenderFrameHostAndroid; // mDelegate can be null. 
private final RenderFrameHostDelegate mDelegate; private final boolean mIncognito; private final GlobalRenderFrameHostId mRenderFrameHostId; private RenderFrameHostImpl(long nativeRenderFrameHostAndroid, RenderFrameHostDelegate delegate, boolean isIncognito, int renderProcessId, int renderFrameId) { mNativeRenderFrameHostAndroid = nativeRenderFrameHostAndroid; mDelegate = delegate; mIncognito = isIncognito; mRenderFrameHostId = new GlobalRenderFrameHostId(renderProcessId, renderFrameId); mDelegate.renderFrameCreated(this); } @CalledByNative private static RenderFrameHostImpl create(long nativeRenderFrameHostAndroid, RenderFrameHostDelegate delegate, boolean isIncognito, int renderProcessId, int renderFrameId) { return new RenderFrameHostImpl(nativeRenderFrameHostAndroid, delegate, isIncognito, renderProcessId, renderFrameId); } @CalledByNative private void clearNativePtr() { mNativeRenderFrameHostAndroid = 0; mDelegate.renderFrameDeleted(this); } @CalledByNative private long getNativePointer() { return mNativeRenderFrameHostAndroid; } /** * Get the delegate associated with this RenderFrameHost. * * @return The delegate associated with this RenderFrameHost. 
*/ public RenderFrameHostDelegate getRenderFrameHostDelegate() { return mDelegate; } public long getNativePtr() { return mNativeRenderFrameHostAndroid; } @Override @Nullable public GURL getLastCommittedURL() { if (mNativeRenderFrameHostAndroid == 0) return null; return RenderFrameHostImplJni.get().getLastCommittedURL( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); } @Override @Nullable public Origin getLastCommittedOrigin() { if (mNativeRenderFrameHostAndroid == 0) return null; return RenderFrameHostImplJni.get().getLastCommittedOrigin( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); } @Override public void getCanonicalUrlForSharing(Callback<GURL> callback) { if (mNativeRenderFrameHostAndroid == 0) { callback.onResult(null); return; } RenderFrameHostImplJni.get().getCanonicalUrlForSharing( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this, callback); } @Override public List<RenderFrameHost> getAllRenderFrameHosts() { if (mNativeRenderFrameHostAndroid == 0) return null; RenderFrameHost[] frames = RenderFrameHostImplJni.get().getAllRenderFrameHosts( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); return Collections.unmodifiableList(Arrays.asList(frames)); } @Override public boolean isFeatureEnabled(@PermissionsPolicyFeature int feature) { return mNativeRenderFrameHostAndroid != 0 && RenderFrameHostImplJni.get().isFeatureEnabled( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this, feature); } /** * TODO(timloh): This function shouldn't really be on here. If we end up * needing more logic from the native BrowserContext, we should add a * wrapper for that and move this function there. 
*/ @Override public boolean isIncognito() { return mIncognito; } @Override public void notifyUserActivation() { if (mNativeRenderFrameHostAndroid == 0) return; RenderFrameHostImplJni.get().notifyUserActivation( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); } @Override public boolean signalCloseWatcherIfActive() { return RenderFrameHostImplJni.get().signalCloseWatcherIfActive( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); } @Override public boolean isRenderFrameCreated() { if (mNativeRenderFrameHostAndroid == 0) return false; return RenderFrameHostImplJni.get().isRenderFrameCreated( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); } @Override public <I extends Interface, P extends Interface.Proxy> P getInterfaceToRendererFrame( Interface.Manager<I, P> manager) { if (mNativeRenderFrameHostAndroid == 0) return null; Pair<P, InterfaceRequest<I>> result = manager.getInterfaceRequest(CoreImpl.getInstance()); RenderFrameHostImplJni.get().getInterfaceToRendererFrame(mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this, manager.getName(), result.second.passHandle().releaseNativeHandle()); return result.first; } @Override public void terminateRendererDueToBadMessage(int reason) { if (mNativeRenderFrameHostAndroid == 0) return; RenderFrameHostImplJni.get().terminateRendererDueToBadMessage( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this, reason); } /** * Return the AndroidOverlay routing token for this RenderFrameHostImpl. 
*/ @Nullable public UnguessableToken getAndroidOverlayRoutingToken() { if (mNativeRenderFrameHostAndroid == 0) return null; return RenderFrameHostImplJni.get().getAndroidOverlayRoutingToken( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); } @Override public boolean areInputEventsIgnored() { if (mNativeRenderFrameHostAndroid == 0) return false; return RenderFrameHostImplJni.get().isProcessBlocked( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); } @Override public WebAuthSecurityChecksResults performGetAssertionWebAuthSecurityChecks( String relyingPartyId, Origin effectiveOrigin, boolean isPaymentCredentialGetAssertion) { if (mNativeRenderFrameHostAndroid == 0) { return new WebAuthSecurityChecksResults( AuthenticatorStatus.UNKNOWN_ERROR, false /*unused*/); } return RenderFrameHostImplJni.get().performGetAssertionWebAuthSecurityChecks( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this, relyingPartyId, effectiveOrigin, isPaymentCredentialGetAssertion); } @CalledByNative private static RenderFrameHost.WebAuthSecurityChecksResults createWebAuthSecurityChecksResults( @AuthenticatorStatus.EnumType int securityCheckResult, boolean isCrossOrigin) { return new WebAuthSecurityChecksResults(securityCheckResult, isCrossOrigin); } @Override public int performMakeCredentialWebAuthSecurityChecks( String relyingPartyId, Origin effectiveOrigin, boolean isPaymentCredentialCreation) { if (mNativeRenderFrameHostAndroid == 0) return AuthenticatorStatus.UNKNOWN_ERROR; return RenderFrameHostImplJni.get().performMakeCredentialWebAuthSecurityChecks( mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this, relyingPartyId, effectiveOrigin, isPaymentCredentialCreation); } @Override public GlobalRenderFrameHostId getGlobalRenderFrameHostId() { return mRenderFrameHostId; } @Override @LifecycleState public int getLifecycleState() { if (mNativeRenderFrameHostAndroid == 0) return LifecycleState.PENDING_DELETION; return RenderFrameHostImplJni.get().getLifecycleState( 
mNativeRenderFrameHostAndroid, RenderFrameHostImpl.this); } @NativeMethods interface Natives { GURL getLastCommittedURL(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); Origin getLastCommittedOrigin( long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); void getCanonicalUrlForSharing(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller, Callback<GURL> callback); RenderFrameHost[] getAllRenderFrameHosts( long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); boolean isFeatureEnabled(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller, @PermissionsPolicyFeature int feature); UnguessableToken getAndroidOverlayRoutingToken( long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); void notifyUserActivation(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); boolean signalCloseWatcherIfActive( long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); boolean isRenderFrameCreated(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); void getInterfaceToRendererFrame(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller, String interfacename, int messagePipeRawHandle); void terminateRendererDueToBadMessage( long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller, int reason); boolean isProcessBlocked(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); RenderFrameHost.WebAuthSecurityChecksResults performGetAssertionWebAuthSecurityChecks( long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller, String relyingPartyId, Origin effectiveOrigin, boolean isPaymentCredentialGetAssertion); int performMakeCredentialWebAuthSecurityChecks(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller, String relyingPartyId, Origin effectiveOrigin, boolean isPaymentCredentialCreation); int getLifecycleState(long nativeRenderFrameHostAndroid, RenderFrameHostImpl caller); } }
bsd-3-clause
LWJGL-CI/lwjgl3
modules/lwjgl/llvm/src/generated/java/org/lwjgl/llvm/package-info.java
731
/* * Copyright LWJGL. All rights reserved. * License terms: https://www.lwjgl.org/license * MACHINE GENERATED FILE, DO NOT EDIT */ /** * Contains bindings to <a href="https://llvm.org/">LLVM</a>, a collection of modular and reusable compiler and toolchain technologies. * * <h3>UNSTABLE API</h3> * * <p>Until these bindings are sufficiently tested, this API should be considered unstable.</p> * * <h3>BINDINGS ONLY</h3> * * <p>LWJGL does not currently include pre-built LLVM/Clang binaries. The user must download or build LLVM separately and use * {@link org.lwjgl.system.Configuration Configuration} to point LWJGL to the appropriate binaries.</p> */ @org.lwjgl.system.NonnullDefault package org.lwjgl.llvm;
bsd-3-clause
lobo12/ormlite-core
src/main/java/com/j256/ormlite/dao/CloseableWrappedIterable.java
784
package com.j256.ormlite.dao; import java.io.Closeable; import java.io.IOException; /** * Extension to CloseableIterable which defines a class which has an iterator() method that returns a * {@link CloseableIterator} but also can be closed itself. This allows us to do something like this pattern: * * <pre> * CloseableWrappedIterable&lt;Foo&gt; wrapperIterable = fooDao.getCloseableIterable(); * try { * for (Foo foo : wrapperIterable) { * ... * } * } finally { * wrapperIterable.close(); * } * </pre> * * @author graywatson */ public interface CloseableWrappedIterable<T> extends CloseableIterable<T>, Closeable { /** * This will close the last iterator returned by the {@link #iterator()} method. */ public void close() throws IOException; }
isc
cinjoff/XChange-1
xchange-taurus/src/main/java/com/xeiam/xchange/taurus/Taurus.java
881
package com.xeiam.xchange.taurus; import java.io.IOException; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import com.xeiam.xchange.taurus.dto.marketdata.TaurusOrderBook; import com.xeiam.xchange.taurus.dto.marketdata.TaurusTicker; import com.xeiam.xchange.taurus.dto.marketdata.TaurusTransaction; @Path("/") @Produces(MediaType.APPLICATION_JSON) public interface Taurus { @GET @Path("order_book/") TaurusOrderBook getOrderBook() throws IOException; @GET @Path("ticker/") TaurusTicker getTicker() throws IOException; @GET @Path("transactions/") TaurusTransaction[] getTransactions() throws IOException; @GET @Path("transactions/") TaurusTransaction[] getTransactions(@QueryParam("time") Time time) throws IOException; enum Time { hour, minute } }
mit
lukegb/SpongeCommon
src/main/java/org/spongepowered/common/interfaces/IMixinScoreboard.java
1529
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.common.interfaces; import org.spongepowered.common.scoreboard.SpongeScoreboard; public interface IMixinScoreboard { void setSpongeScoreboard(SpongeScoreboard scoreboard); SpongeScoreboard getSpongeScoreboard(); boolean echoToSponge(); }
mit
Juffik/JavaRush-1
src/com/javarush/test/level11/lesson08/task02/Solution.java
956
package com.javarush.test.level11.lesson08.task02; /* Доступ есть Скрыть все внутренние переменные класса Cat, но только те, к которым остается доступ с помощью методов. */ public class Solution { public static void main(String[] args) { } public class Cat { private String name; private int age; public int weight; public Cat(String name, int age, int weight) { this.name = name; this.age = age; this.weight = weight; } public String getName() { return name; } public void setName(String name) { this.name = name; } public int getAge() { return age; } public void setAge(int age) { this.age = age; } } }
mit
AdamLuptak/Time_MNA
library/src/test/java/com/orm/record/BooleanFieldTests.java
2869
package com.orm.record; import com.orm.app.ClientApp; import com.orm.dsl.BuildConfig; import com.orm.model.BooleanFieldAnnotatedModel; import com.orm.model.BooleanFieldExtendedModel; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricGradleTestRunner; import org.robolectric.annotation.Config; import static com.orm.SugarRecord.save; import static com.orm.SugarRecord.findById; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; @RunWith(RobolectricGradleTestRunner.class) @Config(sdk = 18, constants = BuildConfig.class, application = ClientApp.class, packageName = "com.orm.model", manifest = Config.NONE) public final class BooleanFieldTests { @Test public void nullBooleanExtendedTest() { save(new BooleanFieldExtendedModel()); BooleanFieldExtendedModel model = findById(BooleanFieldExtendedModel.class, 1); assertNull(model.getBoolean()); } @Test public void nullRawBooleanExtendedTest() { save(new BooleanFieldExtendedModel()); BooleanFieldExtendedModel model = findById(BooleanFieldExtendedModel.class, 1); assertEquals(false, model.getRawBoolean()); } @Test public void nullBooleanAnnotatedTest() { save(new BooleanFieldAnnotatedModel()); BooleanFieldAnnotatedModel model = findById(BooleanFieldAnnotatedModel.class, 1); assertNull(model.getBoolean()); } @Test public void nullRawBooleanAnnotatedTest() { save(new BooleanFieldAnnotatedModel()); BooleanFieldAnnotatedModel model = findById(BooleanFieldAnnotatedModel.class, 1); assertEquals(false, model.getRawBoolean()); } ////TODO check this method // @Test // public void objectBooleanExtendedTest() { // save(new BooleanFieldExtendedModel(true)); // BooleanFieldExtendedModel model = SugarRecord.findById(BooleanFieldExtendedModel.class, 1); // assertEquals(true, model.getBoolean()); // } @Test public void rawBooleanExtendedTest() { save(new BooleanFieldExtendedModel(true)); BooleanFieldExtendedModel model = findById(BooleanFieldExtendedModel.class, 1); 
assertEquals(true, model.getRawBoolean()); } // //TODO check this // @Test // public void objectBooleanAnnotatedTest() { // save(new BooleanFieldAnnotatedModel(true)); // BooleanFieldAnnotatedModel model = SugarRecord.findById(BooleanFieldAnnotatedModel.class, 1); // // if (null != model) { // assertEquals(true, model.getBoolean()); // } // } @Test public void rawBooleanAnnotatedTest() { save(new BooleanFieldAnnotatedModel(true)); BooleanFieldAnnotatedModel model = findById(BooleanFieldAnnotatedModel.class, 1); assertEquals(true, model.getRawBoolean()); } }
mit
tobyclemson/msci-project
vendor/colt-1.2.0/src/cern/jet/random/AbstractDiscreteDistribution.java
1166
/* Copyright © 1999 CERN - European Organization for Nuclear Research. Permission to use, copy, modify, distribute and sell this software and its documentation for any purpose is hereby granted without fee, provided that the above copyright notice appear in all copies and that both that copyright notice and this permission notice appear in supporting documentation. CERN makes no representations about the suitability of this software for any purpose. It is provided "as is" without expressed or implied warranty. */ package cern.jet.random; /** * Abstract base class for all discrete distributions. * * @author wolfgang.hoschek@cern.ch * @version 1.0, 09/24/99 */ public abstract class AbstractDiscreteDistribution extends AbstractDistribution { /** * Makes this class non instantiable, but still let's others inherit from it. */ protected AbstractDiscreteDistribution() {} /** * Returns a random number from the distribution; returns <tt>(double) nextInt()</tt>. */ public double nextDouble() { return (double) nextInt(); } /** * Returns a random number from the distribution. */ public abstract int nextInt(); }
mit
selvasingh/azure-sdk-for-java
sdk/cognitiveservices/ms-azure-cs-luis-runtime/src/main/java/com/microsoft/azure/cognitiveservices/language/luis/runtime/models/APIErrorException.java
1431
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.cognitiveservices.language.luis.runtime.models; import com.microsoft.rest.RestException; import okhttp3.ResponseBody; import retrofit2.Response; /** * Exception thrown for an invalid response with APIError information. */ public class APIErrorException extends RestException { /** * Initializes a new instance of the APIErrorException class. * * @param message the exception message or the response content if a message is not available * @param response the HTTP response */ public APIErrorException(final String message, final Response<ResponseBody> response) { super(message, response); } /** * Initializes a new instance of the APIErrorException class. * * @param message the exception message or the response content if a message is not available * @param response the HTTP response * @param body the deserialized response body */ public APIErrorException(final String message, final Response<ResponseBody> response, final APIError body) { super(message, response, body); } @Override public APIError body() { return (APIError) super.body(); } }
mit
sohtsuka/Spring-MVC-Beginners-Guide-Second-Edition
Chp 12 code/webstore/src/main/java/com/packt/webstore/service/OrderService.java
145
package com.packt.webstore.service; import com.packt.webstore.domain.Order; public interface OrderService { Long saveOrder(Order order); }
mit
drbgfc/mdht
cda/tests/org.openhealthtools.mdht.uml.cda.ihe.tests/src/org/openhealthtools/mdht/uml/cda/ihe/tests/RectumSectionTest.java
6391
/******************************************************************************* * Copyright (c) 2011, 2012 Sean Muir and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Sean Muir (JKM Software) - initial API and implementation *******************************************************************************/ package org.openhealthtools.mdht.uml.cda.ihe.tests; import java.util.Map; import org.eclipse.emf.common.util.BasicDiagnostic; import org.eclipse.emf.ecore.EObject; import org.junit.Test; import org.openhealthtools.mdht.uml.cda.ihe.IHEFactory; import org.openhealthtools.mdht.uml.cda.ihe.RectumSection; import org.openhealthtools.mdht.uml.cda.ihe.operations.RectumSectionOperations; import org.openhealthtools.mdht.uml.cda.operations.CDAValidationTest; /** * <!-- begin-user-doc --> * A static utility class that provides operations related to '<em><b>Rectum Section</b></em>' model objects. 
* <!-- end-user-doc --> * * <p> * The following operations are supported: * <ul> * <li>{@link org.openhealthtools.mdht.uml.cda.ihe.RectumSection#validateRectumSectionTemplateId(org.eclipse.emf.common.util.DiagnosticChain, java.util.Map) <em>Validate Rectum Section Template Id</em>}</li> * <li>{@link org.openhealthtools.mdht.uml.cda.ihe.RectumSection#validateRectumSectionCode(org.eclipse.emf.common.util.DiagnosticChain, java.util.Map) <em>Validate Rectum Section Code</em>}</li> * <li>{@link org.openhealthtools.mdht.uml.cda.ihe.RectumSection#validateRectumSectionProblemEntry(org.eclipse.emf.common.util.DiagnosticChain, java.util.Map) <em>Validate Rectum Section Problem Entry</em>}</li> * <li>{@link org.openhealthtools.mdht.uml.cda.ihe.RectumSection#getProblemEntry() <em>Get Problem Entry</em>}</li> * </ul> * </p> * * @generated */ public class RectumSectionTest extends CDAValidationTest { /** * * @generated */ @Test public void testValidateRectumSectionTemplateId() { OperationsTestCase<RectumSection> validateRectumSectionTemplateIdTestCase = new OperationsTestCase<RectumSection>( "validateRectumSectionTemplateId", operationsForOCL.getOCLValue("VALIDATE_RECTUM_SECTION_TEMPLATE_ID__DIAGNOSTIC_CHAIN_MAP__EOCL_EXP"), objectFactory) { @Override protected void updateToFail(RectumSection target) { } @Override protected void updateToPass(RectumSection target) { target.init(); } @Override protected boolean validate(EObject objectToTest, BasicDiagnostic diagnostician, Map<Object, Object> map) { return RectumSectionOperations.validateRectumSectionTemplateId( (RectumSection) objectToTest, diagnostician, map); } }; validateRectumSectionTemplateIdTestCase.doValidationTest(); } /** * * @generated */ @Test public void testValidateRectumSectionCode() { OperationsTestCase<RectumSection> validateRectumSectionCodeTestCase = new OperationsTestCase<RectumSection>( "validateRectumSectionCode", operationsForOCL.getOCLValue("VALIDATE_RECTUM_SECTION_CODE__DIAGNOSTIC_CHAIN_MAP__EOCL_EXP"), 
objectFactory) { @Override protected void updateToFail(RectumSection target) { } @Override protected void updateToPass(RectumSection target) { target.init(); } @Override protected boolean validate(EObject objectToTest, BasicDiagnostic diagnostician, Map<Object, Object> map) { return RectumSectionOperations.validateRectumSectionCode( (RectumSection) objectToTest, diagnostician, map); } }; validateRectumSectionCodeTestCase.doValidationTest(); } /** * * @generated not */ @Test public void testValidateRectumSectionProblemEntry() { OperationsTestCase<RectumSection> validateRectumSectionProblemEntryTestCase = new OperationsTestCase<RectumSection>( "validateRectumSectionProblemEntry", operationsForOCL.getOCLValue("VALIDATE_RECTUM_SECTION_PROBLEM_ENTRY__DIAGNOSTIC_CHAIN_MAP__EOCL_EXP"), objectFactory) { @Override protected void updateToFail(RectumSection target) { target.init(); } @Override protected void updateToPass(RectumSection target) { target.addObservation(IHEFactory.eINSTANCE.createProblemEntry().init()); } @Override protected boolean validate(EObject objectToTest, BasicDiagnostic diagnostician, Map<Object, Object> map) { return RectumSectionOperations.validateRectumSectionProblemEntry( (RectumSection) objectToTest, diagnostician, map); } }; validateRectumSectionProblemEntryTestCase.doValidationTest(); } /** * * @generated */ @Test public void testGetProblemEntry() { RectumSection target = objectFactory.create(); target.getProblemEntry(); } /** * * @generated */ private static class OperationsForOCL extends RectumSectionOperations { public String getOCLValue(String fieldName) { String oclValue = null; try { oclValue = (String) this.getClass().getSuperclass().getDeclaredField(fieldName).get(this); } catch (Exception e) { oclValue = "NO OCL FOUND FOR PROPERTY " + fieldName; } return oclValue; } } /** * * @generated */ private static class ObjectFactory implements TestObjectFactory<RectumSection> { public RectumSection create() { return 
IHEFactory.eINSTANCE.createRectumSection(); } } /** * * @generated */ private static OperationsForOCL operationsForOCL = new OperationsForOCL(); /** * * @generated */ private static ObjectFactory objectFactory = new ObjectFactory(); /** * Tests Operations Constructor for 100% coverage * @generated */ private static class ConstructorTestClass extends RectumSectionOperations { }; /** * Tests Operations Constructor for 100% coverage * @generated */ @Test public void testConstructor() { @SuppressWarnings("unused") ConstructorTestClass constructorTestClass = new ConstructorTestClass(); } // testConstructor /** * * @generated */ @Override protected EObject getObjectToTest() { return null; } } // RectumSectionOperations
epl-1.0
riuvshin/che-plugins
plugin-java/che-plugin-java-ext-java/src/main/java/org/eclipse/che/ide/ext/java/messages/DependenciesUpdatedMessage.java
725
/******************************************************************************* * Copyright (c) 2012-2015 Codenvy, S.A. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Codenvy, S.A. - initial API and implementation *******************************************************************************/ package org.eclipse.che.ide.ext.java.messages; import com.google.gwt.webworker.client.messages.Message; /** * @author Evgen Vidolob */ public interface DependenciesUpdatedMessage extends Message { }
epl-1.0
TypeFox/che
ide/che-core-ide-app/src/main/java/org/eclipse/che/ide/part/editor/TabItemWithMarks.java
762
/* * Copyright (c) 2012-2017 Red Hat, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Red Hat, Inc. - initial API and implementation */ package org.eclipse.che.ide.part.editor; import org.eclipse.che.ide.api.parts.PartStackView; /** * Tab with error and warning marks * * @author Oleksii Orel */ public interface TabItemWithMarks extends PartStackView.TabItem { /** Add error mark for Tab title */ void setErrorMark(boolean isVisible); /** Add warning mark for Tab title */ void setWarningMark(boolean isVisible); }
epl-1.0
jboss-reddeer/reddeer
plugins/org.eclipse.reddeer.swt/src/org/eclipse/reddeer/swt/impl/button/FinishButton.java
1423
/******************************************************************************* * Copyright (c) 2017 Red Hat, Inc and others. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0. * * SPDX-License-Identifier: EPL-2.0 * * Contributors: * Red Hat, Inc - initial API and implementation *******************************************************************************/ package org.eclipse.reddeer.swt.impl.button; import org.eclipse.swt.SWT; import org.eclipse.reddeer.core.reference.ReferencedComposite; /** * FinishButton is simple button implementation for "Finish" button * @author Jiri Peterka * */ public class FinishButton extends PredefinedButton { /** * FinishButton default constructor. */ public FinishButton() { this(null); } /** * Instantiates new FinishButton * @param referencedComposite composite where button should be looked up */ public FinishButton(ReferencedComposite referencedComposite) { this(referencedComposite, 0); } /** * Instantiates new FinishButton * @param referencedComposite composite where button should be looked up * @param index index of finish button */ public FinishButton(ReferencedComposite referencedComposite, int index) { super(referencedComposite, index, "Finish", SWT.PUSH); } }
epl-1.0
paulianttila/openhab
bundles/binding/org.openhab.binding.owserver/src/main/java/org/openhab/binding/owserver/internal/OWServerGenericBindingProvider.java
8820
/** * Copyright (c) 2010-2014, openHAB.org and others. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.binding.owserver.internal; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.openhab.binding.owserver.OWServerBindingProvider; import org.openhab.core.binding.BindingConfig; import org.openhab.core.items.Item; import org.openhab.core.library.types.StringType; import org.openhab.core.types.Command; import org.openhab.model.item.binding.AbstractGenericBindingProvider; import org.openhab.model.item.binding.BindingConfigParseException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <p> * This class parses the EDS OWServer item binding data. It registers as a * {@link OWServerBindingProvider} service as well. * </p> * * <p>Here are some examples for valid binding configuration strings: * <ul> * <li><code>{ owserver="<serverId:F90000012A608428:PrimaryValue:2000" }</code></li> * <li><code>{ owserver="<serverId:53000000224EA612:Temperature:10000" }</code></li> * <li><code>{ owserver="<serverId:5A0010000021D57E:DewPoint:10000" }</code></li> * <li><code>{ owserver="<serverId:FC00000310120B1D:Counter_B:10000" }</code></li> * </ul> * * The 'serverId' referenced in the binding string is configured in the openhab.cfg file -: * owserver.serverId.host = 192.168.2.1 * * 'serverId' can be any alphanumeric string as long as it is the same in the binding and * configuration file. <b>NOTE</b>: The parameter is case sensitive! 
* * @author Chris Jackson * @since 1.3.0 */ public class OWServerGenericBindingProvider extends AbstractGenericBindingProvider implements OWServerBindingProvider { static final Logger logger = LoggerFactory.getLogger(OWServerGenericBindingProvider.class); /** * Artificial command for the owserver-in configuration */ protected static final Command IN_BINDING_KEY = StringType.valueOf("IN_BINDING"); /** {@link Pattern} which matches a binding configuration part */ private static final Pattern BASE_CONFIG_PATTERN = Pattern.compile("(<|>)([0-9.a-zA-Z]+:[0-9.a-zA-Z]+:[0-9._a-zA-Z]+:[0-9]+)"); /** {@link Pattern} which matches an In-Binding */ private static final Pattern IN_BINDING_PATTERN = Pattern.compile("([0-9.a-zA-Z]+):([0-9.a-zA-Z]+):([0-9._a-zA-Z]+):([0-9]+)"); /** * {@inheritDoc} */ public String getBindingType() { return "owserver"; } /** * @{inheritDoc} */ @Override public void validateItemType(Item item, String bindingConfig) throws BindingConfigParseException { } /** * {@inheritDoc} */ @Override public void processBindingConfiguration(String context, Item item, String bindingConfig) throws BindingConfigParseException { super.processBindingConfiguration(context, item, bindingConfig); if (bindingConfig != null) { OWServerBindingConfig config = parseBindingConfig(item, bindingConfig); addBindingConfig(item, config); } else { logger.warn("bindingConfig is NULL (item=" + item + ") -> process bindingConfig aborted!"); } } /** * Delegates parsing the <code>bindingConfig</code> with respect to the * first character (<code>&lt;</code> or <code>&gt;</code>) to the * specialized parsing methods * * @param item * @param bindingConfig * * @throws BindingConfigParseException */ protected OWServerBindingConfig parseBindingConfig(Item item, String bindingConfig) throws BindingConfigParseException { OWServerBindingConfig config = new OWServerBindingConfig(); config.itemType = item.getClass(); Matcher matcher = BASE_CONFIG_PATTERN.matcher(bindingConfig); if 
(!matcher.matches()) { throw new BindingConfigParseException("bindingConfig '" + bindingConfig + "' doesn't contain a valid binding configuration"); } matcher.reset(); while (matcher.find()) { String direction = matcher.group(1); String bindingConfigPart = matcher.group(2); if (direction.equals("<")) { config = parseInBindingConfig(item, bindingConfigPart, config); } else if (direction.equals(">")) { // for future use } else { throw new BindingConfigParseException("Unknown command given! Configuration must start with '<' or '>' "); } } return config; } /** * Parses a owserver-in configuration by using the regular expression * <code>([0-9.a-zA-Z]+:[0-9.a-zA-Z]+:[0-9._a-zA-Z]+:[0-9]+)</code>. Where the groups should * contain the following content: * <ul> * <li>1 - Server ID</li> * <li>2 - One Wire ROM ID</li> * <li>3 - Variable name</li> * <li>4 - Refresh Interval</li> * </ul> * * @param item * @param bindingConfig the config string to parse * @param config * * @return the filled {@link OWServerBindingConfig} * @throws BindingConfigParseException if the regular expression doesn't match * the given <code>bindingConfig</code> */ protected OWServerBindingConfig parseInBindingConfig(Item item, String bindingConfig, OWServerBindingConfig config) throws BindingConfigParseException { Matcher matcher = IN_BINDING_PATTERN.matcher(bindingConfig); if (!matcher.matches()) { throw new BindingConfigParseException("bindingConfig '" + bindingConfig + "' doesn't represent a valid in-binding-configuration. 
A valid configuration is matched by the RegExp '"+IN_BINDING_PATTERN+"'"); } matcher.reset(); OWServerBindingConfigElement configElement; while (matcher.find()) { configElement = new OWServerBindingConfigElement(); configElement.serverId = matcher.group(1); configElement.romId = matcher.group(2); configElement.name = matcher.group(3); configElement.refreshInterval = Integer.valueOf(matcher.group(4)).intValue(); logger.debug("OWSERVER: "+configElement); config.put(IN_BINDING_KEY, configElement); } return config; } /** * @{inheritDoc} */ @Override public Class<? extends Item> getItemType(String itemName) { OWServerBindingConfig config = (OWServerBindingConfig) bindingConfigs.get(itemName); return config != null ? config.itemType : null; } /** * {@inheritDoc} */ public String getServerId(String itemName) { OWServerBindingConfig config = (OWServerBindingConfig) bindingConfigs.get(itemName); return config != null && config.get(IN_BINDING_KEY) != null ? config.get(IN_BINDING_KEY).serverId : null; } /** * {@inheritDoc} */ public String getRomId(String itemName){ OWServerBindingConfig config = (OWServerBindingConfig) bindingConfigs.get(itemName); return config != null && config.get(IN_BINDING_KEY) != null ? config.get(IN_BINDING_KEY).romId : null; } /** * {@inheritDoc} */ public String getName(String itemName) { OWServerBindingConfig config = (OWServerBindingConfig) bindingConfigs.get(itemName); return config != null && config.get(IN_BINDING_KEY) != null ? config.get(IN_BINDING_KEY).name : null; } /** * {@inheritDoc} */ public int getRefreshInterval(String itemName) { OWServerBindingConfig config = (OWServerBindingConfig) bindingConfigs.get(itemName); return config != null && config.get(IN_BINDING_KEY) != null ? 
config.get(IN_BINDING_KEY).refreshInterval : 0; } /** * {@inheritDoc} */ public List<String> getInBindingItemNames() { List<String> inBindings = new ArrayList<String>(); for (String itemName : bindingConfigs.keySet()) { OWServerBindingConfig httpConfig = (OWServerBindingConfig) bindingConfigs.get(itemName); if (httpConfig.containsKey(IN_BINDING_KEY)) { inBindings.add(itemName); } } return inBindings; } /** * This is an internal data structure to map commands to * {@link OWServerBindingConfigElement }. There will be map like * <code>ON->OWServerBindingConfigElement</code> */ static class OWServerBindingConfig extends HashMap<Command, OWServerBindingConfigElement> implements BindingConfig { private static final long serialVersionUID = 946984678609385662L; /** generated serialVersion UID */ Class<? extends Item> itemType; } /** * This is an internal data structure to store information from the binding * config strings and use it to answer the requests to the HTTP binding * provider. */ static class OWServerBindingConfigElement implements BindingConfig { public String serverId; public String romId; public String name; public int refreshInterval; @Override public String toString() { return "OWServerBindingConfigElement [serverId=" + serverId + ", romId=" + romId + ", name=" + name + ", refreshInterval=" + refreshInterval + "]"; } } }
epl-1.0
geneos/adempiere
client/src/org/compiere/acct/AcctViewerData.java
16743
/****************************************************************************** * Product: Adempiere ERP & CRM Smart Business Solution * * Copyright (C) 1999-2006 ComPiere, Inc. All Rights Reserved. * * This program is free software; you can redistribute it and/or modify it * * under the terms version 2 of the GNU General Public License as published * * by the Free Software Foundation. This program is distributed in the hope * * that it will be useful, but WITHOUT ANY WARRANTY; without even the implied * * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * * with this program; if not, write to the Free Software Foundation, Inc., * * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. * * For the text or an alternative of this public license, you may reach us * * ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA * * or via info@compiere.org or http://www.compiere.org/license.html * *****************************************************************************/ package org.compiere.acct; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Properties; import java.util.logging.Level; import javax.swing.JComboBox; import org.compiere.model.MAcctSchema; import org.compiere.model.MAcctSchemaElement; import org.compiere.model.MFactAcct; import org.compiere.model.MLookupFactory; import org.compiere.model.MRefList; import org.compiere.report.core.RColumn; import org.compiere.report.core.RModel; import org.compiere.util.CLogger; import org.compiere.util.DB; import org.compiere.util.DisplayType; import org.compiere.util.Env; import org.compiere.util.KeyNamePair; import org.compiere.util.Language; import 
org.compiere.util.Msg; import org.compiere.util.ValueNamePair; /** * Account Viewer State - maintains State information for the Account Viewer * * @author Jorg Janke * @version $Id: AcctViewerData.java,v 1.3 2006/08/10 01:00:27 jjanke Exp $ * * @author Teo Sarca, SC ARHIPAC SERVICE SRL * <li>BF [ 1748449 ] Info Account - Posting Type is not translated * <li>BF [ 1778373 ] AcctViewer: data is not sorted proper */ class AcctViewerData { /** * Constructor * @param ctx context * @param windowNo window no * @param ad_Client_ID client * @param ad_Table_ID table */ public AcctViewerData (Properties ctx, int windowNo, int ad_Client_ID, int ad_Table_ID) { WindowNo = windowNo; AD_Client_ID = ad_Client_ID; if (AD_Client_ID == 0) AD_Client_ID = Env.getContextAsInt(Env.getCtx(), WindowNo, "AD_Client_ID"); if (AD_Client_ID == 0) AD_Client_ID = Env.getContextAsInt(Env.getCtx(), "AD_Client_ID"); AD_Table_ID = ad_Table_ID; // ASchemas = MAcctSchema.getClientAcctSchema(ctx, AD_Client_ID); ASchema = ASchemas[0]; } // AcctViewerData /** Window */ public int WindowNo; /** Client */ public int AD_Client_ID; /** All Acct Schema */ public MAcctSchema[] ASchemas = null; /** This Acct Schema */ public MAcctSchema ASchema = null; // Selection Info /** Document Query */ public boolean documentQuery = false; /** Acct Schema */ public int C_AcctSchema_ID = 0; /** Posting Type */ public String PostingType = ""; /** Organization */ public int AD_Org_ID = 0; /** Date From */ public Timestamp DateFrom = null; /** Date To */ public Timestamp DateTo = null; // Document Table Selection Info /** Table ID */ public int AD_Table_ID; /** Record */ public int Record_ID; /** Containing Column and Query */ public HashMap<String,String> whereInfo = new HashMap<String,String>(); /** Containing TableName and AD_Table_ID */ public HashMap<String,Integer> tableInfo = new HashMap<String,Integer>(); // Display Info /** Display Qty */ boolean displayQty = false; /** Display Source Currency */ boolean 
displaySourceAmt = false; /** Display Document info */ boolean displayDocumentInfo = false; // String sortBy1 = ""; String sortBy2 = ""; String sortBy3 = ""; String sortBy4 = ""; // boolean group1 = false; boolean group2 = false; boolean group3 = false; boolean group4 = false; /** Leasing Columns */ private int m_leadingColumns = 0; /** UserElement1 Reference */ private String m_ref1 = null; /** UserElement2 Reference */ private String m_ref2 = null; /** Logger */ private static CLogger log = CLogger.getCLogger(AcctViewerData.class); /** * Dispose */ public void dispose() { ASchemas = null; ASchema = null; // whereInfo.clear(); whereInfo = null; // Env.clearWinContext(WindowNo); } // dispose /************************************************************************** * Fill Accounting Schema * @param cb JComboBox to be filled */ protected void fillAcctSchema (JComboBox cb) { for (int i = 0; i < ASchemas.length; i++) cb.addItem(new KeyNamePair(ASchemas[i].getC_AcctSchema_ID(), ASchemas[i].getName())); } // fillAcctSchema /** * Fill Posting Type * @param cb JComboBox to be filled */ protected void fillPostingType (JComboBox cb) { int AD_Reference_ID = 125; ValueNamePair[] pt = MRefList.getList(Env.getCtx(), AD_Reference_ID, true); for (int i = 0; i < pt.length; i++) cb.addItem(pt[i]); } // fillPostingType /** * Fill Table with * ValueNamePair (TableName, translatedKeyColumnName) * and tableInfo with (TableName, AD_Table_ID) * and select the entry for AD_Table_ID * * @param cb JComboBox to be filled */ protected void fillTable (JComboBox cb) { ValueNamePair select = null; // String sql = "SELECT AD_Table_ID, TableName FROM AD_Table t " + "WHERE EXISTS (SELECT * FROM AD_Column c" + " WHERE t.AD_Table_ID=c.AD_Table_ID AND c.ColumnName='Posted')" + " AND IsView='N'"; try { PreparedStatement pstmt = DB.prepareStatement(sql, null); ResultSet rs = pstmt.executeQuery(); while (rs.next()) { int id = rs.getInt(1); String tableName = rs.getString(2); String name = 
Msg.translate(Env.getCtx(), tableName+"_ID"); // ValueNamePair pp = new ValueNamePair(tableName, name); cb.addItem(pp); tableInfo.put (tableName, new Integer(id)); if (id == AD_Table_ID) select = pp; } rs.close(); pstmt.close(); } catch (SQLException e) { log.log(Level.SEVERE, sql, e); } if (select != null) cb.setSelectedItem(select); } // fillTable /** * Fill Org * * @param cb JComboBox to be filled */ protected void fillOrg (JComboBox cb) { KeyNamePair pp = new KeyNamePair(0, ""); cb.addItem(pp); String sql = "SELECT AD_Org_ID, Name FROM AD_Org WHERE AD_Client_ID=? ORDER BY Value"; try { PreparedStatement pstmt = DB.prepareStatement(sql, null); pstmt.setInt(1, AD_Client_ID); ResultSet rs = pstmt.executeQuery(); while (rs.next()) cb.addItem(new KeyNamePair(rs.getInt(1), rs.getString(2))); rs.close(); pstmt.close(); } catch (SQLException e) { log.log(Level.SEVERE, sql, e); } } // fillOrg /** * Get Button Text * * @param tableName table * @param columnName column * @param selectSQL sql * @return Text on button */ protected String getButtonText (String tableName, String columnName, String selectSQL) { // SELECT (<embedded>) FROM tableName avd WHERE avd.<selectSQL> StringBuffer sql = new StringBuffer ("SELECT ("); Language language = Env.getLanguage(Env.getCtx()); sql.append(MLookupFactory.getLookup_TableDirEmbed(language, columnName, "avd")) .append(") FROM ").append(tableName).append(" avd WHERE avd.").append(selectSQL); String retValue = "<" + selectSQL + ">"; try { Statement stmt = DB.createStatement(); ResultSet rs = stmt.executeQuery(sql.toString()); if (rs.next()) retValue = rs.getString(1); rs.close(); stmt.close(); } catch (SQLException e) { log.log(Level.SEVERE, sql.toString(), e); } return retValue; } // getButtonText /************************************************************************** /** * Create Query and submit * @return Report Model */ protected RModel query() { // Set Where Clause StringBuffer whereClause = new StringBuffer(); // Add 
Organization if (C_AcctSchema_ID != 0) whereClause.append(RModel.TABLE_ALIAS) .append(".C_AcctSchema_ID=").append(C_AcctSchema_ID); // Posting Type Selected if (PostingType != null && PostingType.length() > 0) { if (whereClause.length() > 0) whereClause.append(" AND "); whereClause.append(RModel.TABLE_ALIAS) .append(".PostingType='").append(PostingType).append("'"); } // if (documentQuery) { if (whereClause.length() > 0) whereClause.append(" AND "); whereClause.append(RModel.TABLE_ALIAS).append(".AD_Table_ID=").append(AD_Table_ID) .append(" AND ").append(RModel.TABLE_ALIAS).append(".Record_ID=").append(Record_ID); } else { // get values (Queries) Iterator<String> it = whereInfo.values().iterator(); while (it.hasNext()) { String where = (String)it.next(); if (where != null && where.length() > 0) // add only if not empty { if (whereClause.length() > 0) whereClause.append(" AND "); whereClause.append(RModel.TABLE_ALIAS).append(".").append(where); } } if (DateFrom != null || DateTo != null) { if (whereClause.length() > 0) whereClause.append(" AND "); if (DateFrom != null && DateTo != null) whereClause.append("TRUNC(").append(RModel.TABLE_ALIAS).append(".DateAcct, 'DD') BETWEEN ") .append(DB.TO_DATE(DateFrom)).append(" AND ").append(DB.TO_DATE(DateTo)); else if (DateFrom != null) whereClause.append("TRUNC(").append(RModel.TABLE_ALIAS).append(".DateAcct, 'DD') >= ") .append(DB.TO_DATE(DateFrom)); else // DateTo != null whereClause.append("TRUNC(").append(RModel.TABLE_ALIAS).append(".DateAcct, 'DD') <= ") .append(DB.TO_DATE(DateTo)); } // Add Organization if (AD_Org_ID != 0) { if (whereClause.length() > 0) whereClause.append(" AND "); whereClause.append(RModel.TABLE_ALIAS).append(".AD_Org_ID=").append(AD_Org_ID); } } RModel rm = getRModel(); // Set Order By Clause StringBuffer orderClause = new StringBuffer(); if (sortBy1.length() > 0) { RColumn col = rm.getRColumn(sortBy1); if (col != null) orderClause.append(col.getDisplaySQL()); else 
orderClause.append(RModel.TABLE_ALIAS).append(".").append(sortBy1); } if (sortBy2.length() > 0) { if (orderClause.length() > 0) orderClause.append(","); RColumn col = rm.getRColumn(sortBy2); if (col != null) orderClause.append(col.getDisplaySQL()); else orderClause.append(RModel.TABLE_ALIAS).append(".").append(sortBy2); } if (sortBy3.length() > 0) { if (orderClause.length() > 0) orderClause.append(","); RColumn col = rm.getRColumn(sortBy3); if (col != null) orderClause.append(col.getDisplaySQL()); else orderClause.append(RModel.TABLE_ALIAS).append(".").append(sortBy3); } if (sortBy4.length() > 0) { if (orderClause.length() > 0) orderClause.append(","); RColumn col = rm.getRColumn(sortBy4); if (col != null) orderClause.append(col.getDisplaySQL()); else orderClause.append(RModel.TABLE_ALIAS).append(".").append(sortBy4); } if (orderClause.length() == 0) orderClause.append(RModel.TABLE_ALIAS).append(".Fact_Acct_ID"); // Groups if (group1 && sortBy1.length() > 0) rm.setGroup(sortBy1); if (group2 && sortBy2.length() > 0) rm.setGroup(sortBy2); if (group3 && sortBy3.length() > 0) rm.setGroup(sortBy3); if (group4 && sortBy4.length() > 0) rm.setGroup(sortBy4); // Totals rm.setFunction("AmtAcctDr", RModel.FUNCTION_SUM); rm.setFunction("AmtAcctCr", RModel.FUNCTION_SUM); rm.query (Env.getCtx(), whereClause.toString(), orderClause.toString()); return rm; } // query /** * Create Report Model (Columns) * @return Report Model */ private RModel getRModel() { Properties ctx = Env.getCtx(); RModel rm = new RModel("Fact_Acct"); // Add Key (Lookups) ArrayList<String> keys = createKeyColumns(); int max = m_leadingColumns; if (max == 0) max = keys.size(); for (int i = 0; i < max; i++) { String column = (String)keys.get(i); if (column != null && column.startsWith("Date")) rm.addColumn(new RColumn(ctx, column, DisplayType.Date)); else if (column != null && column.endsWith("_ID")) rm.addColumn(new RColumn(ctx, column, DisplayType.TableDir)); } // Main Info rm.addColumn(new RColumn(ctx, 
"AmtAcctDr", DisplayType.Amount)); rm.addColumn(new RColumn(ctx, "AmtAcctCr", DisplayType.Amount)); if (displaySourceAmt) { if (!keys.contains("DateTrx")) rm.addColumn(new RColumn(ctx, "DateTrx", DisplayType.Date)); rm.addColumn(new RColumn(ctx, "C_Currency_ID", DisplayType.TableDir)); rm.addColumn(new RColumn(ctx, "AmtSourceDr", DisplayType.Amount)); rm.addColumn(new RColumn(ctx, "AmtSourceCr", DisplayType.Amount)); rm.addColumn(new RColumn(ctx, "Rate", DisplayType.Amount, "CASE WHEN (AmtSourceDr + AmtSourceCr) = 0 THEN 0" + " ELSE (AmtAcctDr + AmtAcctCr) / (AmtSourceDr + AmtSourceCr) END")); } // Remaining Keys for (int i = max; i < keys.size(); i++) { String column = (String)keys.get(i); if (column != null && column.startsWith("Date")) rm.addColumn(new RColumn(ctx, column, DisplayType.Date)); else if (column.startsWith("UserElement")) { if (column.indexOf('1') != -1) rm.addColumn(new RColumn(ctx, column, DisplayType.TableDir, null, 0, m_ref1)); else rm.addColumn(new RColumn(ctx, column, DisplayType.TableDir, null, 0, m_ref2)); } else if (column != null && column.endsWith("_ID")) rm.addColumn(new RColumn(ctx, column, DisplayType.TableDir)); } // Info if (!keys.contains("DateAcct")) rm.addColumn(new RColumn(ctx, "DateAcct", DisplayType.Date)); if (!keys.contains("C_Period_ID")) rm.addColumn(new RColumn(ctx, "C_Period_ID", DisplayType.TableDir)); if (displayQty) { rm.addColumn(new RColumn(ctx, "C_UOM_ID", DisplayType.TableDir)); rm.addColumn(new RColumn(ctx, "Qty", DisplayType.Quantity)); } if (displayDocumentInfo) { rm.addColumn(new RColumn(ctx, "AD_Table_ID", DisplayType.TableDir)); rm.addColumn(new RColumn(ctx, "Record_ID", DisplayType.ID)); rm.addColumn(new RColumn(ctx, "Description", DisplayType.String)); } if (PostingType == null || PostingType.length() == 0) rm.addColumn(new RColumn(ctx, "PostingType", DisplayType.List, // teo_sarca, [ 1664208 ] RModel.TABLE_ALIAS+".PostingType", MFactAcct.POSTINGTYPE_AD_Reference_ID, null)); return rm; } // createRModel /** 
* Create the key columns in sequence * @return List of Key Columns */ private ArrayList<String> createKeyColumns() { ArrayList<String> columns = new ArrayList<String>(); m_leadingColumns = 0; // Sorting Fields columns.add(sortBy1); // may add "" if (!columns.contains(sortBy2)) columns.add(sortBy2); if (!columns.contains(sortBy3)) columns.add(sortBy3); if (!columns.contains(sortBy4)) columns.add(sortBy4); // Add Account Segments MAcctSchemaElement[] elements = ASchema.getAcctSchemaElements(); for (int i = 0; i < elements.length; i++) { if (m_leadingColumns == 0 && columns.contains("AD_Org_ID") && columns.contains("Account_ID")) m_leadingColumns = columns.size(); // MAcctSchemaElement ase = elements[i]; String columnName = ase.getColumnName(); if (columnName.startsWith("UserElement")) { if (columnName.indexOf('1') != -1) m_ref1 = ase.getDisplayColumnName(); else m_ref2 = ase.getDisplayColumnName(); } if (!columns.contains(columnName)) columns.add(columnName); } if (m_leadingColumns == 0 && columns.contains("AD_Org_ID") && columns.contains("Account_ID")) m_leadingColumns = columns.size(); return columns; } // createKeyColumns } // AcctViewerData
gpl-2.0
axDev-JDK/jaxp
src/com/sun/org/apache/xerces/internal/impl/XMLEntityScanner.java
84658
/* * Copyright (c) 2003, 2006, Oracle and/or its affiliates. All rights reserved. */ /* * Copyright 2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sun.org.apache.xerces.internal.impl; import java.io.EOFException; import java.io.IOException; import java.util.Locale; import java.util.Vector; import com.sun.xml.internal.stream.Entity; import com.sun.xml.internal.stream.XMLBufferListener; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import com.sun.org.apache.xerces.internal.impl.io.ASCIIReader; import com.sun.org.apache.xerces.internal.impl.io.UCSReader; import com.sun.org.apache.xerces.internal.impl.io.UTF8Reader; import com.sun.org.apache.xerces.internal.impl.msg.XMLMessageFormatter; import com.sun.org.apache.xerces.internal.util.EncodingMap; import com.sun.org.apache.xerces.internal.util.SymbolTable; import com.sun.org.apache.xerces.internal.util.XMLChar; import com.sun.org.apache.xerces.internal.util.XMLStringBuffer; import com.sun.org.apache.xerces.internal.xni.QName; import com.sun.org.apache.xerces.internal.xni.XMLString; import com.sun.org.apache.xerces.internal.xni.parser.XMLComponentManager; import com.sun.org.apache.xerces.internal.xni.parser.XMLConfigurationException; import com.sun.org.apache.xerces.internal.xni.*; /** * Implements the entity scanner methods. 
* * @author Neeraj Bajaj, Sun Microsystems * @author Andy Clark, IBM * @author Arnaud Le Hors, IBM * @author K.Venugopal Sun Microsystems * */ public class XMLEntityScanner implements XMLLocator { protected Entity.ScannedEntity fCurrentEntity = null ; protected int fBufferSize = XMLEntityManager.DEFAULT_BUFFER_SIZE; protected XMLEntityManager fEntityManager ; /** Debug switching readers for encodings. */ private static final boolean DEBUG_ENCODINGS = false; /** Listeners which should know when load is being called */ private Vector listeners = new Vector(); private static final boolean [] VALID_NAMES = new boolean[127]; /** * Debug printing of buffer. This debugging flag works best when you * resize the DEFAULT_BUFFER_SIZE down to something reasonable like * 64 characters. */ private static final boolean DEBUG_BUFFER = false; private static final boolean DEBUG_SKIP_STRING = false; /** * To signal the end of the document entity, this exception will be thrown. */ private static final EOFException END_OF_DOCUMENT_ENTITY = new EOFException() { private static final long serialVersionUID = 980337771224675268L; public Throwable fillInStackTrace() { return this; } }; protected SymbolTable fSymbolTable = null; protected XMLErrorReporter fErrorReporter = null; int [] whiteSpaceLookup = new int[100]; int whiteSpaceLen = 0; boolean whiteSpaceInfoNeeded = true; /** * Allow Java encoding names. This feature identifier is: * http://apache.org/xml/features/allow-java-encodings */ protected boolean fAllowJavaEncodings; //Will be used only during internal subsets. //for appending data. /** Property identifier: symbol table. */ protected static final String SYMBOL_TABLE = Constants.XERCES_PROPERTY_PREFIX + Constants.SYMBOL_TABLE_PROPERTY; /** Property identifier: error reporter. */ protected static final String ERROR_REPORTER = Constants.XERCES_PROPERTY_PREFIX + Constants.ERROR_REPORTER_PROPERTY; /** Feature identifier: allow Java encodings. 
*/ protected static final String ALLOW_JAVA_ENCODINGS = Constants.XERCES_FEATURE_PREFIX + Constants.ALLOW_JAVA_ENCODINGS_FEATURE; protected PropertyManager fPropertyManager = null ; boolean isExternal = false; static { for(int i=0x0041;i<=0x005A ; i++){ VALID_NAMES[i]=true; } for(int i=0x0061;i<=0x007A; i++){ VALID_NAMES[i]=true; } for(int i=0x0030;i<=0x0039; i++){ VALID_NAMES[i]=true; } VALID_NAMES[45]=true; VALID_NAMES[46]=true; VALID_NAMES[58]=true; VALID_NAMES[95]=true; } // SAPJVM: Remember, that the XML version has explicitly been set, // so that XMLStreamReader.getVersion() can find that out. boolean xmlVersionSetExplicitly = false; // // Constructors // /** Default constructor. */ public XMLEntityScanner() { } // <init>() /** private constructor, this class can only be instantiated within this class. Instance of this class should * be obtained using getEntityScanner() or getEntityScanner(ScannedEntity scannedEntity) * @see getEntityScanner() * @see getEntityScanner(ScannedEntity) */ public XMLEntityScanner(PropertyManager propertyManager, XMLEntityManager entityManager) { fEntityManager = entityManager ; reset(propertyManager); } // <init>() // set buffer size: public final void setBufferSize(int size) { // REVISIT: Buffer size passed to entity scanner // was not being kept in synch with the actual size // of the buffers in each scanned entity. If any // of the buffers were actually resized, it was possible // that the parser would throw an ArrayIndexOutOfBoundsException // for documents which contained names which are longer than // the current buffer size. Conceivably the buffer size passed // to entity scanner could be used to determine a minimum size // for resizing, if doubling its size is smaller than this // minimum. -- mrglavas fBufferSize = size; } /** * Resets the components. 
*/ public void reset(PropertyManager propertyManager){ fSymbolTable = (SymbolTable)propertyManager.getProperty(SYMBOL_TABLE) ; fErrorReporter = (XMLErrorReporter)propertyManager.getProperty(ERROR_REPORTER) ; fCurrentEntity = null; whiteSpaceLen = 0; whiteSpaceInfoNeeded = true; listeners.clear(); } /** * Resets the component. The component can query the component manager * about any features and properties that affect the operation of the * component. * * @param componentManager The component manager. * * @throws SAXException Thrown by component on initialization error. * For example, if a feature or property is * required for the operation of the component, the * component manager may throw a * SAXNotRecognizedException or a * SAXNotSupportedException. */ public void reset(XMLComponentManager componentManager) throws XMLConfigurationException { //System.out.println(" this is being called"); // xerces features fAllowJavaEncodings = componentManager.getFeature(ALLOW_JAVA_ENCODINGS, false); //xerces properties fSymbolTable = (SymbolTable)componentManager.getProperty(SYMBOL_TABLE); fErrorReporter = (XMLErrorReporter)componentManager.getProperty(ERROR_REPORTER); fCurrentEntity = null; whiteSpaceLen = 0; whiteSpaceInfoNeeded = true; listeners.clear(); } // reset(XMLComponentManager) public final void reset(SymbolTable symbolTable, XMLEntityManager entityManager, XMLErrorReporter reporter) { fCurrentEntity = null; fSymbolTable = symbolTable; fEntityManager = entityManager; fErrorReporter = reporter; } /** * Returns the XML version of the current entity. This will normally be the * value from the XML or text declaration or defaulted by the parser. Note that * that this value may be different than the version of the processing rules * applied to the current entity. For instance, an XML 1.1 document may refer to * XML 1.0 entities. In such a case the rules of XML 1.1 are applied to the entire * document. 
Also note that, for a given entity, this value can only be considered * final once the XML or text declaration has been read or once it has been * determined that there is no such declaration. */ public final String getXMLVersion() { if (fCurrentEntity != null) { return fCurrentEntity.xmlVersion; } return null; } // getXMLVersion():String /** * Sets the XML version. This method is used by the * scanners to report the value of the version pseudo-attribute * in an XML or text declaration. * * @param xmlVersion the XML version of the current entity */ public final void setXMLVersion(String xmlVersion) { xmlVersionSetExplicitly = true; // SAPJVM fCurrentEntity.xmlVersion = xmlVersion; } // setXMLVersion(String) /** set the instance of current scanned entity. * @param ScannedEntity */ public final void setCurrentEntity(Entity.ScannedEntity scannedEntity){ fCurrentEntity = scannedEntity ; if(fCurrentEntity != null){ isExternal = fCurrentEntity.isExternal(); if(DEBUG_BUFFER) System.out.println("Current Entity is "+scannedEntity.name); } } public Entity.ScannedEntity getCurrentEntity(){ return fCurrentEntity ; } // // XMLEntityReader methods // /** * Returns the base system identifier of the currently scanned * entity, or null if none is available. */ public final String getBaseSystemId() { return (fCurrentEntity != null && fCurrentEntity.entityLocation != null) ? fCurrentEntity.entityLocation.getExpandedSystemId() : null; } // getBaseSystemId():String /** * @see com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier#setBaseSystemId(String) */ public void setBaseSystemId(String systemId) { //no-op } ///////////// Locator methods start. public final int getLineNumber(){ //if the entity is closed, we should return -1 //xxx at first place why such call should be there... return fCurrentEntity != null ? 
fCurrentEntity.lineNumber : -1 ; } /** * @see com.sun.org.apache.xerces.internal.xni.XMLLocator#setLineNumber(int) */ public void setLineNumber(int line) { //no-op } public final int getColumnNumber(){ //if the entity is closed, we should return -1 //xxx at first place why such call should be there... return fCurrentEntity != null ? fCurrentEntity.columnNumber : -1 ; } /** * @see com.sun.org.apache.xerces.internal.xni.XMLLocator#setColumnNumber(int) */ public void setColumnNumber(int col) { // no-op } public final int getCharacterOffset(){ return fCurrentEntity != null ? fCurrentEntity.fTotalCountTillLastLoad + fCurrentEntity.position : -1 ; } /** Returns the expanded system identifier. */ public final String getExpandedSystemId() { return (fCurrentEntity != null && fCurrentEntity.entityLocation != null) ? fCurrentEntity.entityLocation.getExpandedSystemId() : null; } /** * @see com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier#setExpandedSystemId(String) */ public void setExpandedSystemId(String systemId) { //no-op } /** Returns the literal system identifier. */ public final String getLiteralSystemId() { return (fCurrentEntity != null && fCurrentEntity.entityLocation != null) ? fCurrentEntity.entityLocation.getLiteralSystemId() : null; } /** * @see com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier#setLiteralSystemId(String) */ public void setLiteralSystemId(String systemId) { //no-op } /** Returns the public identifier. */ public final String getPublicId() { return (fCurrentEntity != null && fCurrentEntity.entityLocation != null) ? fCurrentEntity.entityLocation.getPublicId() : null; } /** * @see com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier#setPublicId(String) */ public void setPublicId(String publicId) { //no-op } ///////////////// Locator methods finished. 
/** the version of the current entity being scanned */ public void setVersion(String version){ fCurrentEntity.version = version; } public String getVersion(){ if (fCurrentEntity != null) return fCurrentEntity.version ; return null; } /** * Returns the encoding of the current entity. * Note that, for a given entity, this value can only be * considered final once the encoding declaration has been read (or once it * has been determined that there is no such declaration) since, no encoding * having been specified on the XMLInputSource, the parser * will make an initial "guess" which could be in error. */ public final String getEncoding() { if (fCurrentEntity != null) { return fCurrentEntity.encoding; } return null; } // getEncoding():String /** * Sets the encoding of the scanner. This method is used by the * scanners if the XMLDecl or TextDecl line contains an encoding * pseudo-attribute. * <p> * <strong>Note:</strong> The underlying character reader on the * current entity will be changed to accomodate the new encoding. * However, the new encoding is ignored if the current reader was * not constructed from an input stream (e.g. an external entity * that is resolved directly to the appropriate java.io.Reader * object). * * @param encoding The IANA encoding name of the new encoding. * * @throws IOException Thrown if the new encoding is not supported. * * @see com.sun.org.apache.xerces.internal.util.EncodingMap */ public final void setEncoding(String encoding) throws IOException { if (DEBUG_ENCODINGS) { System.out.println("$$$ setEncoding: "+encoding); } if (fCurrentEntity.stream != null) { // if the encoding is the same, don't change the reader and // re-use the original reader used by the OneCharReader // NOTE: Besides saving an object, this overcomes deficiencies // in the UTF-16 reader supplied with the standard Java // distribution (up to and including 1.3). The UTF-16 // decoder buffers 8K blocks even when only asked to read // a single char! 
-Ac if (fCurrentEntity.encoding == null || !fCurrentEntity.encoding.equals(encoding)) { // UTF-16 is a bit of a special case. If the encoding is UTF-16, // and we know the endian-ness, we shouldn't change readers. // If it's ISO-10646-UCS-(2|4), then we'll have to deduce // the endian-ness from the encoding we presently have. if(fCurrentEntity.encoding != null && fCurrentEntity.encoding.startsWith("UTF-16")) { String ENCODING = encoding.toUpperCase(Locale.ENGLISH); if(ENCODING.equals("UTF-16")) return; if(ENCODING.equals("ISO-10646-UCS-4")) { if(fCurrentEntity.encoding.equals("UTF-16BE")) { fCurrentEntity.reader = new UCSReader(fCurrentEntity.stream, UCSReader.UCS4BE); } else { fCurrentEntity.reader = new UCSReader(fCurrentEntity.stream, UCSReader.UCS4LE); } return; } if(ENCODING.equals("ISO-10646-UCS-2")) { if(fCurrentEntity.encoding.equals("UTF-16BE")) { fCurrentEntity.reader = new UCSReader(fCurrentEntity.stream, UCSReader.UCS2BE); } else { fCurrentEntity.reader = new UCSReader(fCurrentEntity.stream, UCSReader.UCS2LE); } return; } } // wrap a new reader around the input stream, changing // the encoding if (DEBUG_ENCODINGS) { System.out.println("$$$ creating new reader from stream: "+ fCurrentEntity.stream); } //fCurrentEntity.stream.reset(); fCurrentEntity.reader = createReader(fCurrentEntity.stream, encoding, null); fCurrentEntity.encoding = encoding; } else { if (DEBUG_ENCODINGS) System.out.println("$$$ reusing old reader on stream"); } } } // setEncoding(String) /** Returns true if the current entity being scanned is external. */ public final boolean isExternal() { return fCurrentEntity.isExternal(); } // isExternal():boolean public int getChar(int relative) throws IOException{ if(arrangeCapacity(relative + 1, false)){ return fCurrentEntity.ch[fCurrentEntity.position + relative]; }else{ return -1; } }//getChar() /** * Returns the next character on the input. * <p> * <strong>Note:</strong> The character is <em>not</em> consumed. 
* * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. */ public int peekChar() throws IOException { if (DEBUG_BUFFER) { System.out.print("(peekChar: "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(0); load(0, true); } // peek at character int c = fCurrentEntity.ch[fCurrentEntity.position]; // return peeked character if (DEBUG_BUFFER) { System.out.print(")peekChar: "); print(); if (isExternal) { System.out.println(" -> '"+(c!='\r'?(char)c:'\n')+"'"); } else { System.out.println(" -> '"+(char)c+"'"); } } if (isExternal) { return c != '\r' ? c : '\n'; } else { return c; } } // peekChar():int /** * Returns the next character on the input. * <p> * <strong>Note:</strong> The character is consumed. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. */ public int scanChar() throws IOException { if (DEBUG_BUFFER) { System.out.print("(scanChar: "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(0); load(0, true); } // scan character int c = fCurrentEntity.ch[fCurrentEntity.position++]; if (c == '\n' || (c == '\r' && isExternal)) { fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(1); fCurrentEntity.ch[0] = (char)c; load(1, false); } if (c == '\r' && isExternal) { if (fCurrentEntity.ch[fCurrentEntity.position++] != '\n') { fCurrentEntity.position--; } c = '\n'; } } // return character that was scanned if (DEBUG_BUFFER) { System.out.print(")scanChar: "); print(); System.out.println(" -> '"+(char)c+"'"); } fCurrentEntity.columnNumber++; return c; } // scanChar():int /** * Returns a string matching the NMTOKEN production appearing immediately * on the input as a symbol, or null if NMTOKEN Name string is present. 
* <p> * <strong>Note:</strong> The NMTOKEN characters are consumed. * <p> * <strong>Note:</strong> The string returned must be a symbol. The * SymbolTable can be used for this purpose. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. * * @see com.sun.org.apache.xerces.internal.util.SymbolTable * @see com.sun.org.apache.xerces.internal.util.XMLChar#isName */ public String scanNmtoken() throws IOException { if (DEBUG_BUFFER) { System.out.print("(scanNmtoken: "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(0); load(0, true); } // scan nmtoken int offset = fCurrentEntity.position; boolean vc = false; char c; while (true){ //while (XMLChar.isName(fCurrentEntity.ch[fCurrentEntity.position])) { c = fCurrentEntity.ch[fCurrentEntity.position]; if(c < 127){ vc = VALID_NAMES[c]; }else{ vc = XMLChar.isName(c); } if(!vc)break; if (++fCurrentEntity.position == fCurrentEntity.count) { int length = fCurrentEntity.position - offset; invokeListeners(length); if (length == fCurrentEntity.fBufferSize) { // bad luck we have to resize our buffer char[] tmp = new char[fCurrentEntity.fBufferSize * 2]; System.arraycopy(fCurrentEntity.ch, offset, tmp, 0, length); fCurrentEntity.ch = tmp; fCurrentEntity.fBufferSize *= 2; } else { System.arraycopy(fCurrentEntity.ch, offset, fCurrentEntity.ch, 0, length); } offset = 0; if (load(length, false)) { break; } } } int length = fCurrentEntity.position - offset; fCurrentEntity.columnNumber += length; // return nmtoken String symbol = null; if (length > 0) { symbol = fSymbolTable.addSymbol(fCurrentEntity.ch, offset, length); } if (DEBUG_BUFFER) { System.out.print(")scanNmtoken: "); print(); System.out.println(" -> "+String.valueOf(symbol)); } return symbol; } // scanNmtoken():String /** * Returns a string matching the Name production appearing immediately * on the input as a symbol, or null if no Name string is 
present. * <p> * <strong>Note:</strong> The Name characters are consumed. * <p> * <strong>Note:</strong> The string returned must be a symbol. The * SymbolTable can be used for this purpose. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. * * @see com.sun.org.apache.xerces.internal.util.SymbolTable * @see com.sun.org.apache.xerces.internal.util.XMLChar#isName * @see com.sun.org.apache.xerces.internal.util.XMLChar#isNameStart */ public String scanName() throws IOException { if (DEBUG_BUFFER) { System.out.print("(scanName: "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(0); load(0, true); } // scan name int offset = fCurrentEntity.position; if (XMLChar.isNameStart(fCurrentEntity.ch[offset])) { if (++fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(1); fCurrentEntity.ch[0] = fCurrentEntity.ch[offset]; offset = 0; if (load(1, false)) { fCurrentEntity.columnNumber++; String symbol = fSymbolTable.addSymbol(fCurrentEntity.ch, 0, 1); if (DEBUG_BUFFER) { System.out.print(")scanName: "); print(); System.out.println(" -> "+String.valueOf(symbol)); } return symbol; } } boolean vc =false; while (true ){ //XMLChar.isName(fCurrentEntity.ch[fCurrentEntity.position])) ; char c = fCurrentEntity.ch[fCurrentEntity.position]; if(c < 127){ vc = VALID_NAMES[c]; }else{ vc = XMLChar.isName(c); } if(!vc)break; if (++fCurrentEntity.position == fCurrentEntity.count) { int length = fCurrentEntity.position - offset; invokeListeners(length); if (length == fCurrentEntity.fBufferSize) { // bad luck we have to resize our buffer char[] tmp = new char[fCurrentEntity.fBufferSize * 2]; System.arraycopy(fCurrentEntity.ch, offset, tmp, 0, length); fCurrentEntity.ch = tmp; fCurrentEntity.fBufferSize *= 2; } else { System.arraycopy(fCurrentEntity.ch, offset, fCurrentEntity.ch, 0, length); } offset = 0; if (load(length, false)) { break; } } } } 
int length = fCurrentEntity.position - offset; fCurrentEntity.columnNumber += length; // return name String symbol; if (length > 0) { symbol = fSymbolTable.addSymbol(fCurrentEntity.ch, offset, length); } else symbol = null; if (DEBUG_BUFFER) { System.out.print(")scanName: "); print(); System.out.println(" -> "+String.valueOf(symbol)); } return symbol; } // scanName():String /** * Scans a qualified name from the input, setting the fields of the * QName structure appropriately. * <p> * <strong>Note:</strong> The qualified name characters are consumed. * <p> * <strong>Note:</strong> The strings used to set the values of the * QName structure must be symbols. The SymbolTable can be used for * this purpose. * * @param qname The qualified name structure to fill. * * @return Returns true if a qualified name appeared immediately on * the input and was scanned, false otherwise. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. * * @see com.sun.org.apache.xerces.internal.util.SymbolTable * @see com.sun.org.apache.xerces.internal.util.XMLChar#isName * @see com.sun.org.apache.xerces.internal.util.XMLChar#isNameStart */ public boolean scanQName(QName qname) throws IOException { if (DEBUG_BUFFER) { System.out.print("(scanQName, "+qname+": "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(0); load(0, true); } // scan qualified name int offset = fCurrentEntity.position; //making a check if if the specified character is a valid name start character //as defined by production [5] in the XML 1.0 specification. // Name ::= (Letter | '_' | ':') (NameChar)* if (XMLChar.isNameStart(fCurrentEntity.ch[offset])) { if (++fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(1); fCurrentEntity.ch[0] = fCurrentEntity.ch[offset]; offset = 0; if (load(1, false)) { fCurrentEntity.columnNumber++; //adding into symbol table. 
//XXX We are trying to add single character in SymbolTable?????? String name = fSymbolTable.addSymbol(fCurrentEntity.ch, 0, 1); qname.setValues(null, name, name, null); if (DEBUG_BUFFER) { System.out.print(")scanQName, "+qname+": "); print(); System.out.println(" -> true"); } return true; } } int index = -1; boolean vc = false; while ( true){ //XMLChar.isName(fCurrentEntity.ch[fCurrentEntity.position])) ; char c = fCurrentEntity.ch[fCurrentEntity.position]; if(c < 127){ vc = VALID_NAMES[c]; }else{ vc = XMLChar.isName(c); } if(!vc)break; if (c == ':') { if (index != -1) { break; } index = fCurrentEntity.position; } if (++fCurrentEntity.position == fCurrentEntity.count) { int length = fCurrentEntity.position - offset; invokeListeners(length); if (length == fCurrentEntity.fBufferSize) { // bad luck we have to resize our buffer char[] tmp = new char[fCurrentEntity.fBufferSize * 2]; System.arraycopy(fCurrentEntity.ch, offset, tmp, 0, length); fCurrentEntity.ch = tmp; fCurrentEntity.fBufferSize *= 2; } else { System.arraycopy(fCurrentEntity.ch, offset, fCurrentEntity.ch, 0, length); } if (index != -1) { index = index - offset; } offset = 0; if (load(length, false)) { break; } } } int length = fCurrentEntity.position - offset; fCurrentEntity.columnNumber += length; if (length > 0) { String prefix = null; String localpart = null; String rawname = fSymbolTable.addSymbol(fCurrentEntity.ch, offset, length); if (index != -1) { int prefixLength = index - offset; prefix = fSymbolTable.addSymbol(fCurrentEntity.ch, offset, prefixLength); int len = length - prefixLength - 1; localpart = fSymbolTable.addSymbol(fCurrentEntity.ch, index + 1, len); } else { localpart = rawname; } qname.setValues(prefix, localpart, rawname, null); if (DEBUG_BUFFER) { System.out.print(")scanQName, "+qname+": "); print(); System.out.println(" -> true"); } return true; } } // no qualified name found if (DEBUG_BUFFER) { System.out.print(")scanQName, "+qname+": "); print(); System.out.println(" -> false"); } 
return false; } // scanQName(QName):boolean /** * CHANGED: * Scans a range of parsed character data, This function appends the character data to * the supplied buffer. * <p> * <strong>Note:</strong> The characters are consumed. * <p> * <strong>Note:</strong> This method does not guarantee to return * the longest run of parsed character data. This method may return * before markup due to reaching the end of the input buffer or any * other reason. * <p> * * @param content The content structure to fill. * * @return Returns the next character on the input, if known. This * value may be -1 but this does <em>note</em> designate * end of file. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. */ public int scanContent(XMLString content) throws IOException { if (DEBUG_BUFFER) { System.out.print("(scanContent: "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(0); load(0, true); } else if (fCurrentEntity.position == fCurrentEntity.count - 1) { invokeListeners(0); fCurrentEntity.ch[0] = fCurrentEntity.ch[fCurrentEntity.count - 1]; load(1, false); fCurrentEntity.position = 0; } // normalize newlines int offset = fCurrentEntity.position; int c = fCurrentEntity.ch[offset]; int newlines = 0; if (c == '\n' || (c == '\r' && isExternal)) { if (DEBUG_BUFFER) { System.out.print("[newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } do { c = fCurrentEntity.ch[fCurrentEntity.position++]; if (c == '\r' && isExternal) { newlines++; fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (fCurrentEntity.position == fCurrentEntity.count) { offset = 0; invokeListeners(newlines); fCurrentEntity.position = newlines; if (load(newlines, false)) { break; } } if (fCurrentEntity.ch[fCurrentEntity.position] == '\n') { fCurrentEntity.position++; offset++; } /*** NEWLINE NORMALIZATION ***/ else { newlines++; } } else if 
(c == '\n') { newlines++; fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (fCurrentEntity.position == fCurrentEntity.count) { offset = 0; invokeListeners(newlines); fCurrentEntity.position = newlines; if (load(newlines, false)) { break; } } } else { fCurrentEntity.position--; break; } } while (fCurrentEntity.position < fCurrentEntity.count - 1); for (int i = offset; i < fCurrentEntity.position; i++) { fCurrentEntity.ch[i] = '\n'; } int length = fCurrentEntity.position - offset; if (fCurrentEntity.position == fCurrentEntity.count - 1) { //CHANGED: dont replace the value.. append to the buffer. This gives control to the callee //on buffering the data.. content.setValues(fCurrentEntity.ch, offset, length); //content.append(fCurrentEntity.ch, offset, length); if (DEBUG_BUFFER) { System.out.print("]newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } return -1; } if (DEBUG_BUFFER) { System.out.print("]newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } } while (fCurrentEntity.position < fCurrentEntity.count) { c = fCurrentEntity.ch[fCurrentEntity.position++]; if (!XMLChar.isContent(c)) { fCurrentEntity.position--; break; } } int length = fCurrentEntity.position - offset; fCurrentEntity.columnNumber += length - newlines; //CHANGED: dont replace the value.. append to the buffer. This gives control to the callee //on buffering the data.. content.setValues(fCurrentEntity.ch, offset, length); //content.append(fCurrentEntity.ch, offset, length); // return next character if (fCurrentEntity.position != fCurrentEntity.count) { c = fCurrentEntity.ch[fCurrentEntity.position]; // REVISIT: Does this need to be updated to fix the // #x0D ^#x0A newline normalization problem? 
-Ac if (c == '\r' && isExternal) { c = '\n'; } } else { c = -1; } if (DEBUG_BUFFER) { System.out.print(")scanContent: "); print(); System.out.println(" -> '"+(char)c+"'"); } return c; } // scanContent(XMLString):int /** * Scans a range of attribute value data, setting the fields of the * XMLString structure, appropriately. * <p> * <strong>Note:</strong> The characters are consumed. * <p> * <strong>Note:</strong> This method does not guarantee to return * the longest run of attribute value data. This method may return * before the quote character due to reaching the end of the input * buffer or any other reason. * <p> * <strong>Note:</strong> The fields contained in the XMLString * structure are not guaranteed to remain valid upon subsequent calls * to the entity scanner. Therefore, the caller is responsible for * immediately using the returned character data or making a copy of * the character data. * * @param quote The quote character that signifies the end of the * attribute value data. * @param content The content structure to fill. * * @return Returns the next character on the input, if known. This * value may be -1 but this does <em>note</em> designate * end of file. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. 
*/ public int scanLiteral(int quote, XMLString content) throws IOException { if (DEBUG_BUFFER) { System.out.print("(scanLiteral, '"+(char)quote+"': "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(0); load(0, true); } else if (fCurrentEntity.position == fCurrentEntity.count - 1) { invokeListeners(0); fCurrentEntity.ch[0] = fCurrentEntity.ch[fCurrentEntity.count - 1]; load(1, false); fCurrentEntity.position = 0; } // normalize newlines int offset = fCurrentEntity.position; int c = fCurrentEntity.ch[offset]; int newlines = 0; if(whiteSpaceInfoNeeded) whiteSpaceLen=0; if (c == '\n' || (c == '\r' && isExternal)) { if (DEBUG_BUFFER) { System.out.print("[newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } do { c = fCurrentEntity.ch[fCurrentEntity.position++]; if (c == '\r' && isExternal) { newlines++; fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(newlines); offset = 0; fCurrentEntity.position = newlines; if (load(newlines, false)) { break; } } if (fCurrentEntity.ch[fCurrentEntity.position] == '\n') { fCurrentEntity.position++; offset++; } /*** NEWLINE NORMALIZATION ***/ else { newlines++; } /***/ } else if (c == '\n') { newlines++; fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (fCurrentEntity.position == fCurrentEntity.count) { offset = 0; invokeListeners(newlines); fCurrentEntity.position = newlines; if (load(newlines, false)) { break; } } /*** NEWLINE NORMALIZATION *** * if (fCurrentEntity.ch[fCurrentEntity.position] == '\r' * && external) { * fCurrentEntity.position++; * offset++; * } * /***/ } else { fCurrentEntity.position--; break; } } while (fCurrentEntity.position < fCurrentEntity.count - 1); int i=0; for ( i = offset; i < fCurrentEntity.position; i++) { fCurrentEntity.ch[i] = '\n'; whiteSpaceLookup[whiteSpaceLen++]=i; } int length 
= fCurrentEntity.position - offset; if (fCurrentEntity.position == fCurrentEntity.count - 1) { content.setValues(fCurrentEntity.ch, offset, length); if (DEBUG_BUFFER) { System.out.print("]newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } return -1; } if (DEBUG_BUFFER) { System.out.print("]newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } } // scan literal value while (fCurrentEntity.position < fCurrentEntity.count) { c = fCurrentEntity.ch[fCurrentEntity.position++]; if ((c == quote && (!fCurrentEntity.literal || isExternal)) || c == '%' || !XMLChar.isContent(c)) { fCurrentEntity.position--; break; } if(whiteSpaceInfoNeeded){ if(c == 0x20 || c == 0x9){ if(whiteSpaceLen < whiteSpaceLookup.length){ whiteSpaceLookup[whiteSpaceLen++]= fCurrentEntity.position-1; }else{ int [] tmp = new int[whiteSpaceLookup.length*2]; System.arraycopy(whiteSpaceLookup,0,tmp,0,whiteSpaceLookup.length); whiteSpaceLookup = tmp; whiteSpaceLookup[whiteSpaceLen++]= fCurrentEntity.position - 1; } } } } int length = fCurrentEntity.position - offset; fCurrentEntity.columnNumber += length - newlines; content.setValues(fCurrentEntity.ch, offset, length); // return next character if (fCurrentEntity.position != fCurrentEntity.count) { c = fCurrentEntity.ch[fCurrentEntity.position]; // NOTE: We don't want to accidentally signal the // end of the literal if we're expanding an // entity appearing in the literal. -Ac if (c == quote && fCurrentEntity.literal) { c = -1; } } else { c = -1; } if (DEBUG_BUFFER) { System.out.print(")scanLiteral, '"+(char)quote+"': "); print(); System.out.println(" -> '"+(char)c+"'"); } return c; } // scanLiteral(int,XMLString):int //CHANGED: /** * Scans a range of character data up to the specified delimiter, * setting the fields of the XMLString structure, appropriately. * <p> * <strong>Note:</strong> The characters are consumed. 
* <p> * <strong>Note:</strong> This assumes that the length of the delimiter * and that the delimiter contains at least one character. * <p> * <strong>Note:</strong> This method does not guarantee to return * the longest run of character data. This method may return before * the delimiter due to reaching the end of the input buffer or any * other reason. * <p> * @param delimiter The string that signifies the end of the character * data to be scanned. * @param buffer The XMLStringBuffer to fill. * * @return Returns true if there is more data to scan, false otherwise. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. */ public boolean scanData(String delimiter, XMLStringBuffer buffer) throws IOException { boolean done = false; int delimLen = delimiter.length(); char charAt0 = delimiter.charAt(0); do { if (DEBUG_BUFFER) { System.out.print("(scanData: "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { load(0, true); } boolean bNextEntity = false; while ((fCurrentEntity.position > fCurrentEntity.count - delimLen) && (!bNextEntity)) { System.arraycopy(fCurrentEntity.ch, fCurrentEntity.position, fCurrentEntity.ch, 0, fCurrentEntity.count - fCurrentEntity.position); bNextEntity = load(fCurrentEntity.count - fCurrentEntity.position, false); fCurrentEntity.position = 0; fCurrentEntity.startPosition = 0; } if (fCurrentEntity.position > fCurrentEntity.count - delimLen) { // something must be wrong with the input: e.g., file ends in an unterminated comment int length = fCurrentEntity.count - fCurrentEntity.position; buffer.append (fCurrentEntity.ch, fCurrentEntity.position, length); fCurrentEntity.columnNumber += fCurrentEntity.count; fCurrentEntity.baseCharOffset += (fCurrentEntity.position - fCurrentEntity.startPosition); fCurrentEntity.position = fCurrentEntity.count; fCurrentEntity.startPosition = fCurrentEntity.count; load(0, true); return false; } // 
normalize newlines int offset = fCurrentEntity.position; int c = fCurrentEntity.ch[offset]; int newlines = 0; if (c == '\n' || (c == '\r' && isExternal)) { if (DEBUG_BUFFER) { System.out.print("[newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } do { c = fCurrentEntity.ch[fCurrentEntity.position++]; if (c == '\r' && isExternal) { newlines++; fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (fCurrentEntity.position == fCurrentEntity.count) { offset = 0; invokeListeners(newlines); fCurrentEntity.position = newlines; if (load(newlines, false)) { break; } } if (fCurrentEntity.ch[fCurrentEntity.position] == '\n') { fCurrentEntity.position++; offset++; } /*** NEWLINE NORMALIZATION ***/ else { newlines++; } } else if (c == '\n') { newlines++; fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (fCurrentEntity.position == fCurrentEntity.count) { offset = 0; invokeListeners(newlines); fCurrentEntity.position = newlines; fCurrentEntity.count = newlines; if (load(newlines, false)) { break; } } } else { fCurrentEntity.position--; break; } } while (fCurrentEntity.position < fCurrentEntity.count - 1); for (int i = offset; i < fCurrentEntity.position; i++) { fCurrentEntity.ch[i] = '\n'; } int length = fCurrentEntity.position - offset; if (fCurrentEntity.position == fCurrentEntity.count - 1) { buffer.append(fCurrentEntity.ch, offset, length); if (DEBUG_BUFFER) { System.out.print("]newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } return true; } if (DEBUG_BUFFER) { System.out.print("]newline, "+offset+", "+fCurrentEntity.position+": "); print(); System.out.println(); } } // iterate over buffer looking for delimiter OUTER: while (fCurrentEntity.position < fCurrentEntity.count) { c = fCurrentEntity.ch[fCurrentEntity.position++]; if (c == charAt0) { // looks like we just hit the delimiter int delimOffset = fCurrentEntity.position - 1; for (int i = 1; i < delimLen; i++) { if 
(fCurrentEntity.position == fCurrentEntity.count) { fCurrentEntity.position -= i; break OUTER; } c = fCurrentEntity.ch[fCurrentEntity.position++]; if (delimiter.charAt(i) != c) { fCurrentEntity.position -= i; break; } } if (fCurrentEntity.position == delimOffset + delimLen) { done = true; break; } } else if (c == '\n' || (isExternal && c == '\r')) { fCurrentEntity.position--; break; } else if (XMLChar.isInvalid(c)) { fCurrentEntity.position--; int length = fCurrentEntity.position - offset; fCurrentEntity.columnNumber += length - newlines; buffer.append(fCurrentEntity.ch, offset, length); return true; } } int length = fCurrentEntity.position - offset; fCurrentEntity.columnNumber += length - newlines; if (done) { length -= delimLen; } buffer.append(fCurrentEntity.ch, offset, length); // return true if string was skipped if (DEBUG_BUFFER) { System.out.print(")scanData: "); print(); System.out.println(" -> " + done); } } while (!done); return !done; } // scanData(String,XMLString) /** * Skips a character appearing immediately on the input. * <p> * <strong>Note:</strong> The character is consumed only if it matches * the specified character. * * @param c The character to skip. * * @return Returns true if the character was skipped. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. 
*/ public boolean skipChar(int c) throws IOException { if (DEBUG_BUFFER) { System.out.print("(skipChar, '"+(char)c+"': "); print(); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(0); load(0, true); } // skip character int cc = fCurrentEntity.ch[fCurrentEntity.position]; if (cc == c) { fCurrentEntity.position++; if (c == '\n') { fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; } else { fCurrentEntity.columnNumber++; } if (DEBUG_BUFFER) { System.out.print(")skipChar, '"+(char)c+"': "); print(); System.out.println(" -> true"); } return true; } else if (c == '\n' && cc == '\r' && isExternal) { // handle newlines if (fCurrentEntity.position == fCurrentEntity.count) { invokeListeners(1); fCurrentEntity.ch[0] = (char)cc; load(1, false); } fCurrentEntity.position++; if (fCurrentEntity.ch[fCurrentEntity.position] == '\n') { fCurrentEntity.position++; } fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (DEBUG_BUFFER) { System.out.print(")skipChar, '"+(char)c+"': "); print(); System.out.println(" -> true"); } return true; } // character was not skipped if (DEBUG_BUFFER) { System.out.print(")skipChar, '"+(char)c+"': "); print(); System.out.println(" -> false"); } return false; } // skipChar(int):boolean public boolean isSpace(char ch){ return (ch == ' ') || (ch == '\n') || (ch == '\t') || (ch == '\r'); } /** * Skips space characters appearing immediately on the input. * <p> * <strong>Note:</strong> The characters are consumed only if they are * space characters. * * @return Returns true if at least one space character was skipped. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. 
*
     * @see com.sun.org.apache.xerces.internal.util.XMLChar#isSpace
     */
    public boolean skipSpaces() throws IOException {
        if (DEBUG_BUFFER) {
            System.out.print("(skipSpaces: ");
            print();
            System.out.println();
        }
        //boolean entityChanged = false;
        // load more characters, if needed
        if (fCurrentEntity.position == fCurrentEntity.count) {
            invokeListeners(0);
            load(0, true);
        }

        //we are doing this check only in skipSpace() because it is called by
        //fMiscDispatcher and we want the parser to exit gracefully when document
        //is well-formed.

        //it is possible that end of document is reached and
        //fCurrentEntity becomes null
        //nothing was read so entity changed 'false' should be returned.
        if(fCurrentEntity == null){
            return false ;
        }

        // skip spaces
        int c = fCurrentEntity.ch[fCurrentEntity.position];
        if (XMLChar.isSpace(c)) {
            do {
                boolean entityChanged = false;
                // handle newlines
                if (c == '\n' || (isExternal && c == '\r')) {
                    fCurrentEntity.lineNumber++;
                    fCurrentEntity.columnNumber = 1;
                    if (fCurrentEntity.position == fCurrentEntity.count - 1) {
                        invokeListeners(0);
                        fCurrentEntity.ch[0] = (char)c;
                        entityChanged = load(1, true);
                        if (!entityChanged){
                            // the load change the position to be 1,
                            // need to restore it when entity not changed
                            fCurrentEntity.position = 0;
                        }else if(fCurrentEntity == null){
                            return true ;
                        }
                    }
                    if (c == '\r' && isExternal) {
                        // REVISIT: Does this need to be updated to fix the
                        //          #x0D ^#x0A newline normalization problem? -Ac
                        if (fCurrentEntity.ch[++fCurrentEntity.position] != '\n') {
                            fCurrentEntity.position--;
                        }
                    }
                } else {
                    fCurrentEntity.columnNumber++;
                }
                // load more characters, if needed
                if (!entityChanged){
                    fCurrentEntity.position++;
                }
                if (fCurrentEntity.position == fCurrentEntity.count) {
                    invokeListeners(0);
                    load(0, true);

                    //we are doing this check only in skipSpace() because it is called by
                    //fMiscDispatcher and we want the parser to exit gracefully when document
                    //is well-formed.

                    //it is possible that end of document is reached and
                    //fCurrentEntity becomes null
                    //nothing was read so entity changed 'false' should be returned.
                    if(fCurrentEntity == null){
                        return true ;
                    }
                }
            } while (XMLChar.isSpace(c = fCurrentEntity.ch[fCurrentEntity.position]));
            if (DEBUG_BUFFER) {
                System.out.print(")skipSpaces: ");
                print();
                System.out.println(" -> true");
            }
            return true;
        }

        // no spaces were found
        if (DEBUG_BUFFER) {
            System.out.print(")skipSpaces: ");
            print();
            System.out.println(" -> false");
        }
        return false;
    } // skipSpaces():boolean

    /**
     * Checks that the given number of characters are available in the
     * underlying buffer, without switching to the next entity.
     *
     * @param length the number of characters that should be available.
     * @return true if the requested capacity is available.
     */
    public boolean arrangeCapacity(int length) throws IOException{
        return arrangeCapacity(length, false);
    }

    /**
     * Checks that the given number of characters are available in the
     * underlying buffer, loading more characters as necessary.
     *
     * @param length       the number of characters that should be available.
     * @param changeEntity whether the underlying load may switch to the next entity.
     * @return true if the requested capacity is available.
     */
    public boolean arrangeCapacity(int length, boolean changeEntity) throws IOException{
        //check if the capacity is availble in the current buffer
        //count is no.
of characters in the buffer [x][m][l] //position is '0' based //System.out.println("fCurrent Entity " + fCurrentEntity); if((fCurrentEntity.count - fCurrentEntity.position) >= length) { return true; } if(DEBUG_SKIP_STRING){ System.out.println("fCurrentEntity.count = " + fCurrentEntity.count); System.out.println("fCurrentEntity.position = " + fCurrentEntity.position); System.out.println("length = " + length); } boolean entityChanged = false; //load more characters -- this function shouldn't change the entity while((fCurrentEntity.count - fCurrentEntity.position) < length){ if( (fCurrentEntity.ch.length - fCurrentEntity.position) < length){ invokeListeners(0); System.arraycopy(fCurrentEntity.ch, fCurrentEntity.position, fCurrentEntity.ch,0,fCurrentEntity.count - fCurrentEntity.position); fCurrentEntity.count = fCurrentEntity.count - fCurrentEntity.position; fCurrentEntity.position = 0; } if((fCurrentEntity.count - fCurrentEntity.position) < length){ int pos = fCurrentEntity.position; invokeListeners(pos); entityChanged = load(fCurrentEntity.count, changeEntity); fCurrentEntity.position = pos; if(entityChanged)break; } if(DEBUG_SKIP_STRING){ System.out.println("fCurrentEntity.count = " + fCurrentEntity.count); System.out.println("fCurrentEntity.position = " + fCurrentEntity.position); System.out.println("length = " + length); } } //load changes the position.. set it back to the point where we started. //after loading check again. if((fCurrentEntity.count - fCurrentEntity.position) >= length) { return true; } else { return false; } } /** * Skips the specified string appearing immediately on the input. * <p> * <strong>Note:</strong> The characters are consumed only if all * the characters are skipped. * * @param s The string to skip. * * @return Returns true if the string was skipped. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. 
*/ public boolean skipString(String s) throws IOException { final int length = s.length(); //first make sure that required capacity is avaible if(arrangeCapacity(length, false)){ final int beforeSkip = fCurrentEntity.position ; int afterSkip = fCurrentEntity.position + length - 1 ; if(DEBUG_SKIP_STRING){ System.out.println("skipString,length = " + s + "," + length); System.out.println("Buffer string to be skipped = " + new String(fCurrentEntity.ch, beforeSkip, length)); } //s.charAt() indexes are 0 to 'Length -1' based. int i = length - 1 ; //check from reverse while(s.charAt(i--) == fCurrentEntity.ch[afterSkip]){ if(afterSkip-- == beforeSkip){ fCurrentEntity.position = fCurrentEntity.position + length ; fCurrentEntity.columnNumber += length; return true; } } } return false; } // skipString(String):boolean public boolean skipString(char [] s) throws IOException { final int length = s.length; //first make sure that required capacity is avaible if(arrangeCapacity(length, false)){ int beforeSkip = fCurrentEntity.position ; int afterSkip = fCurrentEntity.position + length ; if(DEBUG_SKIP_STRING){ System.out.println("skipString,length = " + new String(s) + "," + length); System.out.println("skipString,length = " + new String(s) + "," + length); } for(int i=0;i<length;i++){ if(!(fCurrentEntity.ch[beforeSkip++]==s[i])){ return false; } } fCurrentEntity.position = fCurrentEntity.position + length ; fCurrentEntity.columnNumber += length; return true; } return false; } // // Locator methods // // // Private methods // /** * Loads a chunk of text. * * @param offset The offset into the character buffer to * read the next batch of characters. * @param changeEntity True if the load should change entities * at the end of the entity, otherwise leave * the current entity in place and the entity * boundary will be signaled by the return * value. * * @returns Returns true if the entity changed as a result of this * load operation. 
*/ final boolean load(int offset, boolean changeEntity) throws IOException { if (DEBUG_BUFFER) { System.out.print("(load, "+offset+": "); print(); System.out.println(); } //maintaing the count till last load fCurrentEntity.fTotalCountTillLastLoad = fCurrentEntity.fTotalCountTillLastLoad + fCurrentEntity.fLastCount ; // read characters int length = fCurrentEntity.ch.length - offset; if (!fCurrentEntity.mayReadChunks && length > XMLEntityManager.DEFAULT_XMLDECL_BUFFER_SIZE) { length = XMLEntityManager.DEFAULT_XMLDECL_BUFFER_SIZE; } if (DEBUG_BUFFER) System.out.println(" length to try to read: "+length); int count = fCurrentEntity.reader.read(fCurrentEntity.ch, offset, length); if (DEBUG_BUFFER) System.out.println(" length actually read: "+count); // reset count and position boolean entityChanged = false; if (count != -1) { if (count != 0) { // record the last count fCurrentEntity.fLastCount = count; fCurrentEntity.count = count + offset; fCurrentEntity.position = offset; } } // end of this entity else { fCurrentEntity.count = offset; fCurrentEntity.position = offset; entityChanged = true; if (changeEntity) { //notify the entity manager about the end of entity fEntityManager.endEntity(); //return if the current entity becomes null if(fCurrentEntity == null){ throw END_OF_DOCUMENT_ENTITY; } // handle the trailing edges if (fCurrentEntity.position == fCurrentEntity.count) { load(0, true); } } } if (DEBUG_BUFFER) { System.out.print(")load, "+offset+": "); print(); System.out.println(); } return entityChanged; } // load(int, boolean):boolean /** * Creates a reader capable of reading the given input stream in * the specified encoding. * * @param inputStream The input stream. * @param encoding The encoding name that the input stream is * encoded using. If the user has specified that * Java encoding names are allowed, then the * encoding name may be a Java encoding name; * otherwise, it is an ianaEncoding name. 
* @param isBigEndian For encodings (like uCS-4), whose names cannot * specify a byte order, this tells whether the order is bigEndian. null menas * unknown or not relevant. * * @return Returns a reader. */ protected Reader createReader(InputStream inputStream, String encoding, Boolean isBigEndian) throws IOException { // normalize encoding name if (encoding == null) { encoding = "UTF-8"; } // try to use an optimized reader String ENCODING = encoding.toUpperCase(Locale.ENGLISH); if (ENCODING.equals("UTF-8")) { if (DEBUG_ENCODINGS) { System.out.println("$$$ creating UTF8Reader"); } return new UTF8Reader(inputStream, fCurrentEntity.fBufferSize, fErrorReporter.getMessageFormatter(XMLMessageFormatter.XML_DOMAIN), fErrorReporter.getLocale() ); } if (ENCODING.equals("US-ASCII")) { if (DEBUG_ENCODINGS) { System.out.println("$$$ creating ASCIIReader"); } return new ASCIIReader(inputStream, fCurrentEntity.fBufferSize, fErrorReporter.getMessageFormatter(XMLMessageFormatter.XML_DOMAIN), fErrorReporter.getLocale()); } if(ENCODING.equals("ISO-10646-UCS-4")) { if(isBigEndian != null) { boolean isBE = isBigEndian.booleanValue(); if(isBE) { return new UCSReader(inputStream, UCSReader.UCS4BE); } else { return new UCSReader(inputStream, UCSReader.UCS4LE); } } else { fErrorReporter.reportError(XMLMessageFormatter.XML_DOMAIN, "EncodingByteOrderUnsupported", new Object[] { encoding }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } if(ENCODING.equals("ISO-10646-UCS-2")) { if(isBigEndian != null) { // sould never happen with this encoding... 
boolean isBE = isBigEndian.booleanValue(); if(isBE) { return new UCSReader(inputStream, UCSReader.UCS2BE); } else { return new UCSReader(inputStream, UCSReader.UCS2LE); } } else { fErrorReporter.reportError(XMLMessageFormatter.XML_DOMAIN, "EncodingByteOrderUnsupported", new Object[] { encoding }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } // check for valid name boolean validIANA = XMLChar.isValidIANAEncoding(encoding); boolean validJava = XMLChar.isValidJavaEncoding(encoding); if (!validIANA || (fAllowJavaEncodings && !validJava)) { fErrorReporter.reportError(XMLMessageFormatter.XML_DOMAIN, "EncodingDeclInvalid", new Object[] { encoding }, XMLErrorReporter.SEVERITY_FATAL_ERROR); // NOTE: AndyH suggested that, on failure, we use ISO Latin 1 // because every byte is a valid ISO Latin 1 character. // It may not translate correctly but if we failed on // the encoding anyway, then we're expecting the content // of the document to be bad. This will just prevent an // invalid UTF-8 sequence to be detected. This is only // important when continue-after-fatal-error is turned // on. -Ac encoding = "ISO-8859-1"; } // try to use a Java reader String javaEncoding = EncodingMap.getIANA2JavaMapping(ENCODING); if (javaEncoding == null) { if(fAllowJavaEncodings) { javaEncoding = encoding; } else { fErrorReporter.reportError(XMLMessageFormatter.XML_DOMAIN, "EncodingDeclInvalid", new Object[] { encoding }, XMLErrorReporter.SEVERITY_FATAL_ERROR); // see comment above. 
javaEncoding = "ISO8859_1"; } } else if (javaEncoding.equals("ASCII")) { if (DEBUG_ENCODINGS) { System.out.println("$$$ creating ASCIIReader"); } return new ASCIIReader(inputStream, fBufferSize, fErrorReporter.getMessageFormatter(XMLMessageFormatter.XML_DOMAIN), fErrorReporter.getLocale()); } if (DEBUG_ENCODINGS) { System.out.print("$$$ creating Java InputStreamReader: encoding="+javaEncoding); if (javaEncoding == encoding) { System.out.print(" (IANA encoding)"); } System.out.println(); } return new InputStreamReader(inputStream, javaEncoding); } // createReader(InputStream,String, Boolean): Reader /** * Returns the IANA encoding name that is auto-detected from * the bytes specified, with the endian-ness of that encoding where appropriate. * * @param b4 The first four bytes of the input. * @param count The number of bytes actually read. * @return a 2-element array: the first element, an IANA-encoding string, * the second element a Boolean which is true iff the document is big endian, false * if it's little-endian, and null if the distinction isn't relevant. 
*/ protected Object[] getEncodingName(byte[] b4, int count) { if (count < 2) { return new Object[]{"UTF-8", null}; } // UTF-16, with BOM int b0 = b4[0] & 0xFF; int b1 = b4[1] & 0xFF; if (b0 == 0xFE && b1 == 0xFF) { // UTF-16, big-endian return new Object [] {"UTF-16BE", new Boolean(true)}; } if (b0 == 0xFF && b1 == 0xFE) { // UTF-16, little-endian return new Object [] {"UTF-16LE", new Boolean(false)}; } // default to UTF-8 if we don't have enough bytes to make a // good determination of the encoding if (count < 3) { return new Object [] {"UTF-8", null}; } // UTF-8 with a BOM int b2 = b4[2] & 0xFF; if (b0 == 0xEF && b1 == 0xBB && b2 == 0xBF) { return new Object [] {"UTF-8", null}; } // default to UTF-8 if we don't have enough bytes to make a // good determination of the encoding if (count < 4) { return new Object [] {"UTF-8", null}; } // other encodings int b3 = b4[3] & 0xFF; if (b0 == 0x00 && b1 == 0x00 && b2 == 0x00 && b3 == 0x3C) { // UCS-4, big endian (1234) return new Object [] {"ISO-10646-UCS-4", new Boolean(true)}; } if (b0 == 0x3C && b1 == 0x00 && b2 == 0x00 && b3 == 0x00) { // UCS-4, little endian (4321) return new Object [] {"ISO-10646-UCS-4", new Boolean(false)}; } if (b0 == 0x00 && b1 == 0x00 && b2 == 0x3C && b3 == 0x00) { // UCS-4, unusual octet order (2143) // REVISIT: What should this be? return new Object [] {"ISO-10646-UCS-4", null}; } if (b0 == 0x00 && b1 == 0x3C && b2 == 0x00 && b3 == 0x00) { // UCS-4, unusual octect order (3412) // REVISIT: What should this be? return new Object [] {"ISO-10646-UCS-4", null}; } if (b0 == 0x00 && b1 == 0x3C && b2 == 0x00 && b3 == 0x3F) { // UTF-16, big-endian, no BOM // (or could turn out to be UCS-2... // REVISIT: What should this be? return new Object [] {"UTF-16BE", new Boolean(true)}; } if (b0 == 0x3C && b1 == 0x00 && b2 == 0x3F && b3 == 0x00) { // UTF-16, little-endian, no BOM // (or could turn out to be UCS-2... 
return new Object [] {"UTF-16LE", new Boolean(false)}; } if (b0 == 0x4C && b1 == 0x6F && b2 == 0xA7 && b3 == 0x94) { // EBCDIC // a la xerces1, return CP037 instead of EBCDIC here return new Object [] {"CP037", null}; } // default encoding return new Object [] {"UTF-8", null}; } // getEncodingName(byte[],int):Object[] /** * xxx not removing endEntity() so that i remember that we need to implement it. * Ends an entity. * * @throws XNIException Thrown by entity handler to signal an error. */ // /** Prints the contents of the buffer. */ final void print() { if (DEBUG_BUFFER) { if (fCurrentEntity != null) { System.out.print('['); System.out.print(fCurrentEntity.count); System.out.print(' '); System.out.print(fCurrentEntity.position); if (fCurrentEntity.count > 0) { System.out.print(" \""); for (int i = 0; i < fCurrentEntity.count; i++) { if (i == fCurrentEntity.position) { System.out.print('^'); } char c = fCurrentEntity.ch[i]; switch (c) { case '\n': { System.out.print("\\n"); break; } case '\r': { System.out.print("\\r"); break; } case '\t': { System.out.print("\\t"); break; } case '\\': { System.out.print("\\\\"); break; } default: { System.out.print(c); } } } if (fCurrentEntity.position == fCurrentEntity.count) { System.out.print('^'); } System.out.print('"'); } System.out.print(']'); System.out.print(" @ "); System.out.print(fCurrentEntity.lineNumber); System.out.print(','); System.out.print(fCurrentEntity.columnNumber); } else { System.out.print("*NO CURRENT ENTITY*"); } } } /** * Registers the listener object and provides callback. * @param listener listener to which call back should be provided when scanner buffer * is being changed. */ public void registerListener(XMLBufferListener listener) { if(!listeners.contains(listener)) listeners.add(listener); } /** * * @param loadPos Starting position from which new data is being loaded into scanner buffer. 
*/ private void invokeListeners(int loadPos){ for(int i=0;i<listeners.size();i++){ XMLBufferListener listener =(XMLBufferListener) listeners.get(i); listener.refresh(loadPos); } } /** * Skips space characters appearing immediately on the input that would * match non-terminal S (0x09, 0x0A, 0x0D, 0x20) before end of line * normalization is performed. This is useful when scanning structures * such as the XMLDecl and TextDecl that can only contain US-ASCII * characters. * <p> * <strong>Note:</strong> The characters are consumed only if they would * match non-terminal S before end of line normalization is performed. * * @return Returns true if at least one space character was skipped. * * @throws IOException Thrown if i/o error occurs. * @throws EOFException Thrown on end of file. * * @see com.sun.org.apache.xerces.internal.util.XMLChar#isSpace */ public final boolean skipDeclSpaces() throws IOException { if (DEBUG_BUFFER) { System.out.print("(skipDeclSpaces: "); //XMLEntityManager.print(fCurrentEntity); System.out.println(); } // load more characters, if needed if (fCurrentEntity.position == fCurrentEntity.count) { load(0, true); } // skip spaces int c = fCurrentEntity.ch[fCurrentEntity.position]; if (XMLChar.isSpace(c)) { boolean external = fCurrentEntity.isExternal(); do { boolean entityChanged = false; // handle newlines if (c == '\n' || (external && c == '\r')) { fCurrentEntity.lineNumber++; fCurrentEntity.columnNumber = 1; if (fCurrentEntity.position == fCurrentEntity.count - 1) { fCurrentEntity.ch[0] = (char)c; entityChanged = load(1, true); if (!entityChanged) // the load change the position to be 1, // need to restore it when entity not changed fCurrentEntity.position = 0; } if (c == '\r' && external) { // REVISIT: Does this need to be updated to fix the // #x0D ^#x0A newline normalization problem? 
-Ac if (fCurrentEntity.ch[++fCurrentEntity.position] != '\n') { fCurrentEntity.position--; } } /*** NEWLINE NORMALIZATION *** * else { * if (fCurrentEntity.ch[fCurrentEntity.position + 1] == '\r' * && external) { * fCurrentEntity.position++; * } * } * /***/ } else { fCurrentEntity.columnNumber++; } // load more characters, if needed if (!entityChanged) fCurrentEntity.position++; if (fCurrentEntity.position == fCurrentEntity.count) { load(0, true); } } while (XMLChar.isSpace(c = fCurrentEntity.ch[fCurrentEntity.position])); if (DEBUG_BUFFER) { System.out.print(")skipDeclSpaces: "); // XMLEntityManager.print(fCurrentEntity); System.out.println(" -> true"); } return true; } // no spaces were found if (DEBUG_BUFFER) { System.out.print(")skipDeclSpaces: "); //XMLEntityManager.print(fCurrentEntity); System.out.println(" -> false"); } return false; } // skipDeclSpaces():boolean } // class XMLEntityScanner
gpl-2.0
AntumDeluge/arianne-stendhal
src/games/stendhal/client/gui/styled/StyleUtil.java
2542
/* $Id$ */ /*************************************************************************** * (C) Copyright 2003-2010 - Stendhal * *************************************************************************** *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ package games.stendhal.client.gui.styled; import java.awt.Graphics; import javax.swing.UIManager; import games.stendhal.client.sprite.Sprite; public class StyleUtil { /** * Get the current <code>Style</code>, or <code>null</code> if it * has not been set in the UIManager. * * @return Current Style, or <code>null</code> if no Style is in use */ public static Style getStyle() { Object obj = UIManager.get("StendhalStyle"); if (obj instanceof Style) { return (Style) obj; } return null; } /** * Fill an area with the background sprite of a {@link Style}. * * @param style the style to be used * @param graphics * @param x left x coordinate * @param y top y coordinate * @param width width of the area * @param height height of the area */ static void fillBackground(Style style, Graphics graphics, int x, int y, int width, int height) { // Prepare clipping graphics = graphics.create(); graphics.clipRect(x, y, width, height); Sprite image = style.getBackground(); for (int i = x; i < x + width; i += image.getWidth()) { for (int j = y; j < y + height; j += image.getHeight()) { image.draw(graphics, i, j); } } graphics.dispose(); } /** * Paint disabled text using a style's highlight and shadow colors. 
* * @param style style to be used * @param g graphics * @param text painted string * @param x left x coordinate * @param y baseline y coordinate */ static void paintDisabledText(Style style, Graphics g, String text, int x, int y) { g.setColor(style.getHighLightColor()); g.drawString(text, x + 1, y + 1); g.setColor(style.getShadowColor()); g.drawString(text, x, y); } }
gpl-2.0
AntumDeluge/arianne-stendhal
tests/games/stendhal/client/RPObjectChangeDispatcherTest.java
2360
/* $Id$ */ /*************************************************************************** * (C) Copyright 2003-2010 - Stendhal * *************************************************************************** *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ package games.stendhal.client; import static org.junit.Assert.assertTrue; import org.junit.Test; import games.stendhal.client.listener.RPObjectChangeListener; import marauroa.common.game.RPObject; public class RPObjectChangeDispatcherTest { /** * Tests for dispatchModifyRemoved. */ @Test public void testDispatchModifyRemoved() { final RPObjectChangeListener listener = new RPObjectChangeListener() { @Override public void onAdded(final RPObject object) { } @Override public void onChangedAdded(final RPObject object, final RPObject changes) { } @Override public void onChangedRemoved(final RPObject object, final RPObject changes) { } @Override public void onRemoved(final RPObject object) { } @Override public void onSlotAdded(final RPObject object, final String slotName, final RPObject sobject) { } @Override public void onSlotChangedAdded(final RPObject object, final String slotName, final RPObject sobject, final RPObject schanges) { } @Override public void onSlotChangedRemoved(final RPObject object, final String slotName, final RPObject sobject, final RPObject schanges) { } @Override public void onSlotRemoved(final RPObject object, final String slotName, final RPObject sobject) { } }; final RPObjectChangeDispatcher dispatcher = new RPObjectChangeDispatcher(listener, listener); dispatcher.dispatchModifyRemoved(null, null); assertTrue("make sure we have no NPE", 
true); } }
gpl-2.0
consulo/consulo-vim
src/main/java/com/maddyhome/idea/vim/action/motion/scroll/MotionScrollColumnLeftAction.java
1579
/* * IdeaVim - Vim emulator for IDEs based on the IntelliJ platform * Copyright (C) 2003-2016 The IdeaVim authors * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.maddyhome.idea.vim.action.motion.scroll; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.actionSystem.EditorAction; import com.maddyhome.idea.vim.VimPlugin; import com.maddyhome.idea.vim.command.Command; import com.maddyhome.idea.vim.handler.EditorActionHandlerBase; import org.jetbrains.annotations.NotNull; /** */ public class MotionScrollColumnLeftAction extends EditorAction { public MotionScrollColumnLeftAction() { super(new Handler()); } private static class Handler extends EditorActionHandlerBase { protected boolean execute(@NotNull Editor editor, @NotNull DataContext context, @NotNull Command cmd) { return VimPlugin.getMotion().scrollColumn(editor, cmd.getCount()); } } }
gpl-2.0
matheusvervloet/jabref
src/main/java/net/sf/jabref/logic/cleanup/FileLinksCleanup.java
2115
/* Copyright (C) 2003-2015 JabRef contributors. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ package net.sf.jabref.logic.cleanup; import java.util.Collections; import java.util.List; import java.util.Optional; import net.sf.jabref.Globals; import net.sf.jabref.logic.FieldChange; import net.sf.jabref.model.entry.BibEntry; import net.sf.jabref.model.entry.FileField; import net.sf.jabref.model.entry.ParsedFileField; /** * Fixes the format of the file field. For example, if the file link is empty but the description wrongly contains the path. */ public class FileLinksCleanup implements CleanupJob { @Override public List<FieldChange> cleanup(BibEntry entry) { Optional<String> oldValue = entry.getFieldOptional(Globals.FILE_FIELD); if (!oldValue.isPresent()) { return Collections.emptyList(); } List<ParsedFileField> fileList = FileField.parse(oldValue.get()); // Parsing automatically moves a single description to link, so we just need to write the fileList back again String newValue = FileField.getStringRepresentation(fileList); if (!oldValue.get().equals(newValue)) { entry.setField(Globals.FILE_FIELD, newValue); FieldChange change = new FieldChange(entry, Globals.FILE_FIELD, oldValue.get(), newValue); return Collections.singletonList(change); } return Collections.emptyList(); } }
gpl-2.0
hbbpb/stanford-corenlp-gv
src/edu/stanford/nlp/parser/lexparser/FactoredLexicon.java
20642
package edu.stanford.nlp.parser.lexparser;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import edu.stanford.nlp.international.Language;
import edu.stanford.nlp.international.arabic.ArabicMorphoFeatureSpecification;
import edu.stanford.nlp.international.french.FrenchMorphoFeatureSpecification;
import edu.stanford.nlp.international.morph.MorphoFeatureSpecification;
import edu.stanford.nlp.international.morph.MorphoFeatureSpecification.MorphoFeatureType;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.Label;
import edu.stanford.nlp.stats.ClassicCounter;
import edu.stanford.nlp.stats.Counter;
import edu.stanford.nlp.stats.Counters;
import edu.stanford.nlp.stats.TwoDimensionalIntCounter;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.Treebank;
import edu.stanford.nlp.util.Generics;
import edu.stanford.nlp.util.HashIndex;
import edu.stanford.nlp.util.Index;
import edu.stanford.nlp.util.Pair;

/**
 * A lexicon whose tagging score is factored into per-tag components: a
 * word-given-tag factor and a morphological-analysis-given-tag factor.
 * A lemma-given-tag factor exists ({@link #probLemmaTag}) but its log term is
 * currently hard-coded to 0.0 in {@link #score}, i.e. it is disabled.
 *
 * @author Spence Green
 *
 */
public class FactoredLexicon extends BaseLexicon {

  private static final long serialVersionUID = -744693222804176489L;

  private static final boolean DEBUG = false;

  // Reduces a rich morphological tag string to the subset of activated
  // features (see the strToFeatures() calls in score() and train()).
  private MorphoFeatureSpecification morphoSpec;

  // Sentinel stored when strToFeatures() produces an empty analysis.
  private static final String NO_MORPH_ANALYSIS = "xXxNONExXx";

  // Index over reduced morphological analyses (word/tag indices come from BaseLexicon).
  private Index<String> morphIndex = new HashIndex<>();

  // Seen joint counts <word,tag>, <lemma,tag>, <morph,tag>, plus per-tag
  // "unseen" counts gathered only late in training (after the
  // fractionBeforeUnseenCounting cutoff in train()).
  private TwoDimensionalIntCounter<Integer,Integer> wordTag = new TwoDimensionalIntCounter<>(40000);
  private Counter<Integer> wordTagUnseen = new ClassicCounter<>(500);

  private TwoDimensionalIntCounter<Integer,Integer> lemmaTag = new TwoDimensionalIntCounter<>(40000);
  private Counter<Integer> lemmaTagUnseen = new ClassicCounter<>(500);

  private TwoDimensionalIntCounter<Integer,Integer> morphTag = new TwoDimensionalIntCounter<>(500);
  private Counter<Integer> morphTagUnseen = new ClassicCounter<>(500);

  // Marginal counts of each tag, summed over all training tokens.
  private Counter<Integer> tagCounter = new ClassicCounter<>(300);

  /**
   * Builds a lexicon without explicit Options (delegates to BaseLexicon).
   *
   * @param morphoSpec reducer for morphological analyses
   * @param wordIndex shared word index (also used for lemmas in this class)
   * @param tagIndex shared tag index
   */
  public FactoredLexicon(MorphoFeatureSpecification morphoSpec, Index<String> wordIndex, Index<String> tagIndex) {
    super(wordIndex, tagIndex);
    this.morphoSpec = morphoSpec;
  }

  /**
   * Builds a lexicon with explicit parser Options.
   */
  public FactoredLexicon(Options op, MorphoFeatureSpecification morphoSpec, Index<String> wordIndex, Index<String> tagIndex) {
    super(op, wordIndex, tagIndex);
    this.morphoSpec = morphoSpec;
  }

  /**
   * Rule table is lemmas. So isKnown() is slightly trickier.
   * Returns the candidate taggings for a word: the stored rules for known
   * words / the boundary symbol, or the unknown-word signature rules with the
   * actual word id substituted in for unseen words.
   */
  @Override
  public Iterator<IntTaggedWord> ruleIteratorByWord(int word, int loc, String featureSpec) {
    if (word == wordIndex.indexOf(BOUNDARY)) {
      // Deterministic tagging of the boundary symbol
      return rulesWithWord[word].iterator();

    } else if (isKnown(word)) {
      // Strict lexical tagging for seen *lemma* types
      // We need to copy the word form into the rules, which currently have lemmas in them
      return rulesWithWord[word].iterator();

    } else {
      if (DEBUG) System.err.println("UNKNOWN WORD");
      // Unknown word signatures
      Set<IntTaggedWord> lexRules = Generics.newHashSet(10);
      List<IntTaggedWord> uwRules = rulesWithWord[wordIndex.indexOf(UNKNOWN_WORD)];
      // Inject the word into these rules instead of the UW signature
      for (IntTaggedWord iTW : uwRules) {
        lexRules.add(new IntTaggedWord(word, iTW.tag));
      }
      return lexRules.iterator();
    }
  }

  /**
   * Log-probability of tagging {@code word} with the tag in {@code iTW},
   * summing the log word factor and log morph factor (the lemma factor is
   * disabled below). Returns 0.0 for the boundary symbol with the boundary
   * tag, and NEGATIVE_INFINITY for taggings scoring below -100 in log space.
   *
   * @param iTW word/tag pair being scored (indices into wordIndex/tagIndex)
   * @param loc token position in the sentence (passed to the UW model)
   * @param word surface form
   * @param featureSpec rich morphological string; split into lemma + morph tag
   */
  @Override
  public float score(IntTaggedWord iTW, int loc, String word, String featureSpec) {
    final int wordId = iTW.word();
    final int tagId = iTW.tag();

    // Force 1-best path to go through the boundary symbol
    // (deterministic tagging)
    final int boundaryId = wordIndex.indexOf(BOUNDARY);
    final int boundaryTagId = tagIndex.indexOf(BOUNDARY_TAG);
    if (wordId == boundaryId && tagId == boundaryTagId) {
      return 0.0f;
    }

    // Morphological features
    String tag = tagIndex.get(iTW.tag());
    Pair<String,String> lemmaMorph = MorphoFeatureSpecification.splitMorphString(word, featureSpec);
    String lemma = lemmaMorph.first();
    int lemmaId = wordIndex.indexOf(lemma);
    String richMorphTag = lemmaMorph.second();
    String reducedMorphTag = morphoSpec.strToFeatures(richMorphTag).toString().trim();
    reducedMorphTag = reducedMorphTag.length() == 0 ? NO_MORPH_ANALYSIS : reducedMorphTag;
    int morphId = morphIndex.addToIndex(reducedMorphTag);

    // Score the factors and create the rule score p_W_T
    double p_W_Tf = Math.log(probWordTag(word, loc, wordId, tagId));
    // Lemma factor intentionally disabled; see probLemmaTag() for the implementation.
    // double p_L_T = Math.log(probLemmaTag(word, loc, tagId, lemmaId));
    double p_L_T = 0.0;
    double p_M_T = Math.log(probMorphTag(tagId, morphId));
    double p_W_T = p_W_Tf + p_L_T + p_M_T;

    if (DEBUG) {
      // String tag = tagIndex.get(tagId);
      System.err.printf("WSGDEBUG: %s --> %s %s %s || %.10f (%.5f / %.5f / %.5f)%n", tag, word, lemma,
          reducedMorphTag, p_W_T, p_W_Tf, p_L_T, p_M_T);
    }

    // Filter low probability taggings
    return p_W_T > -100.0 ? (float) p_W_T : Float.NEGATIVE_INFINITY;
  }

  /**
   * p(word | tag), computed via Bayes' rule from p(tag | word), p(word), and
   * p(tag). For words seen fewer than ~100 times the tag-given-word estimate
   * is interpolated with the late-training unseen distribution using
   * smooth[1]; fully unseen words fall back to the unknown-word model over
   * the surface-form signature.
   */
  private double probWordTag(String word, int loc, int wordId, int tagId) {
    double cW = wordTag.totalCount(wordId);
    double cWT = wordTag.getCount(wordId, tagId);

    // p(w) -- marginal word probability
    double p_W = cW / wordTag.totalCount();

    // p(t) -- marginal tag probability
    double cTseen = tagCounter.getCount(tagId);
    double p_T = cTseen / tagCounter.totalCount();

    // p(w|t), via p(t|w) * p(w) / p(t)
    double p_W_T = 0.0;
    if (cW > 0.0) {
      // Seen word
      double p_T_W = 0.0;
      if (cW > 100.0 && cWT > 0.0) {
        // High-frequency word: trust the relative frequency estimate.
        p_T_W = cWT / cW;
      } else {
        double cTunseen = wordTagUnseen.getCount(tagId);
        // TODO p_T_U is 0?
        double p_T_U = cTunseen / wordTagUnseen.totalCount();
        p_T_W = (cWT + smooth[1]*p_T_U) / (cW + smooth[1]);
      }
      p_W_T = p_T_W * p_W / p_T;

    } else {
      // Unseen word. Score based on the word signature (of the surface form)
      IntTaggedWord iTW = new IntTaggedWord(wordId, tagId);
      double c_T = tagCounter.getCount(tagId);
      p_W_T = Math.exp(getUnknownWordModel().score(iTW, loc, c_T, tagCounter.totalCount(), smooth[0], word));
    }

    return p_W_T;
  }

  /**
   * p(lemma | tag), analogous to probWordTag but over lemma counts. Currently
   * unused by score() (its contribution is hard-coded to log 1 = 0.0 there).
   * This method should never return 0!!
   */
  private double probLemmaTag(String word, int loc, int tagId, int lemmaId) {
    double cL = lemmaTag.totalCount(lemmaId);
    double cLT = lemmaTag.getCount(lemmaId, tagId);

    // p_L
    double p_L = cL / lemmaTag.totalCount();

    // p_T
    double cTseen = tagCounter.getCount(tagId);
    double p_T = cTseen / tagCounter.totalCount();

    // p_T_L
    double p_L_T = 0.0;
    if (cL > 0.0) {
      // Seen lemma
      double p_T_L = 0.0;
      if (cL > 100.0 && cLT > 0.0) {
        p_T_L = cLT / cL;
      } else {
        double cTunseen = lemmaTagUnseen.getCount(tagId);
        // TODO(spenceg): p_T_U is 0??
        double p_T_U = cTunseen / lemmaTagUnseen.totalCount();
        p_T_L = (cLT + smooth[1]*p_T_U) / (cL + smooth[1]);
      }
      p_L_T = p_T_L * p_L / p_T;

    } else {
      // Unseen lemma. Score based on the word signature (of the surface form)
      // Hack
      double cTunseen = lemmaTagUnseen.getCount(tagId);
      p_L_T = cTunseen / tagCounter.totalCount();
      // int wordId = wordIndex.indexOf(word);
      // IntTaggedWord iTW = new IntTaggedWord(wordId, tagId);
      // double c_T = tagCounter.getCount(tagId);
      // p_L_T = Math.exp(getUnknownWordModel().score(iTW, loc, c_T, tagCounter.totalCount(), smooth[0], word));
    }

    return p_L_T;
  }

  /**
   * p(morph analysis | tag). Only high-count analyses (cM > 100 with a
   * nonzero joint count) use the relative-frequency estimate; everything else
   * gets a small add-one-style constant, since unseen morph tags are rare.
   * This method should never return 0!
   */
  private double probMorphTag(int tagId, int morphId) {
    double cM = morphTag.totalCount(morphId);
    double cMT = morphTag.getCount(morphId, tagId);

    // p_M
    double p_M = cM / morphTag.totalCount();

    // p_T
    double cTseen = tagCounter.getCount(tagId);
    double p_T = cTseen / tagCounter.totalCount();

    double p_M_T = 0.0;
    if (cM > 100.0 && cMT > 0.0) {
      double p_T_M = cMT / cM;
      // else {
      //   double cTunseen = morphTagUnseen.getCount(tagId);
      //   double p_T_U = cTunseen / morphTagUnseen.totalCount();
      //   p_T_M = (cMT + smooth[1]*p_T_U) / (cM + smooth[1]);
      // }
      p_M_T = p_T_M * p_M / p_T;

    } else {
      // Unseen morphological analysis
      // Hack....unseen morph tags are extremely rare
      // Add+1 smoothing
      p_M_T = 1.0 / (morphTag.totalCount() + tagIndex.size() + 1.0);
    }

    return p_M_T;
  }

  /**
   * Collects seen and unseen sufficient statistics from the training trees.
   * Unseen counts only start accumulating after the
   * trainOptions.fractionBeforeUnseenCounting cutoff, using types that are
   * still rare (count &lt; 2) at that point as a proxy for unseen events.
   * This method should populate wordIndex, tagIndex, and morphIndex.
   *
   * @param trees annotated/binarized trees supplying the tags
   * @param rawTrees parallel raw trees supplying the morph analyses in their
   *        CoreLabels' originalText; may be null, in which case the annotated
   *        trees' own yields are used
   */
  @Override
  public void train(Collection<Tree> trees, Collection<Tree> rawTrees) {
    double weight = 1.0;
    // Train uw model on words
    uwModelTrainer.train(trees, weight);

    final double numTrees = trees.size();
    Iterator<Tree> rawTreesItr = rawTrees == null ? null : rawTrees.iterator();
    Iterator<Tree> treeItr = trees.iterator();

    // Train factored lexicon on lemmas and morph tags
    int treeId = 0;
    while (treeItr.hasNext()) {
      Tree tree = treeItr.next();
      // CoreLabels, with morph analysis in the originalText annotation
      List<Label> yield = rawTrees == null ? tree.yield() : rawTreesItr.next().yield();
      // Annotated, binarized tree for the tags (labels are usually CategoryWordTag)
      List<Label> pretermYield = tree.preTerminalYield();

      int yieldLen = yield.size();
      for (int i = 0; i < yieldLen; ++i) {
        String word = yield.get(i).value();
        int wordId = wordIndex.addToIndex(word); // Don't do anything with words
        String tag = pretermYield.get(i).value();
        int tagId = tagIndex.addToIndex(tag);
        // Use the word as backup if there is no lemma
        String featureStr = ((CoreLabel) yield.get(i)).originalText();
        Pair<String,String> lemmaMorph = MorphoFeatureSpecification.splitMorphString(word, featureStr);
        String lemma = lemmaMorph.first();
        int lemmaId = wordIndex.addToIndex(lemma);
        String richMorphTag = lemmaMorph.second();
        String reducedMorphTag = morphoSpec.strToFeatures(richMorphTag).toString().trim();
        reducedMorphTag = reducedMorphTag.isEmpty() ? NO_MORPH_ANALYSIS : reducedMorphTag;
        int morphId = morphIndex.addToIndex(reducedMorphTag);

        // Seen event counts
        wordTag.incrementCount(wordId, tagId);
        lemmaTag.incrementCount(lemmaId, tagId);
        morphTag.incrementCount(morphId, tagId);
        tagCounter.incrementCount(tagId);

        // Unseen event counts
        if (treeId > op.trainOptions.fractionBeforeUnseenCounting*numTrees) {
          if (! wordTag.firstKeySet().contains(wordId) || wordTag.getCounter(wordId).totalCount() < 2) {
            wordTagUnseen.incrementCount(tagId);
          }
          if (! lemmaTag.firstKeySet().contains(lemmaId) || lemmaTag.getCounter(lemmaId).totalCount() < 2) {
            lemmaTagUnseen.incrementCount(tagId);
          }
          if (! morphTag.firstKeySet().contains(morphId) || morphTag.getCounter(morphId).totalCount() < 2) {
            morphTagUnseen.incrementCount(tagId);
          }
        }
      }
      ++treeId;
      if (DEBUG && (treeId % 100) == 0) {
        System.err.printf("[%d]",treeId);
      }
      if (DEBUG && (treeId % 10000) == 0) {
        System.err.println();
      }
    }
  }

  /**
   * Rule table is lemmas!
   * Builds rulesWithWord from the collected wordTag counts, mixes in
   * unknown-word signature rules for sufficiently open-class tags, and gives
   * the boundary symbol its single deterministic tagging.
   */
  @Override
  protected void initRulesWithWord() {
    // Add synthetic symbols to the indices
    int unkWord = wordIndex.addToIndex(UNKNOWN_WORD);
    int boundaryWordId = wordIndex.addToIndex(BOUNDARY);
    int boundaryTagId = tagIndex.addToIndex(BOUNDARY_TAG);

    // Initialize rules table
    final int numWords = wordIndex.size();
    rulesWithWord = new List[numWords];
    for (int w = 0; w < numWords; w++) {
      rulesWithWord[w] = new ArrayList<>(1);
    }

    // Collect rules, indexed by word
    // A nullWord entry per tag is added as a marker used below to decide
    // which tags get unknown-word signature rules.
    Set<IntTaggedWord> lexRules = Generics.newHashSet(40000);
    for (int wordId : wordTag.firstKeySet()) {
      for (int tagId : wordTag.getCounter(wordId).keySet()) {
        lexRules.add(new IntTaggedWord(wordId, tagId));
        lexRules.add(new IntTaggedWord(nullWord, tagId));
      }
    }

    // Known words and signatures
    for (IntTaggedWord iTW : lexRules) {
      if (iTW.word() == nullWord) {
        // Mix in UW signature rules for open class types
        double types = uwModel.unSeenCounter().getCount(iTW);
        if (types > trainOptions.openClassTypesThreshold) {
          IntTaggedWord iTU = new IntTaggedWord(unkWord, iTW.tag);
          if (!rulesWithWord[unkWord].contains(iTU)) {
            rulesWithWord[unkWord].add(iTU);
          }
        }
      } else {
        // Known word
        rulesWithWord[iTW.word].add(iTW);
      }
    }

    System.err.print("The " + rulesWithWord[unkWord].size() + " open class tags are: [");
    for (IntTaggedWord item : rulesWithWord[unkWord]) {
      System.err.print(" " + tagIndex.get(item.tag()));
    }
    System.err.println(" ] ");

    // Boundary symbol has one tagging
    rulesWithWord[boundaryWordId].add(new IntTaggedWord(boundaryWordId, boundaryTagId));
  }

  /**
   * Convert a treebank to factored lexicon events for fast iteration in the
   * optimizer. Tokens whose tag is not in the lexicon's tag index are
   * discarded with a message.
   */
  private static List<FactoredLexiconEvent> treebankToLexiconEvents(List<Tree> treebank, FactoredLexicon lexicon) {
    List<FactoredLexiconEvent> events = new ArrayList<>(70000);
    for (Tree tree : treebank) {
      List<Label> yield = tree.yield();
      List<Label> preterm = tree.preTerminalYield();
      assert yield.size() == preterm.size();
      int yieldLen = yield.size();
      for (int i = 0; i < yieldLen; ++i) {
        String tag = preterm.get(i).value();
        int tagId = lexicon.tagIndex.indexOf(tag);
        String word = yield.get(i).value();
        int wordId = lexicon.wordIndex.indexOf(word);
        // Two checks to see if we keep this example
        if (tagId < 0) {
          System.err.println("Discarding training example: " + word + " " + tag);
          continue;
        }
        // if (counts.probWordTag(wordId, tagId) == 0.0) {
        //   System.err.println("Discarding low counts <w,t> pair: " + word + " " + tag);
        //   continue;
        // }
        String featureStr = ((CoreLabel) yield.get(i)).originalText();
        Pair<String,String> lemmaMorph = MorphoFeatureSpecification.splitMorphString(word, featureStr);
        String lemma = lemmaMorph.first();
        String richTag = lemmaMorph.second();
        // NOTE(review): no .trim() here, unlike score() and train() -- a
        // trailing-whitespace analysis would index differently; confirm intended.
        String reducedTag = lexicon.morphoSpec.strToFeatures(richTag).toString();
        reducedTag = reducedTag.length() == 0 ? NO_MORPH_ANALYSIS : reducedTag;
        int lemmaId = lexicon.wordIndex.indexOf(lemma);
        int morphId = lexicon.morphIndex.indexOf(reducedTag);
        FactoredLexiconEvent event = new FactoredLexiconEvent(wordId, tagId, lemmaId, morphId, i, word, featureStr);
        events.add(event);
      }
    }
    return events;
  }

  /**
   * Applies the language pack's tree transformation to the dev treebank and
   * converts it to lexicon events for tuning.
   */
  private static List<FactoredLexiconEvent> getTuningSet(Treebank devTreebank, FactoredLexicon lexicon,
      TreebankLangParserParams tlpp) {
    List<Tree> devTrees = new ArrayList<>(3000);
    for (Tree tree : devTreebank) {
      for (Tree subTree : tree) {
        if (!subTree.isLeaf()) {
          tlpp.transformTree(subTree, tree);
        }
      }
      devTrees.add(tree);
    }
    List<FactoredLexiconEvent> tuningSet = treebankToLexiconEvents(devTrees, lexicon);
    return tuningSet;
  }

  /**
   * Per-language lexicon Options (unknown-word signature scheme and trainer).
   * Only Arabic and French are supported.
   *
   * @throws UnsupportedOperationException for any other language
   */
  private static Options getOptions(Language language) {
    Options options = new Options();
    if (language.equals(Language.Arabic)) {
      options.lexOptions.useUnknownWordSignatures = 9;
      options.lexOptions.unknownPrefixSize = 1;
      options.lexOptions.unknownSuffixSize = 1;
      options.lexOptions.uwModelTrainer = "edu.stanford.nlp.parser.lexparser.ArabicUnknownWordModelTrainer";
    } else if (language.equals(Language.French)) {
      options.lexOptions.useUnknownWordSignatures = 1;
      options.lexOptions.unknownPrefixSize = 1;
      options.lexOptions.unknownSuffixSize = 2;
      options.lexOptions.uwModelTrainer = "edu.stanford.nlp.parser.lexparser.FrenchUnknownWordModelTrainer";
    } else {
      throw new UnsupportedOperationException();
    }
    return options;
  }

  /**
   * Standalone driver: trains the factored lexicon on a treebank and reports
   * 1-best tagging accuracy (and an error breakdown by gold tag) on a dev set.
   *
   * @param args language, comma-separated feature list, train path, dev path
   */
  public static void main(String[] args) {
    if (args.length != 4) {
      System.err.printf("Usage: java %s language features train_file dev_file%n", FactoredLexicon.class.getName());
      System.exit(-1);
    }

    // Command line options
    Language language = Language.valueOf(args[0]);
    TreebankLangParserParams tlpp = language.params;
    Treebank trainTreebank = tlpp.diskTreebank();
    trainTreebank.loadPath(args[2]);
    Treebank devTreebank = tlpp.diskTreebank();
    devTreebank.loadPath(args[3]);
    MorphoFeatureSpecification morphoSpec;
    Options options = getOptions(language);
    if (language.equals(Language.Arabic)) {
      morphoSpec = new ArabicMorphoFeatureSpecification();
      String[] languageOptions = {"-arabicFactored"};
      tlpp.setOptionFlag(languageOptions, 0);
    } else if (language.equals(Language.French)) {
      morphoSpec = new FrenchMorphoFeatureSpecification();
      String[] languageOptions = {"-frenchFactored"};
      tlpp.setOptionFlag(languageOptions, 0);
    } else {
      throw new UnsupportedOperationException();
    }
    String featureList = args[1];
    String[] features = featureList.trim().split(",");
    for (String feature : features) {
      morphoSpec.activate(MorphoFeatureType.valueOf(feature));
    }
    System.out.println("Language: " + language.toString());
    System.out.println("Features: " + args[1]);

    // Create word and tag indices
    // Save trees in a collection since the interface requires that....
    System.out.print("Loading training trees...");
    List<Tree> trainTrees = new ArrayList<>(19000);
    Index<String> wordIndex = new HashIndex<>();
    Index<String> tagIndex = new HashIndex<>();
    for (Tree tree : trainTreebank) {
      for (Tree subTree : tree) {
        if (!subTree.isLeaf()) {
          tlpp.transformTree(subTree, tree);
        }
      }
      trainTrees.add(tree);
    }
    System.out.printf("Done! (%d trees)%n", trainTrees.size());

    // Setup and train the lexicon.
    System.out.print("Collecting sufficient statistics for lexicon...");
    FactoredLexicon lexicon = new FactoredLexicon(options, morphoSpec, wordIndex, tagIndex);
    lexicon.initializeTraining(trainTrees.size());
    lexicon.train(trainTrees, null);
    lexicon.finishTraining();
    System.out.println("Done!");

    trainTrees = null;

    // Load the tuning set
    System.out.print("Loading tuning set...");
    List<FactoredLexiconEvent> tuningSet = getTuningSet(devTreebank, lexicon, tlpp);
    System.out.printf("...Done! (%d events)%n", tuningSet.size());

    // Print the probabilities that we obtain
    // TODO(spenceg): Implement tagging accuracy with FactLex
    int nCorrect = 0;
    Counter<String> errors = new ClassicCounter<>();
    for (FactoredLexiconEvent event : tuningSet) {
      Iterator<IntTaggedWord> itr = lexicon.ruleIteratorByWord(event.word(), event.getLoc(), event.featureStr());
      Counter<Integer> logScores = new ClassicCounter<>();
      boolean noRules = true;
      int goldTagId = -1;
      while (itr.hasNext()) {
        noRules = false;
        IntTaggedWord iTW = itr.next();
        if (iTW.tag() == event.tagId()) {
          System.err.print("GOLD-");
          goldTagId = iTW.tag();
        }
        float tagScore = lexicon.score(iTW, event.getLoc(), event.word(), event.featureStr());
        logScores.incrementCount(iTW.tag(), tagScore);
      }
      if (noRules) {
        System.err.printf("NO TAGGINGS: %s %s%n", event.word(), event.featureStr());
      } else {
        // Score the tagging
        int hypTagId = Counters.argmax(logScores);
        if (hypTagId == goldTagId) {
          ++nCorrect;
        } else {
          String goldTag = goldTagId < 0 ? "UNSEEN" : lexicon.tagIndex.get(goldTagId);
          errors.incrementCount(goldTag);
        }
      }
      System.err.println();
    }

    // Output accuracy
    double acc = (double) nCorrect / (double) tuningSet.size();
    System.err.printf("%n%nACCURACY: %.2f%n%n", acc*100.0);
    System.err.println("% of errors by type:");
    List<String> biggestKeys = new ArrayList<>(errors.keySet());
    Collections.sort(biggestKeys, Counters.toComparator(errors, false, true));
    Counters.normalize(errors);
    for (String key : biggestKeys) {
      System.err.printf("%s\t%.2f%n", key, errors.getCount(key)*100.0);
    }
  }
}
gpl-2.0
geneos/adempiere
base/src/org/compiere/process/BankStatementPayment.java
8489
/******************************************************************************
 * Product: Adempiere ERP & CRM Smart Business Solution                       *
 * Copyright (C) 1999-2006 ComPiere, Inc. All Rights Reserved.                *
 * This program is free software; you can redistribute it and/or modify it    *
 * under the terms version 2 of the GNU General Public License as published   *
 * by the Free Software Foundation. This program is distributed in the hope   *
 * that it will be useful, but WITHOUT ANY WARRANTY; without even the implied *
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.           *
 * See the GNU General Public License for more details.                       *
 * You should have received a copy of the GNU General Public License along    *
 * with this program; if not, write to the Free Software Foundation, Inc.,    *
 * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.                     *
 * For the text or an alternative of this public license, you may reach us    *
 * ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA        *
 * or via info@compiere.org or http://www.compiere.org/license.html           *
 *****************************************************************************/
package org.compiere.process;

import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.logging.Level;

import org.compiere.model.MBankStatement;
import org.compiere.model.MBankStatementLine;
import org.compiere.model.MInvoice;
import org.compiere.model.MPayment;
import org.compiere.model.X_I_BankStatement;
import org.compiere.util.AdempiereSystemError;
import org.compiere.util.AdempiereUserError;
import org.compiere.util.Env;

/**
 *	Create Payment from Bank Statement Info.
 *	Works from either an import record (I_BankStatement) or an actual
 *	bank statement line, building and completing an MPayment and linking
 *	it back to the source record.
 *
 *  @author Jorg Janke
 *  @version $Id: BankStatementPayment.java,v 1.3 2006/07/30 00:51:01 jjanke Exp $
 */
public class BankStatementPayment extends SvrProcess
{
	/**
	 *  Prepare - e.g., get Parameters.
	 *  This process takes no parameters; any parameter that arrives with a
	 *  non-null value is logged as unknown (the empty then-branch is the
	 *  standard Adempiere idiom for "expected and ignored").
	 */
	protected void prepare()
	{
		ProcessInfoParameter[] para = getParameter();
		for (int i = 0; i < para.length; i++)
		{
			String name = para[i].getParameterName();
			if (para[i].getParameter() == null)
				;
			else
				log.log(Level.SEVERE, "Unknown Parameter: " + name);
		}
	}	//	prepare

	/**
	 * 	Perform process.
	 *	Dispatches on the calling record's table: import rows and statement
	 *	lines are handled by the matching createPayment overload.
	 *	@return Message
	 *	@throws Exception if not successful (unknown table raises "??")
	 */
	protected String doIt () throws Exception
	{
		int Table_ID = getTable_ID();
		int Record_ID = getRecord_ID();
		log.info ("Table_ID=" + Table_ID + ", Record_ID=" + Record_ID);

		if (Table_ID == X_I_BankStatement.Table_ID)
			return createPayment (new X_I_BankStatement (getCtx(), Record_ID, get_TrxName()));
		else if (Table_ID == MBankStatementLine.Table_ID)
			return createPayment (new MBankStatementLine (getCtx(), Record_ID, get_TrxName()));
		throw new AdempiereSystemError("??");
	}	//	doIt

	/**
	 * 	Create Payment for Import.
	 * 	Skips rows that are null or already have a payment; validates that an
	 * 	invoice or business partner and a bank account are present, then
	 * 	writes the created payment's id, currency and amount back to the row.
	 *	@param ibs import bank statement
	 *	@return Message (payment document no, plus over/under amount if any)
	 *	@throws Exception if not successful
	 */
	private String createPayment (X_I_BankStatement ibs) throws Exception
	{
		if (ibs == null || ibs.getC_Payment_ID() != 0)
			return "--";
		log.fine(ibs.toString());
		if (ibs.getC_Invoice_ID() == 0 && ibs.getC_BPartner_ID() == 0)
			throw new AdempiereUserError ("@NotFound@ @C_Invoice_ID@ / @C_BPartner_ID@");
		if (ibs.getC_BankAccount_ID() == 0)
			throw new AdempiereUserError ("@NotFound@ @C_BankAccount_ID@");
		//
		// Statement-line date falls back to the statement date when absent.
		MPayment payment = createPayment (ibs.getC_Invoice_ID(), ibs.getC_BPartner_ID(),
			ibs.getC_Currency_ID(), ibs.getStmtAmt(), ibs.getTrxAmt(),
			ibs.getC_BankAccount_ID(), ibs.getStatementLineDate() == null ? ibs.getStatementDate() : ibs.getStatementLineDate(),
			ibs.getDateAcct(), ibs.getDescription(), ibs.getAD_Org_ID());
		if (payment == null)
			throw new AdempiereSystemError("Could not create Payment");
		//	update import record with the actual payment values
		ibs.setC_Payment_ID(payment.getC_Payment_ID());
		ibs.setC_Currency_ID (payment.getC_Currency_ID());
		ibs.setTrxAmt(payment.getPayAmt(true));
		// NOTE(review): return value of save() is ignored here -- a failed
		// save would go unnoticed; confirm this is acceptable for imports.
		ibs.save();
		//
		String retString = "@C_Payment_ID@ = " + payment.getDocumentNo();
		if (payment.getOverUnderAmt().signum() != 0)
			retString += " - @OverUnderAmt@=" + payment.getOverUnderAmt();
		return retString;
	}	//	createPayment - Import

	/**
	 * 	Create Payment for BankStatement.
	 * 	Same flow as the import variant, but the bank account comes from the
	 * 	parent bank statement and the line is linked via setPayment().
	 *	@param bsl bank statement Line
	 *	@return Message (payment document no, plus over/under amount if any)
	 *	@throws Exception if not successful
	 */
	private String createPayment (MBankStatementLine bsl) throws Exception
	{
		if (bsl == null || bsl.getC_Payment_ID() != 0)
			return "--";
		log.fine(bsl.toString());
		if (bsl.getC_Invoice_ID() == 0 && bsl.getC_BPartner_ID() == 0)
			throw new AdempiereUserError ("@NotFound@ @C_Invoice_ID@ / @C_BPartner_ID@");
		//	Parent statement supplies the bank account.
		MBankStatement bs = new MBankStatement (getCtx(), bsl.getC_BankStatement_ID(), get_TrxName());
		//
		MPayment payment = createPayment (bsl.getC_Invoice_ID(), bsl.getC_BPartner_ID(),
			bsl.getC_Currency_ID(), bsl.getStmtAmt(), bsl.getTrxAmt(),
			bs.getC_BankAccount_ID(), bsl.getStatementLineDate(),
			bsl.getDateAcct(), bsl.getDescription(), bsl.getAD_Org_ID());
		if (payment == null)
			throw new AdempiereSystemError("Could not create Payment");
		//	update statement
		bsl.setPayment(payment);
		// NOTE(review): save() return value ignored (see import variant).
		bsl.save();
		//
		String retString = "@C_Payment_ID@ = " + payment.getDocumentNo();
		if (payment.getOverUnderAmt().signum() != 0)
			retString += " - @OverUnderAmt@=" + payment.getOverUnderAmt();
		return retString;
	}	//	createPayment

	/**
	 * 	Create actual Payment.
	 * 	Amount precedence: TrxAmt overrides StmtAmt; with an invoice and no
	 * 	explicit amount, the invoice grand total is used. The payment is
	 * 	saved, completed (DOCACTION_Complete) and saved again.
	 *	@param C_Invoice_ID invoice
	 *	@param C_BPartner_ID partner ignored when invoice exists
	 *	@param C_Currency_ID currency
	 *	@param StmtAmt statement amount
	 *	@param TrxAmt transaction amt
	 *	@param C_BankAccount_ID bank account
	 *	@param DateTrx transaction date
	 *	@param DateAcct accounting date
	 *	@param Description description
	 *	@param AD_Org_ID org
	 *	@return payment, or null when neither invoice nor partner was given
	 */
	private MPayment createPayment (int C_Invoice_ID, int C_BPartner_ID,
		int C_Currency_ID, BigDecimal StmtAmt, BigDecimal TrxAmt,
		int C_BankAccount_ID, Timestamp DateTrx, Timestamp DateAcct,
		String Description, int AD_Org_ID)
	{
		//	Trx Amount = Payment overwrites Statement Amount if defined
		BigDecimal PayAmt = TrxAmt;
		if (PayAmt == null || Env.ZERO.compareTo(PayAmt) == 0)
			PayAmt = StmtAmt;
		//	A zero amount is only tolerable when an invoice supplies it below.
		if (C_Invoice_ID == 0
			&& (PayAmt == null || Env.ZERO.compareTo(PayAmt) == 0))
			throw new IllegalStateException ("@PayAmt@ = 0");
		if (PayAmt == null)
			PayAmt = Env.ZERO;
		//
		MPayment payment = new MPayment (getCtx(), 0, get_TrxName());
		payment.setAD_Org_ID(AD_Org_ID);
		payment.setC_BankAccount_ID(C_BankAccount_ID);
		payment.setTenderType(MPayment.TENDERTYPE_Check);
		//	Transaction date falls back to the accounting date and vice versa.
		if (DateTrx != null)
			payment.setDateTrx(DateTrx);
		else if (DateAcct != null)
			payment.setDateTrx(DateAcct);
		if (DateAcct != null)
			payment.setDateAcct(DateAcct);
		else
			payment.setDateAcct(payment.getDateTrx());
		payment.setDescription(Description);
		//
		if (C_Invoice_ID != 0)
		{
			// NOTE(review): invoice is loaded with a null trxName, i.e.
			// outside this process's transaction -- confirm intended.
			MInvoice invoice = new MInvoice (getCtx(), C_Invoice_ID, null);
			payment.setC_DocType_ID(invoice.isSOTrx());		//	Receipt
			payment.setC_Invoice_ID(invoice.getC_Invoice_ID());
			payment.setC_BPartner_ID (invoice.getC_BPartner_ID());
			if (PayAmt.signum() != 0)	//	explicit Amount
			{
				payment.setC_Currency_ID(C_Currency_ID);
				if (invoice.isSOTrx())
					payment.setPayAmt(PayAmt);
				else	//	payment is likely to be negative
					payment.setPayAmt(PayAmt.negate());
				payment.setOverUnderAmt(invoice.getGrandTotal(true).subtract(payment.getPayAmt()));
			}
			else  // set Pay Amout from Invoice
			{
				payment.setC_Currency_ID(invoice.getC_Currency_ID());
				payment.setPayAmt(invoice.getGrandTotal(true));
			}
		}
		else if (C_BPartner_ID != 0)
		{
			payment.setC_BPartner_ID(C_BPartner_ID);
			payment.setC_Currency_ID(C_Currency_ID);
			//	Sign of the amount decides the document direction.
			if (PayAmt.signum() < 0)	//	Payment
			{
				payment.setPayAmt(PayAmt.abs());
				payment.setC_DocType_ID(false);
			}
			else	//	Receipt
			{
				payment.setPayAmt(PayAmt);
				payment.setC_DocType_ID(true);
			}
		}
		else
			return null;
		payment.save();
		//	Complete the payment document; result of processIt/save ignored
		//	(NOTE(review): a failed completion would not be reported).
		payment.processIt(MPayment.DOCACTION_Complete);
		payment.save();
		return payment;
	}	//	createPayment

}	//	BankStatementPayment
gpl-2.0
tauprojects/mpp
jmh/jmh-core-ct/src/test/java/org/openjdk/jmh/ct/states/PublicAbstractNestedTest.java
1707
/* * Copyright (c) 2005, 2014, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package org.openjdk.jmh.ct.states; import org.junit.Test; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.ct.CompileTest; public class PublicAbstractNestedTest { @State(Scope.Benchmark) public abstract static class S { } @Benchmark public void test(S s) { } @Test public void compileTest() { CompileTest.assertFail(this.getClass()); } }
gpl-2.0
ekummerfeld/tetrad
tetrad-lib/src/main/java/edu/cmu/tetrad/bayes/MlBayesImObs.java
56895
/////////////////////////////////////////////////////////////////////////////// // For information as to what this class does, see the Javadoc, below. // // Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, // // 2007, 2008, 2009, 2010, 2014, 2015 by Peter Spirtes, Richard Scheines, Joseph // // Ramsey, and Clark Glymour. // // // // This program is free software; you can redistribute it and/or modify // // it under the terms of the GNU General Public License as published by // // the Free Software Foundation; either version 2 of the License, or // // (at your option) any later version. // // // // This program is distributed in the hope that it will be useful, // // but WITHOUT ANY WARRANTY; without even the implied warranty of // // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // // GNU General Public License for more details. // // // // You should have received a copy of the GNU General Public License // // along with this program; if not, write to the Free Software // // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA // /////////////////////////////////////////////////////////////////////////////// package edu.cmu.tetrad.bayes; import edu.cmu.tetrad.data.ColtDataSet; import edu.cmu.tetrad.data.DataSet; import edu.cmu.tetrad.data.DiscreteVariable; import edu.cmu.tetrad.graph.*; import edu.cmu.tetrad.util.RandomUtil; import java.io.IOException; import java.io.ObjectInputStream; import java.util.*; /** * Stores a table of probabilities for a Bayes net and, together with BayesPm * and Dag, provides methods to manipulate this table. The division of labor is * as follows. The Dag is responsible for manipulating the basic graphical * structure of the Bayes net. Dag also stores and manipulates the names of the * nodes in the graph; there are no method in either BayesPm or BayesIm to do * this. BayesPm stores and manipulates the *categories* of each node in a DAG, * considered as a variable in a Bayes net. 
The number of categories for a * variable can be changed there as well as the names for those categories. This * class, BayesIm, stores the actual probability tables which are implied by the * structures in the other two classes. The implied parameters take the form of * conditional probabilities--e.g., P(N=v0|P1=v1, P2=v2, ...), for all nodes and * all combinations of their parent categories. The set of all such * probabilities is organized in this class as a three-dimensional table of * double values. The first dimension corresponds to the nodes in the Bayes * net. For each such node, the second dimension corresponds to a flat list of * combinations of parent categories for that node. The third dimension * corresponds to the list of categories for that node itself. Two methods * allow these values to be set and retrieved: <ul> <li>getWordRatio(int * nodeIndex, int rowIndex, int colIndex); and, <li>setProbability(int * nodeIndex, int rowIndex, int colIndex, int probability). </ul> To determine * the index of the node in question, use the method <ul> <li> getNodeIndex(Node * node). </ul> To determine the index of the row in question, use the method * <ul> <li>getRowIndex(int[] parentVals). </ul> To determine the order of the * parent values for a given node so that you can build the parentVals[] array, * use the method <ul> <li> getParents(int nodeIndex) </ul> To determine the * index of a category, use the method <ul> <li> getCategoryIndex(Node node) * </ul> in BayesPm. The rest of the methods in this class are easily * understood as variants of the methods above. 
</p> <p>Thanks to Pucktada * Treeratpituk, Frank Wimberly, and Willie Wheeler for advise and earlier * versions.</p> * * @author Joseph Ramsey jdramsey@andrew.cmu.edu */ public final class MlBayesImObs implements BayesIm { static final long serialVersionUID = 23L; private static final double ALLOWABLE_DIFFERENCE = 1.0e-10; /** * Inidicates that new rows in this BayesIm should be initialized as * unknowns, forcing them to be specified manually. This is the default. */ private static final int MANUAL = 0; /** * Indicates that new rows in this BayesIm should be initialized randomly. */ private static final int RANDOM = 1; /** * The associated Bayes PM model. * * @serial */ private BayesPm bayesPm; /** * The array of nodes from the graph. Order is important. * * @serial */ private Node[] nodes; /** * The list of parents for each node from the graph. Order or nodes * corresponds to the order of nodes in 'nodes', and order in subarrays is * important. * * @serial */ private int[][] parents; /** * The array of dimensionality (number of categories for each node) for each * of the subarrays of 'parents'. * * @serial */ private int[][] parentDims; /** * The main data structure; stores the values of all of the conditional * probabilities for the Bayes net of the form P(N=v0 | P1=v1, P2=v2,...). * The first dimension is the node N, in the order of 'nodes'. The second * dimension is the row index for the table of parameters associated with * node N; the third dimension is the column index. The row index is * calculated by the function getRowIndex(int[] values) where 'values' is an * array of numerical indices for each of the parent values; the order of * the values in this array is the same as the order of node in 'parents'; * the value indices are obtained from the Bayes PM for each node. The * column is the index of the value of N, where this index is obtained from * the Bayes PM. 
* * @serial */ private double[][][] probs; // this is left in for compatibility // joint probability table private StoredCellProbsObs jpd; // a regular MlBayesIm used to create randomized CPDs // so that the values can be marginalized to a consistent observed jpd private BayesIm bayesImRandomize; // BayesIm containing only the observed variables. Only used to // 1) construct propositions (mapped from the original allowUnfaithfulness bayesIm) // in Identifiability // 2) to avoid summing over rows in jpd when only the latent variables // have changed values (only sum when all the latent variables have // value 0) // This is a MlBayesIm instead of a MlBayesImObs because otherwise // there will be an infinite loop attempting to creating the MlBayesImObs private BayesIm bayesImObs; //===============================CONSTRUCTORS=========================// /** * Constructs a new BayesIm from the given BayesPm, initializing all values * as Double.NaN ("?"). * * @param bayesPm the given Bayes PM. Carries with it the underlying graph * model. * @throws IllegalArgumentException if the array of nodes provided is not a * permutation of the nodes contained in * the bayes parametric model provided. */ public MlBayesImObs(BayesPm bayesPm) throws IllegalArgumentException { //this(bayesPm, null, MANUAL); this(bayesPm, MANUAL); } /** * Constructs a new BayesIm from the given BayesPm, initializing values * either as MANUAL or RANDOM. If initialized manually, all values will be * set to Double.NaN ("?") in each row; if initialized randomly, all values * will distributed randomly in each row. * * @param bayesPm the given Bayes PM. Carries with it the * underlying graph model. * @param initializationMethod either MANUAL or RANDOM. * @throws IllegalArgumentException if the array of nodes provided is not a * permutation of the nodes contained in * the bayes parametric model provided. 
*/ public MlBayesImObs(BayesPm bayesPm, int initializationMethod) throws IllegalArgumentException { //this(bayesPm, null, initializationMethod); if (bayesPm == null) { throw new NullPointerException("BayesPm must not be null."); } this.bayesPm = new BayesPm(bayesPm); // Get the nodes from the BayesPm. This fixes the order of the nodes // in the BayesIm, independently of any change to the BayesPm. // (This order must be maintained.) Graph graph = bayesPm.getDag(); this.nodes = graph.getNodes().toArray(new Node[graph.getNodes().size()]); // Initialize. initialize(null, initializationMethod); } /** * Constructs a new BayesIm from the given BayesPm, initializing values * either as MANUAL or RANDOM, but using values from the old BayesIm * provided where posssible. If initialized manually, all values that cannot * be retrieved from oldBayesIm will be set to Double.NaN ("?") in each such * row; if initialized randomly, all values that cannot be retrieved from * oldBayesIm will distributed randomly in each such row. * * @param bayesPm the given Bayes PM. Carries with it the * underlying graph model. * @param oldBayesIm an already-constructed BayesIm whose values * may be used where possible to initialize this * BayesIm. May be null. * @param initializationMethod either MANUAL or RANDOM. * @throws IllegalArgumentException if the array of nodes provided is not a * permutation of the nodes contained in * the bayes parametric model provided. */ public MlBayesImObs(BayesPm bayesPm, BayesIm oldBayesIm, int initializationMethod) throws IllegalArgumentException { if (bayesPm == null) { throw new NullPointerException("BayesPm must not be null."); } this.bayesPm = new BayesPm(bayesPm); // Get the nodes from the BayesPm. This fixes the order of the nodes // in the BayesIm, independently of any change to the BayesPm. // (This order must be maintained.) Graph graph = bayesPm.getDag(); this.nodes = graph.getNodes().toArray(new Node[graph.getNodes().size()]); // Initialize. 
initialize(oldBayesIm, initializationMethod); } /* * construct from a allowUnfaithfulness MlBayesIm using marginalized probaiblities, * or copy from another MlBayesImObs */ public MlBayesImObs(BayesIm bayesIm) throws IllegalArgumentException { if (bayesIm == null) { throw new NullPointerException("BayesIm must not be null."); } this.bayesPm = bayesIm.getBayesPm(); // Get the nodes from the BayesPm, fixing on an order. (This is // important; the nodes must always be in the same order for this // BayesIm.) this.nodes = new Node[bayesIm.getNumNodes()]; for (int i = 0; i < bayesIm.getNumNodes(); i++) { this.nodes[i] = bayesIm.getNode(i); } // Copy all the old values over. //initialize(bayesIm, MlBayesIm.MANUAL); initialize(bayesIm, MANUAL); } /** * Generates a simple exemplar of this class to test serialization. */ public static MlBayesImObs serializableInstance() { return new MlBayesImObs(BayesPm.serializableInstance()); } //===============================PUBLIC METHODS========================// /** * @return this PM. */ public BayesPm getBayesPm() { return bayesPm; } /** * @return the DAG. */ public Graph getDag() { return bayesPm.getDag(); } /** * @return the number of nodes in the model. */ public int getNumNodes() { return nodes.length; } /** * @return this node. */ public Node getNode(int nodeIndex) { return nodes[nodeIndex]; } /** * @param name the name of the node. * @return the node. */ public Node getNode(String name) { return getDag().getNode(name); } /** * @param node the given node. * @return the index for that node, or -1 if the node is not in the * BayesIm. */ public int getNodeIndex(Node node) { for (int i = 0; i < nodes.length; i++) { if (node == nodes[i]) { return i; } } return -1; } public List<Node> getVariables() { List<Node> variables = new LinkedList<>(); for (int i = 0; i < getNumNodes(); i++) { Node node = getNode(i); variables.add(bayesPm.getVariable(node)); } return variables; } /** * @return the list of measured variableNodes. 
*/ public List<Node> getMeasuredNodes() { return bayesPm.getMeasuredNodes(); } public List<String> getVariableNames() { List<String> variableNames = new LinkedList<>(); for (int i = 0; i < getNumNodes(); i++) { Node node = getNode(i); variableNames.add(bayesPm.getVariable(node).getName()); } return variableNames; } /** * @return this number. * @see #getNumRows */ public int getNumColumns(int nodeIndex) { return probs[nodeIndex][0].length; } /** * @return this number. * @see #getRowIndex * @see #getNumColumns */ public int getNumRows(int nodeIndex) { return probs[nodeIndex].length; } /** * @param nodeIndex the given node. * @return the number of parents for this node. */ public int getNumParents(int nodeIndex) { return parents[nodeIndex].length; } /** * @return the given parent of the given node. */ public int getParent(int nodeIndex, int parentIndex) { return parents[nodeIndex][parentIndex]; } /** * @return the dimension of the given parent for the given node. */ public int getParentDim(int nodeIndex, int parentIndex) { return parentDims[nodeIndex][parentIndex]; } /** * @return this array of parent dimensions. * @see #getParents */ public int[] getParentDims(int nodeIndex) { int[] dims = parentDims[nodeIndex]; int[] copy = new int[dims.length]; System.arraycopy(dims, 0, copy, 0, dims.length); return copy; } /** * @return (a defensive copy of) the array containing all of the parents of * a given node in the order in which they are stored internally. * @see #getParentDims */ public int[] getParents(int nodeIndex) { int[] nodeParents = parents[nodeIndex]; int[] copy = new int[nodeParents.length]; System.arraycopy(nodeParents, 0, copy, 0, nodeParents.length); return copy; } /** * @param nodeIndex the index of the node. * @param rowIndex the index of the row in question. * @return the array representing the combination of parent values for this * row. 
* @see #getNodeIndex * @see #getRowIndex */ public int[] getParentValues(int nodeIndex, int rowIndex) { int[] dims = getParentDims(nodeIndex); int[] values = new int[dims.length]; for (int i = dims.length - 1; i >= 0; i--) { values[i] = rowIndex % dims[i]; rowIndex /= dims[i]; } return values; } /** * @return the value in the probability table for the given node, at the * given row and column. */ public int getParentValue(int nodeIndex, int rowIndex, int colIndex) { return getParentValues(nodeIndex, rowIndex)[colIndex]; } /** * @param nodeIndex the index of the node in question. * @param rowIndex the row in the table for this for node which represents * the combination of parent values in question. * @param colIndex the column in the table for this node which represents * the value of the node in question. * @return the probability stored for this parameter. * @see #getNodeIndex * @see #getRowIndex */ public double getProbability(int nodeIndex, int rowIndex, int colIndex) { return probs[nodeIndex][rowIndex][colIndex]; } /** * @return the row in the table for the given node and combination of parent * values. * @see #getParentValues */ public int getRowIndex(int nodeIndex, int[] values) { int[] dim = getParentDims(nodeIndex); int rowIndex = 0; for (int i = 0; i < dim.length; i++) { rowIndex *= dim[i]; rowIndex += values[i]; } return rowIndex; } /** * Normalizes all rows in the tables associated with each of node in turn. */ public void normalizeAll() { for (int nodeIndex = 0; nodeIndex < nodes.length; nodeIndex++) { normalizeNode(nodeIndex); } } /** * Normalizes all rows in the table associated with a given node. */ public void normalizeNode(int nodeIndex) { for (int rowIndex = 0; rowIndex < getNumRows(nodeIndex); rowIndex++) { normalizeRow(nodeIndex, rowIndex); } } /** * Normalizes the given row. 
*/ public void normalizeRow(int nodeIndex, final int rowIndex) { final int numColumns = getNumColumns(nodeIndex); double total = 0.0; for (int colIndex = 0; colIndex < numColumns; colIndex++) { total += getProbability(nodeIndex, rowIndex, colIndex); } if (total != 0.0) { for (int colIndex = 0; colIndex < numColumns; colIndex++) { double probability = getProbability(nodeIndex, rowIndex, colIndex); double prob = probability / total; setProbability(nodeIndex, rowIndex, colIndex, prob); } } else { double prob = 1.0 / numColumns; for (int colIndex = 0; colIndex < numColumns; colIndex++) { setProbability(nodeIndex, rowIndex, colIndex, prob); } } } /** * Sets the probability for the given node at a given row and column in the * table for that node. To get the node index, use getNodeIndex(). To get * the row index, use getRowIndex(). To get the column index, use * getCategoryIndex() from the underlying BayesPm(). The value returned * will represent a conditional probability of the form P(N=v0 | P1=v1, * P2=v2, ... , Pn=vn), where N is the node referenced by nodeIndex, v0 is * the value referenced by colIndex, and the combination of parent values * indicated is the combination indicated by rowIndex. * * @param nodeIndex the index of the node in question. * @param rowIndex the row in the table for this for node which represents * the combination of parent values in question. * @param colIndex the column in the table for this node which represents * the value of the node in question. * @param value the desired probability to be set. 
* @see #getProbability */ public void setProbability(int nodeIndex, int rowIndex, int colIndex, double value) { if (colIndex >= getNumColumns(nodeIndex)) { throw new IllegalArgumentException("Column out of range: " + colIndex + " >= " + getNumColumns(nodeIndex)); } if (!(0.0 <= value && value <= 1.0) && !Double.isNaN(value)) { throw new IllegalArgumentException("Probability value must be " + "between 0.0 and 1.0 or Double.NaN."); } probs[nodeIndex][rowIndex][colIndex] = value; } /** * @return the index of the node with the given name in the specified * BayesIm. */ public int getCorrespondingNodeIndex(int nodeIndex, BayesIm otherBayesIm) { String nodeName = getNode(nodeIndex).getName(); Node oldNode = otherBayesIm.getNode(nodeName); return otherBayesIm.getNodeIndex(oldNode); } /** * Assigns random probability values to the child values of this row that * add to 1. * * @param nodeIndex the node for the table that this row belongs to. * @param rowIndex the index of the row. */ public void clearRow(int nodeIndex, int rowIndex) { for (int colIndex = 0; colIndex < getNumColumns(nodeIndex); colIndex++) { setProbability(nodeIndex, rowIndex, colIndex, Double.NaN); } } /** * Assigns random probability values to the child values of this row that * add to 1. * * @param nodeIndex the node for the table that this row belongs to. * @param rowIndex the index of the row. */ public void randomizeRow(int nodeIndex, int rowIndex) { final int size = getNumColumns(nodeIndex); probs[nodeIndex][rowIndex] = getRandomWeights(size); } /** * Randomizes any row in the table for the given node index that has a * Double.NaN value in it. * * @param nodeIndex the node for the table whose incomplete rows are to be * randomized. */ public void randomizeIncompleteRows(int nodeIndex) { for (int rowIndex = 0; rowIndex < getNumRows(nodeIndex); rowIndex++) { if (isIncomplete(nodeIndex, rowIndex)) { randomizeRow(nodeIndex, rowIndex); } } } /** * Randomizes every row in the table for the given node index. 
* * @param nodeIndex the node for the table to be randomized. */ public void randomizeTable(int nodeIndex) { for (int rowIndex = 0; rowIndex < getNumRows(nodeIndex); rowIndex++) { randomizeRow(nodeIndex, rowIndex); } // randomizeTable2(nodeIndex); } // private void randomizeTable2(int nodeIndex) { // for (int rowIndex = 0; rowIndex < getNumRows(nodeIndex); rowIndex++) { // if (isIncomplete(nodeIndex, rowIndex)) { // break; // } // } // // // Trying for some more power ..jdramsey 5/7/10 // List<Integer> rowIndices = new ArrayList<>(); // // for (int i = 0; i < getNumRows(nodeIndex); i++) { // rowIndices.add(i); // } // // Collections.shuffle(rowIndices); // // randomizeRow(nodeIndex, rowIndices.get(0)); // double[][] values = new double[getNumRows(nodeIndex)][getNumColumns(nodeIndex)]; // // for (int row = 0; row < getNumRows(nodeIndex); row++) { // double bestNorm = 0.0; // // for (int trial = 0; trial < 100; trial++) { // randomizeRow(nodeIndex, rowIndices.get(row)); // double totalNorm = 0.0; // // for (int _row = row - 1; _row < row; _row++) { // double norm = norm(nodeIndex, rowIndices.get(row), // rowIndices.get(_row)); // totalNorm += norm; // } // // if (totalNorm > bestNorm) { // bestNorm = totalNorm; // // for (int _row = 0; _row < getNumRows(nodeIndex); _row++) { // for (int col = 0; col < getNumColumns(nodeIndex); col++) { // values[_row][col] = getProbability(nodeIndex, _row, col); // } // } // } // } // // for (int _row = 0; _row < getNumRows(nodeIndex); _row++) { // for (int col = 0; col < getNumColumns(nodeIndex); col++) { // setProbability(nodeIndex, _row, col, values[_row][col]); // } // } // } // } // private double totalNorm(int nodeIndex, int parent, int cat1, int cat2) { // double[] sumProbs1 = new double[getNumColumns(nodeIndex)]; // double[] sumProbs2 = new double[getNumColumns(nodeIndex)]; // // for (int row = 0; row < getNumRows(nodeIndex); row++) { // for (int col = 0; col < getNumColumns(nodeIndex); col++) { // if 
(getParentValues(nodeIndex, row)[parent] == cat1) { // sumProbs1[col] += getProbability(nodeIndex, row, col); // } // } // } // // for (int row = 0; row < getNumRows(nodeIndex); row++) { // for (int col = 0; col < getNumColumns(nodeIndex); col++) { // if (getParentValues(nodeIndex, row)[parent] == cat2) { // sumProbs2[col] += getProbability(nodeIndex, row, col); // } // } // } // // double norm = 0.0; // // for (int col = 0; col < getNumColumns(nodeIndex); col++) { // double value1 = sumProbs1[col]; // double value2 = sumProbs2[col]; // double diff = value1 - value2; // double absNorm = Math.abs(diff); // norm += absNorm; // } // // return norm; // } // private double norm(int nodeIndex, int row1, int row2) { // double norm = 0.0; // // for (int col = 0; col < getNumColumns(nodeIndex); col++) { // double value1 = getProbability(nodeIndex, row1, col); // double value2 = getProbability(nodeIndex, row2, col); // double diff = value1 - value2; // double absNorm = Math.abs(diff); // // norm += diff * diff; // norm += absNorm; // } // return norm; // } /** * Randomizes every row in the table for the given node index. * * @param nodeIndex the node for the table to be randomized. */ public void clearTable(int nodeIndex) { for (int rowIndex = 0; rowIndex < getNumRows(nodeIndex); rowIndex++) { clearRow(nodeIndex, rowIndex); } } /** * @return true iff one of the values in the given row is Double.NaN. */ public boolean isIncomplete(int nodeIndex, int rowIndex) { for (int colIndex = 0; colIndex < getNumColumns(nodeIndex); colIndex++) { double p = getProbability(nodeIndex, rowIndex, colIndex); if (Double.isNaN(p)) { return true; } } return false; } /** * @return true iff any value in the table for the given node is * Double.NaN. */ public boolean isIncomplete(int nodeIndex) { for (int rowIndex = 0; rowIndex < getNumRows(nodeIndex); rowIndex++) { if (isIncomplete(nodeIndex, rowIndex)) { return true; } } return false; } /** * Simulates a sample with the given sample size. 
* * @param sampleSize the sample size. * @return the simulated sample as a DataSet. */ public DataSet simulateData(int sampleSize, boolean latentDataSaved) { if (getBayesPm().getDag().isTimeLagModel()) { return simulateTimeSeries(sampleSize); } return simulateDataHelper(sampleSize, latentDataSaved); } public DataSet simulateData(DataSet dataSet, boolean latentDataSaved) { return simulateDataHelper(dataSet, latentDataSaved); } private DataSet simulateTimeSeries(int sampleSize) { TimeLagGraph timeSeriesGraph = getBayesPm().getDag().getTimeLagGraph(); List<Node> variables = new ArrayList<>(); for (Node node : timeSeriesGraph.getLag0Nodes()) { variables.add(new DiscreteVariable(timeSeriesGraph.getNodeId(node).getName())); } List<Node> lag0Nodes = timeSeriesGraph.getLag0Nodes(); DataSet fullData = new ColtDataSet(sampleSize, variables); Graph contemporaneousDag = timeSeriesGraph.subgraph(lag0Nodes); List<Node> tierOrdering = contemporaneousDag.getCausalOrdering(); int[] tiers = new int[tierOrdering.size()]; for (int i = 0; i < tierOrdering.size(); i++) { tiers[i] = getNodeIndex(tierOrdering.get(i)); } // Construct the sample. int[] combination = new int[tierOrdering.size()]; for (int i = 0; i < sampleSize; i++) { int[] point = new int[nodes.length]; for (int nodeIndex : tiers) { double cutoff = RandomUtil.getInstance().nextDouble(); for (int k = 0; k < getNumParents(nodeIndex); k++) { combination[k] = point[getParent(nodeIndex, k)]; } int rowIndex = getRowIndex(nodeIndex, combination); double sum = 0.0; for (int k = 0; k < getNumColumns(nodeIndex); k++) { double probability = getProbability(nodeIndex, rowIndex, k); if (Double.isNaN(probability)) { throw new IllegalStateException("Some probability " + "values in the BayesIm are not filled in; " + "cannot simulate data."); } sum += probability; if (sum >= cutoff) { point[nodeIndex] = k; break; } } } } return fullData; } /** * Simulates a sample with the given sample size. * * @param sampleSize the sample size. 
* @param seed the random number generator seed allows you * recreate the simulated data by passing in the same * seed (so you don't have to store the sample data * @return the simulated sample as a DataSet. */ public DataSet simulateData(int sampleSize, long seed, boolean latentDataSaved) { RandomUtil random = RandomUtil.getInstance(); long _seed = random.getSeed(); random.setSeed(seed); DataSet dataSet = simulateData(sampleSize, latentDataSaved); random.revertSeed(_seed); return dataSet; } public DataSet simulateData(DataSet dataSet, long seed, boolean latentDataSaved) { RandomUtil random = RandomUtil.getInstance(); random.setSeed(seed); return simulateDataHelper(dataSet, latentDataSaved); } /** * Simulates a sample with the given sample size. * * @param sampleSize the sample size. * @return the simulated sample as a DataSet. */ private DataSet simulateDataHelper(int sampleSize, boolean latentDataSaved) { int numMeasured = 0; int[] map = new int[nodes.length]; List<Node> variables = new LinkedList<>(); for (int j = 0; j < nodes.length; j++) { if (!latentDataSaved && nodes[j].getNodeType() != NodeType.MEASURED) { continue; } int numCategories = bayesPm.getNumCategories(nodes[j]); List<String> categories = new LinkedList<>(); for (int k = 0; k < numCategories; k++) { categories.add(bayesPm.getCategory(nodes[j], k)); } DiscreteVariable var = new DiscreteVariable(nodes[j].getName(), categories); variables.add(var); int index = ++numMeasured - 1; map[index] = j; } DataSet dataSet = new ColtDataSet(sampleSize, variables); constructSample(sampleSize, numMeasured, dataSet, map); return dataSet; } /** * Constructs a random sample using the given already allocated data set, to * avoid allocating more memory. 
*/ private DataSet simulateDataHelper(DataSet dataSet, boolean latentDataSaved) { if (dataSet.getNumColumns() != nodes.length) { throw new IllegalArgumentException("When rewriting the old data set, " + "number of variables in data set must equal number of variables " + "in Bayes net."); } int sampleSize = dataSet.getNumRows(); int numMeasured = 0; int[] map = new int[nodes.length]; List<Node> variables = new LinkedList<>(); for (int j = 0; j < nodes.length; j++) { if (!latentDataSaved && nodes[j].getNodeType() != NodeType.MEASURED) { continue; } int numCategories = bayesPm.getNumCategories(nodes[j]); List<String> categories = new LinkedList<>(); for (int k = 0; k < numCategories; k++) { categories.add(bayesPm.getCategory(nodes[j], k)); } DiscreteVariable var = new DiscreteVariable(nodes[j].getName(), categories); variables.add(var); int index = ++numMeasured - 1; map[index] = j; } for (int i = 0; i < variables.size(); i++) { Node node = dataSet.getVariable(i); Node _node = variables.get(i); dataSet.changeVariable(node, _node); } constructSample(sampleSize, numMeasured, dataSet, map); return dataSet; } private void constructSample(int sampleSize, int numMeasured, DataSet dataSet, int[] map) { // Get a tier ordering and convert it to an int array. Graph graph = getBayesPm().getDag(); Dag dag = new Dag(graph); List<Node> tierOrdering = dag.getCausalOrdering(); int[] tiers = new int[tierOrdering.size()]; for (int i = 0; i < tierOrdering.size(); i++) { tiers[i] = getNodeIndex(tierOrdering.get(i)); } // Construct the sample. 
int[] combination = new int[nodes.length]; for (int i = 0; i < sampleSize; i++) { int[] point = new int[nodes.length]; for (int nodeIndex : tiers) { double cutoff = RandomUtil.getInstance().nextDouble(); for (int k = 0; k < getNumParents(nodeIndex); k++) { combination[k] = point[getParent(nodeIndex, k)]; } int rowIndex = getRowIndex(nodeIndex, combination); double sum = 0.0; for (int k = 0; k < getNumColumns(nodeIndex); k++) { double probability = getProbability(nodeIndex, rowIndex, k); if (Double.isNaN(probability)) { throw new IllegalStateException("Some probability " + "values in the BayesIm are not filled in; " + "cannot simulate data."); } sum += probability; if (sum >= cutoff) { point[nodeIndex] = k; break; } } } for (int j = 0; j < numMeasured; j++) { dataSet.setInt(i, j, point[map[j]]); } } } public boolean equals(Object o) { if (o == this) { return true; } if (!(o instanceof BayesIm)) { return false; } BayesIm otherIm = (BayesIm) o; if (getNumNodes() != otherIm.getNumNodes()) { return false; } for (int i = 0; i < getNumNodes(); i++) { int otherIndex = otherIm.getCorrespondingNodeIndex(i, otherIm); if (otherIndex == -1) { return false; } if (getNumColumns(i) != otherIm.getNumColumns(otherIndex)) { return false; } if (getNumRows(i) != otherIm.getNumRows(otherIndex)) { return false; } for (int j = 0; j < getNumRows(i); j++) { for (int k = 0; k < getNumColumns(i); k++) { double prob = getProbability(i, j, k); double otherProb = otherIm.getProbability(i, j, k); if (Double.isNaN(prob) && Double.isNaN(otherProb)) { continue; } if (Math.abs(prob - otherProb) > ALLOWABLE_DIFFERENCE) { return false; } } } } return true; } /** * Prints out the probability table for each variable. 
*/ public String toString() { /* for (int i = 0; i < getNumNodes(); i++) { buf.append("\n\nNode: ").append(getNode(i)); if (getNumParents(i) == 0) { buf.append("\n"); } else { buf.append("\n\n"); for (int k = 0; k < getNumParents(i); k++) { buf.append(getNode(getParent(i, k))).append("\t"); } } for (int j = 0; j < getNumRows(i); j++) { buf.append("\n"); for (int k = 0; k < getNumParents(i); k++) { buf.append(getParentValue(i, j, k)); if (k < getNumParents(i) - 1) { buf.append("\t"); } } if (getNumParents(i) > 0) { buf.append("\t"); } for (int k = 0; k < getNumColumns(i); k++) { buf.append(nf.format(getProbability(i, j, k))).append("\t"); } } } */ return "MlBayesImObs\n"; } /////////////////////////////////////////////////////// // methods added for MlBayesImObs /////////////////////////////////////////////////////// // public BayesIm getBayesImRandomize() { // return bayesImRandomize; // } public BayesIm getBayesImObs() { return bayesImObs; } public StoredCellProbsObs getJPD() { return jpd; } public int getNumRows() { return jpd.getNumRows(); } // translate rowIndex into the variable values public int[] getRowValues(int rowIndex) { return jpd.getVariableValues(rowIndex); } public double getProbability(int rowIndex) { return jpd.getCellProb(getRowValues(rowIndex)); } public void setProbability(int rowIndex, double value) { if (!(0.0 <= value && value <= 1.0) && !Double.isNaN(value)) { throw new IllegalArgumentException("Probability value must be " + "between 0.0 and 1.0 or Double.NaN."); } jpd.setCellProbability(getRowValues(rowIndex), value); } public void createRandomCellTable() { for (int nodeIndex = 0; nodeIndex < nodes.length; nodeIndex++) { bayesImRandomize.randomizeTable(nodeIndex); } jpd.createCellTable((MlBayesIm) bayesImRandomize); } //=============================PRIVATE METHODS=======================// /////////////////////////////////////// // initialization: the JPD and a BayesIm with only the observed variables // the data structure for the CPD are 
left in for compatibility private void initialize(BayesIm oldBayesIm, int initializationMethod) { parents = new int[this.nodes.length][]; parentDims = new int[this.nodes.length][]; probs = new double[this.nodes.length][][]; // initialize parents, parentDims, probs, even if probs is not used for (int nodeIndex = 0; nodeIndex < this.nodes.length; nodeIndex++) { initializeNode(nodeIndex); } /////////////////////////////////////////////////////////////////////// // used for randomizing the jpd this.bayesImRandomize = new MlBayesIm(bayesPm); /////////////////////////////////////////////////////////////////////// // construct a BayesIm with only observed variables // This is used for making Proposition with only the observed variables Dag dag = new Dag(bayesPm.getDag()); for (Node node : this.nodes) { if (node.getNodeType() == NodeType.LATENT) { dag.removeNode(node); } } BayesPm bayesPmObs = new BayesPm(dag, bayesPm); // not a MlBayesImObs to avoid an infinite loop of constructing // an MlBayesImObs inside an MlBayesImObs this.bayesImObs = new MlBayesIm(bayesPmObs); /////////////////////////////////////////////////////////////////////// // construct the jpd List<Node> obsNodes = new ArrayList<>(); for (Node node1 : nodes) { Node node = bayesPm.getVariable(node1); if (node.getNodeType() == NodeType.MEASURED) { obsNodes.add(node); } } // this does not work: different ordering of nodes // graph is the DAG restricted to only observed variables //List<Node> obsNodes = bayesPmObs.getVariables(); this.jpd = new StoredCellProbsObs(obsNodes); // this does not work: different ordering of nodes //jpd = new StoredCellProbsObs(getMeasuredNodes()); /////////////////////////////////////////////////////////////////////// // initialize the jpd if (initializationMethod == RANDOM) { // this does not work: assigning arbitrary random values to the jpd // will violate the constraints imposed by the graphical structure //jpd.createRandomCellTable(); if (oldBayesIm == null) { 
createRandomCellTable(); } else if (oldBayesIm.getClass().getSimpleName().equals("MlBayesIm")) { jpd.createCellTable((MlBayesIm) oldBayesIm); } else if (oldBayesIm.getClass().getSimpleName().equals("MlBayesImObs")) { if (bayesPm.equals(oldBayesIm.getBayesPm())) { this.jpd.createCellTable((MlBayesImObs) oldBayesIm); } else { createRandomCellTable(); } } } else if (initializationMethod == MANUAL) { if (oldBayesIm == null) { jpd.clearCellTable(); } else if (oldBayesIm.getClass().getSimpleName().equals("MlBayesIm")) { jpd.createCellTable((MlBayesIm) oldBayesIm); } else if (oldBayesIm.getClass().getSimpleName().equals("MlBayesImObs")) { if (bayesPm.equals(oldBayesIm.getBayesPm())) { this.jpd.createCellTable((MlBayesImObs) oldBayesIm); } else { jpd.clearCellTable(); } } } else { throw new IllegalArgumentException("Unrecognized state."); } } /** * This method initializes the node indicated. */ private void initializeNode(int nodeIndex) { Node node = nodes[nodeIndex]; // Set up parents array. Should store the parents of // each node as ints in a particular order. Graph graph = getBayesPm().getDag(); List<Node> parentList = graph.getParents(node); int[] parentArray = new int[parentList.size()]; for (int i = 0; i < parentList.size(); i++) { parentArray[i] = getNodeIndex(parentList.get(i)); } // Sort parent array. Arrays.sort(parentArray); parents[nodeIndex] = parentArray; // Setup dimensions array for parents. int[] dims = new int[parentArray.length]; for (int i = 0; i < dims.length; i++) { Node parNode = nodes[parentArray[i]]; dims[i] = getBayesPm().getNumCategories(parNode); } // Calculate dimensions of table. 
int numRows = 1; for (int dim : dims) { if (numRows > 1000000 /* Integer.MAX_VALUE / dim*/) { throw new IllegalArgumentException( "The number of rows in the " + "conditional probability table for " + nodes[nodeIndex] + " is greater than 1,000,000 and cannot be " + "represented."); } numRows *= dim; } int numCols = getBayesPm().getNumCategories(node); parentDims[nodeIndex] = dims; probs[nodeIndex] = new double[numRows][numCols]; /* // Initialize each row. for (int rowIndex = 0; rowIndex < numRows; rowIndex++) { if (oldBayesIm == null ) { overwriteRow(nodeIndex, rowIndex, initializationMethod); } else { retainOldRowIfPossible(nodeIndex, rowIndex, oldBayesIm, initializationMethod); } } */ } // private void overwriteRow(int nodeIndex, int rowIndex, // int initializationMethod) { // if (initializationMethod == RANDOM) { // randomizeRow(nodeIndex, rowIndex); // } else if (initializationMethod == MANUAL) { // initializeRowAsUnknowns(nodeIndex, rowIndex); // } else { // throw new IllegalArgumentException("Unrecognized state."); // } // } /** * This method chooses random probabilities for a row which add up to 1.0. * Random doubles are drawn from a random distribution, and the final row is * then normalized. * * @param size the length of the row. * @return an array with randomly distributed probabilities of this length. * @see #randomizeRow */ private static double[] getRandomWeights(int size) { assert size >= 0; double[] row = new double[size]; double sum = 0.0; // If I put most of the mass in each row on one of the categories, // I get lovely classification results for Bayes nets with all // 4-category variables. To include a bias, set 'bias' to a positive // number. 
double bias = 0; int randomCell = RandomUtil.getInstance().nextInt(size); // if (RandomUtil.getInstance().nextDouble() < 0.5) { // randomCell = -1; // } for (int i = 0; i < size; i++) { row[i] = RandomUtil.getInstance().nextDouble(); if (i == randomCell) { row[i] += bias; } sum += row[i]; } for (int i = 0; i < size; i++) { row[i] /= sum; } return row; } // private void initializeRowAsUnknowns(int nodeIndex, int rowIndex) { // final int size = getNumColumns(nodeIndex); // double[] row = new double[size]; // Arrays.fill(row, Double.NaN); // probs[nodeIndex][rowIndex] = row; // } // /** // * This method initializes the node indicated. // */ // private void retainOldRowIfPossible(int nodeIndex, int rowIndex, // BayesIm oldBayesIm, int initializationMethod) { // // Set<Node> newParents = new HashSet<Node>(getBayesPm().getDag().getParents(node)); // // Set<Node> oldParents = new HashSet<Node>(oldBayesIm.getBayesPm().getDag().getParents(node)); // // int method = newParents == oldParents ? initializationMethod : MlBayesIm.MANUAL; // // // int oldNodeIndex = getCorrespondingNodeIndex(nodeIndex, oldBayesIm); // // if (oldNodeIndex == -1) { // overwriteRow(nodeIndex, rowIndex, initializationMethod); // } else if (getNumColumns(nodeIndex) != oldBayesIm.getNumColumns(oldNodeIndex)) { // overwriteRow(nodeIndex, rowIndex, initializationMethod); // // } else if (parentsChanged(nodeIndex, this, oldBayesIm)) { // // overwriteRow(nodeIndex, rowIndex, initializationMethod); // } else { // int oldRowIndex = getUniqueCompatibleOldRow(nodeIndex, rowIndex, oldBayesIm); // // if (oldRowIndex >= 0) { // copyValuesFromOldToNew(oldNodeIndex, oldRowIndex, nodeIndex, // rowIndex, oldBayesIm); // } else { // overwriteRow(nodeIndex, rowIndex, initializationMethod); // } // } // } // // private boolean parentsChanged(int nodeIndex, BayesIm bayesIm, BayesIm oldBayesIm) { // int[] dims = bayesIm.getParents(nodeIndex); // int[] oldDims = oldBayesIm.getParents(nodeIndex); // // if (dims.length != 
oldDims.length) { // return false; // } // // for (int i = 0; i < dims.length; i++) { // if (dims[i] != oldDims[i]) { // return false; // } // } // // return true; // } // /** // * @return the unique rowIndex in the old BayesIm for the given node that is // * compatible with the given rowIndex in the new BayesIm for that node, if // * one exists. Otherwise, returns -1. A compatible rowIndex is one in which // * all the parents that the given node has in common between the old BayesIm // * and the new BayesIm are assigned the values they have in the new // * rowIndex. If a parent node is removed in the new BayesIm, there may be // * more than one such compatible rowIndex in the old BayesIm, in which case // * -1 is returned. Likewise, there may be no compatible rows, in which case // * -1 is returned. // */ // private int getUniqueCompatibleOldRow(int nodeIndex, int rowIndex, // BayesIm oldBayesIm) { // int oldNodeIndex = getCorrespondingNodeIndex(nodeIndex, oldBayesIm); // int oldNumParents = oldBayesIm.getNumParents(oldNodeIndex); // // int[] oldParentValues = new int[oldNumParents]; // Arrays.fill(oldParentValues, -1); // // int[] parentValues = getParentValues(nodeIndex, rowIndex); // // // Go through each parent of the node in the new BayesIm. // for (int i = 0; i < getNumParents(nodeIndex); i++) { // // // Get the index of the parent in the new graph and in the old // // graph. If it's no longer in the new graph, skip to the next // // parent. 
// int parentNodeIndex = getParent(nodeIndex, i); // int oldParentNodeIndex = // getCorrespondingNodeIndex(parentNodeIndex, oldBayesIm); // int oldParentIndex = -1; // // for (int j = 0; j < oldBayesIm.getNumParents(oldNodeIndex); j++) { // if (oldParentNodeIndex == oldBayesIm.getParent(oldNodeIndex, j)) { // oldParentIndex = j; // break; // } // } // // if (oldParentIndex == -1 || // oldParentIndex >= oldBayesIm.getNumParents(oldNodeIndex)) { // return -1; // } // // // Look up that value index for the new BayesIm for that parent. // // If it was a valid value index in the old BayesIm, record // // that value in oldParentValues. Otherwise return -1. // int newParentValue = parentValues[i]; // int oldParentDim = // oldBayesIm.getParentDim(oldNodeIndex, oldParentIndex); // // if (newParentValue < oldParentDim) { // oldParentValues[oldParentIndex] = newParentValue; // } else { // return -1; // } // } // // // // Go through each parent of the node in the new BayesIm. // // for (int i = 0; i < oldBayesIm.getNumParents(oldNodeIndex); i++) { // // // // // Get the index of the parent in the new graph and in the old // // // graph. If it's no longer in the new graph, skip to the next // // // parent. // // int oldParentNodeIndex = oldBayesIm.getParent(oldNodeIndex, i); // // int parentNodeIndex = // // oldBayesIm.getCorrespondingNodeIndex(oldParentNodeIndex, this); // // int parentIndex = -1; // // // // for (int j = 0; j < this.getNumParents(nodeIndex); j++) { // // if (parentNodeIndex == this.getParent(nodeIndex, j)) { // // parentIndex = j; // // break; // // } // // } // // // // if (parentIndex == -1 || // // parentIndex >= this.getNumParents(nodeIndex)) { // // continue; // // } // // // // // Look up that value index for the new BayesIm for that parent. // // // If it was a valid value index in the old BayesIm, record // // // that value in oldParentValues. Otherwise return -1. 
// // int parentValue = oldParentValues[i]; // // int parentDim = // // this.getParentDim(nodeIndex, parentIndex); // // // // if (parentValue < parentDim) { // // oldParentValues[parentIndex] = oldParentValue; // // } else { // // return -1; // // } // // } // // // If there are any -1's in the combination at this point, return -1. // for (int oldParentValue : oldParentValues) { // if (oldParentValue == -1) { // return -1; // } // } // // // Otherwise, return the combination, which will be a row in the // // old BayesIm. // return oldBayesIm.getRowIndex(oldNodeIndex, oldParentValues); // } // private void copyValuesFromOldToNew(int oldNodeIndex, int oldRowIndex, // int nodeIndex, int rowIndex, BayesIm oldBayesIm) { // if (getNumColumns(nodeIndex) != oldBayesIm.getNumColumns(oldNodeIndex)) { // throw new IllegalArgumentException("It's only possible to copy " + // "one row of probability values to another in a Bayes IM " + // "if the number of columns in the table are the same."); // } // // for (int colIndex = 0; colIndex < getNumColumns(nodeIndex); colIndex++) { // double prob = oldBayesIm.getProbability(oldNodeIndex, oldRowIndex, // colIndex); // setProbability(nodeIndex, rowIndex, colIndex, prob); // } // } /** * Adds semantic checks to the default deserialization method. This method * must have the standard signature for a readObject method, and the body of * the method must begin with "s.defaultReadObject();". Other than that, any * semantic checks can be specified and do not need to stay the same from * version to version. A readObject method of this form may be added to any * class, even if Tetrad sessions were previously saved out using a version * of the class that didn't include it. (That's what the * "s.defaultReadObject();" is for. See J. Bloch, Effective Java, for help. 
* * @throws java.io.IOException * @throws ClassNotFoundException */ private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException { s.defaultReadObject(); if (bayesPm == null) { throw new NullPointerException(); } if (nodes == null) { throw new NullPointerException(); } if (parents == null) { throw new NullPointerException(); } if (parentDims == null) { throw new NullPointerException(); } if (probs == null) { throw new NullPointerException(); } } }
gpl-2.0
thermatk/Telegram-FOSS
TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/trackselection/AdaptiveTrackSelection.java
11546
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.telegram.messenger.exoplayer2.trackselection;

import android.os.SystemClock;
import org.telegram.messenger.exoplayer2.C;
import org.telegram.messenger.exoplayer2.Format;
import org.telegram.messenger.exoplayer2.source.TrackGroup;
import org.telegram.messenger.exoplayer2.source.chunk.MediaChunk;
import org.telegram.messenger.exoplayer2.upstream.BandwidthMeter;
import java.util.List;

/**
 * A bandwidth based adaptive {@link TrackSelection}, whose selected track is updated to be the one
 * of highest quality given the current network conditions and the state of the buffer.
 */
public class AdaptiveTrackSelection extends BaseTrackSelection {

  /**
   * Factory for {@link AdaptiveTrackSelection} instances.
   */
  public static final class Factory implements TrackSelection.Factory {

    private final BandwidthMeter bandwidthMeter;
    private final int maxInitialBitrate;
    private final int minDurationForQualityIncreaseMs;
    private final int maxDurationForQualityDecreaseMs;
    private final int minDurationToRetainAfterDiscardMs;
    private final float bandwidthFraction;

    /**
     * Creates a factory using the default adaptation thresholds.
     *
     * @param bandwidthMeter Provides an estimate of the currently available bandwidth.
     */
    public Factory(BandwidthMeter bandwidthMeter) {
      this(bandwidthMeter, DEFAULT_MAX_INITIAL_BITRATE,
          DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS,
          DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS,
          DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, DEFAULT_BANDWIDTH_FRACTION);
    }

    /**
     * @param bandwidthMeter Provides an estimate of the currently available bandwidth.
     * @param maxInitialBitrate The maximum bitrate in bits per second that should be assumed
     *     when a bandwidth estimate is unavailable.
     * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for
     *     the selected track to switch to one of higher quality.
     * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for
     *     the selected track to switch to one of lower quality.
     * @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher
     *     quality, the selection may indicate that media already buffered at the lower quality
     *     can be discarded to speed up the switch. This is the minimum duration of media that
     *     must be retained at the lower quality.
     * @param bandwidthFraction The fraction of the available bandwidth that the selection should
     *     consider available for use. Setting to a value less than 1 is recommended to account
     *     for inaccuracies in the bandwidth estimator.
     */
    public Factory(BandwidthMeter bandwidthMeter, int maxInitialBitrate,
        int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs,
        int minDurationToRetainAfterDiscardMs, float bandwidthFraction) {
      this.bandwidthMeter = bandwidthMeter;
      this.maxInitialBitrate = maxInitialBitrate;
      this.minDurationForQualityIncreaseMs = minDurationForQualityIncreaseMs;
      this.maxDurationForQualityDecreaseMs = maxDurationForQualityDecreaseMs;
      this.minDurationToRetainAfterDiscardMs = minDurationToRetainAfterDiscardMs;
      this.bandwidthFraction = bandwidthFraction;
    }

    @Override
    public AdaptiveTrackSelection createTrackSelection(TrackGroup group, int... tracks) {
      return new AdaptiveTrackSelection(group, tracks, bandwidthMeter, maxInitialBitrate,
          minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs,
          minDurationToRetainAfterDiscardMs, bandwidthFraction);
    }

  }

  public static final int DEFAULT_MAX_INITIAL_BITRATE = 800000;
  public static final int DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS = 10000;
  public static final int DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS = 25000;
  public static final int DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS = 25000;
  public static final float DEFAULT_BANDWIDTH_FRACTION = 0.75f;

  private final BandwidthMeter bandwidthMeter;
  private final int maxInitialBitrate;
  private final long minDurationForQualityIncreaseUs;
  private final long maxDurationForQualityDecreaseUs;
  private final long minDurationToRetainAfterDiscardUs;
  private final float bandwidthFraction;

  // Index of the currently selected track, and the reason it was selected.
  private int selectedIndex;
  private int reason;

  /**
   * Creates a selection using the default adaptation thresholds.
   *
   * @param group The {@link TrackGroup}.
   * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be
   *     empty. May be in any order.
   * @param bandwidthMeter Provides an estimate of the currently available bandwidth.
   */
  public AdaptiveTrackSelection(TrackGroup group, int[] tracks, BandwidthMeter bandwidthMeter) {
    this(group, tracks, bandwidthMeter, DEFAULT_MAX_INITIAL_BITRATE,
        DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS,
        DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS,
        DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, DEFAULT_BANDWIDTH_FRACTION);
  }

  /**
   * @param group The {@link TrackGroup}.
   * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be
   *     empty. May be in any order.
   * @param bandwidthMeter Provides an estimate of the currently available bandwidth.
   * @param maxInitialBitrate The maximum bitrate in bits per second that should be assumed when
   *     a bandwidth estimate is unavailable.
   * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for
   *     the selected track to switch to one of higher quality.
   * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for
   *     the selected track to switch to one of lower quality.
   * @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher
   *     quality, the selection may indicate that media already buffered at the lower quality
   *     can be discarded to speed up the switch. This is the minimum duration of media that
   *     must be retained at the lower quality.
   * @param bandwidthFraction The fraction of the available bandwidth that the selection should
   *     consider available for use. Setting to a value less than 1 is recommended to account
   *     for inaccuracies in the bandwidth estimator.
   */
  public AdaptiveTrackSelection(TrackGroup group, int[] tracks, BandwidthMeter bandwidthMeter,
      int maxInitialBitrate, long minDurationForQualityIncreaseMs,
      long maxDurationForQualityDecreaseMs, long minDurationToRetainAfterDiscardMs,
      float bandwidthFraction) {
    super(group, tracks);
    this.bandwidthMeter = bandwidthMeter;
    this.maxInitialBitrate = maxInitialBitrate;
    // Thresholds are supplied in milliseconds but compared against
    // microsecond buffer durations, so convert once up front.
    this.minDurationForQualityIncreaseUs = minDurationForQualityIncreaseMs * 1000L;
    this.maxDurationForQualityDecreaseUs = maxDurationForQualityDecreaseMs * 1000L;
    this.minDurationToRetainAfterDiscardUs = minDurationToRetainAfterDiscardMs * 1000L;
    this.bandwidthFraction = bandwidthFraction;
    // Long.MIN_VALUE disables blacklist checks for the initial selection.
    selectedIndex = determineIdealSelectedIndex(Long.MIN_VALUE);
    reason = C.SELECTION_REASON_INITIAL;
  }

  @Override
  public void updateSelectedTrack(long bufferedDurationUs) {
    long nowMs = SystemClock.elapsedRealtime();

    // Remember the previous choice, then compute the ideal one for the
    // current bandwidth estimate.
    int previousIndex = selectedIndex;
    selectedIndex = determineIdealSelectedIndex(nowMs);
    if (selectedIndex == previousIndex) {
      return;
    }

    // Unless the previous track has been blacklisted, only switch when the
    // buffer state makes the switch safe.
    if (!isBlacklisted(previousIndex, nowMs)) {
      Format previousFormat = getFormat(previousIndex);
      Format idealFormat = getFormat(selectedIndex);
      boolean switchingUp = idealFormat.bitrate > previousFormat.bitrate;
      boolean switchingDown = idealFormat.bitrate < previousFormat.bitrate;
      if (switchingUp && bufferedDurationUs < minDurationForQualityIncreaseUs) {
        // Too little buffered to risk a switch to higher quality; defer it.
        selectedIndex = previousIndex;
      } else if (switchingDown && bufferedDurationUs >= maxDurationForQualityDecreaseUs) {
        // Plenty buffered, so the switch to lower quality can be deferred.
        selectedIndex = previousIndex;
      }
    }

    // Record that the change (if any) was driven by adaptation.
    if (selectedIndex != previousIndex) {
      reason = C.SELECTION_REASON_ADAPTIVE;
    }
  }

  @Override
  public int getSelectedIndex() {
    return selectedIndex;
  }

  @Override
  public int getSelectionReason() {
    return reason;
  }

  @Override
  public Object getSelectionData() {
    // This selection carries no opaque per-track data.
    return null;
  }

  @Override
  public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
    if (queue.isEmpty()) {
      return 0;
    }
    int queueSize = queue.size();
    long bufferedDurationUs = queue.get(queueSize - 1).endTimeUs - playbackPositionUs;
    if (bufferedDurationUs < minDurationToRetainAfterDiscardUs) {
      // Not enough buffered beyond the retention window; keep everything.
      return queueSize;
    }
    int idealSelectedIndex = determineIdealSelectedIndex(SystemClock.elapsedRealtime());
    Format idealFormat = getFormat(idealSelectedIndex);
    // If the chunks contain video, discard from the first SD chunk beyond
    // minDurationToRetainAfterDiscardUs whose resolution and bitrate are both lower than the
    // ideal track.
    for (int chunkIndex = 0; chunkIndex < queueSize; chunkIndex++) {
      MediaChunk chunk = queue.get(chunkIndex);
      Format chunkFormat = chunk.trackFormat;
      long durationBeforeThisChunkUs = chunk.startTimeUs - playbackPositionUs;
      boolean beyondRetention = durationBeforeThisChunkUs >= minDurationToRetainAfterDiscardUs;
      boolean isSubIdealSd = chunkFormat.bitrate < idealFormat.bitrate
          && chunkFormat.height != Format.NO_VALUE && chunkFormat.height < 720
          && chunkFormat.width != Format.NO_VALUE && chunkFormat.width < 1280
          && chunkFormat.height < idealFormat.height;
      if (beyondRetention && isSubIdealSd) {
        return chunkIndex;
      }
    }
    return queueSize;
  }

  /**
   * Computes the ideal selected index ignoring buffer health.
   *
   * @param nowMs The current time in the timebase of {@link SystemClock#elapsedRealtime()}, or
   *     {@link Long#MIN_VALUE} to ignore blacklisting.
   */
  private int determineIdealSelectedIndex(long nowMs) {
    long bitrateEstimate = bandwidthMeter.getBitrateEstimate();
    long effectiveBitrate = bitrateEstimate == BandwidthMeter.NO_ESTIMATE
        ? maxInitialBitrate : (long) (bitrateEstimate * bandwidthFraction);
    // Tracks are in decreasing bandwidth order, so the first affordable
    // non-blacklisted track is the ideal one. If none is affordable, fall
    // back to the lowest-bitrate non-blacklisted track seen.
    int fallbackIndex = 0;
    for (int i = 0; i < length; i++) {
      boolean blacklistingIgnored = nowMs == Long.MIN_VALUE;
      if (blacklistingIgnored || !isBlacklisted(i, nowMs)) {
        Format format = getFormat(i);
        if (format.bitrate <= effectiveBitrate) {
          return i;
        }
        fallbackIndex = i;
      }
    }
    return fallbackIndex;
  }

}
gpl-2.0
AntumDeluge/arianne-stendhal
src/games/stendhal/server/script/Debuggera.java
13714
/* $Id$ */ /*************************************************************************** * (C) Copyright 2003-2010 - Stendhal * *************************************************************************** *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ package games.stendhal.server.script; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import org.apache.log4j.Logger; import games.stendhal.common.Direction; import games.stendhal.common.parser.Sentence; import games.stendhal.server.core.engine.SingletonRepository; import games.stendhal.server.core.engine.StendhalRPWorld; import games.stendhal.server.core.engine.StendhalRPZone; import games.stendhal.server.core.events.TurnListener; import games.stendhal.server.core.scripting.ScriptImpl; import games.stendhal.server.core.scripting.ScriptingNPC; import games.stendhal.server.core.scripting.ScriptingSandbox; import games.stendhal.server.entity.npc.ChatAction; import games.stendhal.server.entity.npc.ConversationStates; import games.stendhal.server.entity.npc.EventRaiser; import games.stendhal.server.entity.npc.SpeakerNPC; import games.stendhal.server.entity.npc.action.SetQuestAction; import games.stendhal.server.entity.npc.condition.AdminCondition; import games.stendhal.server.entity.npc.condition.QuestInStateCondition; import games.stendhal.server.entity.npc.condition.QuestNotInStateCondition; import games.stendhal.server.entity.player.Player; import marauroa.common.game.IRPZone; /** * code for abstract/int_admin_playground which creates a NPC to help testers. 
* * @author hendrik */ public class Debuggera extends ScriptImpl { // boolean debuggeraEnabled; // private static final class DebuggeraEnablerAction implements ChatAction { // boolean enabled; // // public DebuggeraEnablerAction(final boolean enable) { // this.enabled = enable; // } // // public void fire(final Player player, final Sentence sentence, final EventRaiser raiser) { // // TODO debuggeraEnabled = enabled; // if (enabled) { // raiser.say("Thanks."); // } else { // raiser.say("OK, I will not talk to strangers"); // } // } // } private static final class QuestsAction implements ChatAction { ScriptingSandbox sandbox; public QuestsAction(final ScriptingSandbox sandbox) { this.sandbox = sandbox; } @Override public void fire(final Player player, final Sentence sentence, final EventRaiser raiser) { // list quest final StringBuilder sb = new StringBuilder("Your quest states are:"); final List<String> quests = player.getQuests(); for (final String quest : quests) { sb.append("\r\n" + quest + " = " + player.getQuest(quest)); } // change quest String quest = sentence.getOriginalText(); if (quest != null) { int pos = quest.indexOf("="); if (pos > -1) { final String value = quest.substring(pos + 1); quest = quest.substring(0, pos); sb.append("\r\n\r\nSet \"" + quest + "\" to \"" + value + "\""); sandbox.addGameEvent(player.getName(), "alter_quest", Arrays.asList(player.getName(), quest, value)); player.setQuest(quest.trim(), value.trim()); } } raiser.say(sb.toString()); } } private static final class TeleportNPCAction implements ChatAction { ScriptingSandbox sandbox; public TeleportNPCAction(final ScriptingSandbox sandbox) { this.sandbox = sandbox; } @Override public void fire(final Player player, final Sentence sentence, final EventRaiser raiser) { SingletonRepository.getTurnNotifier().notifyInTurns(0, new TeleportScriptAction(player, (SpeakerNPC) raiser.getEntity(), sandbox)); } } static class TeleportScriptAction implements TurnListener { private final 
ScriptingSandbox sandbox; private final Player player; private final SpeakerNPC engine; // private Sentence sentence; // // private int destIdx = 0; private int counter; private int inversedSpeed = 3; private int textCounter; private boolean beamed; // syntax-error: private final String[] MAGIC_PHRASE = {"Across the // land,", "Across the sea.", "Friends forever,", "We will always be."}; public TeleportScriptAction(final Player player, final SpeakerNPC engine, final ScriptingSandbox sandbox) { this.player = player; this.engine = engine; // this.sentence = sentence; this.sandbox = sandbox; } @Override public void onTurnReached(final int currentTurn) { boolean keepRunning = true; counter++; if (beamed) { // slow down if (counter % inversedSpeed == 0) { Direction direction = player.getDirection(); direction = Direction.build((direction.get()) % 4 + 1); player.setDirection(direction); sandbox.modify(player); if (direction == Direction.DOWN) { inversedSpeed++; if (inversedSpeed == 3) { keepRunning = false; } } } } else { // speed up if (counter % inversedSpeed == 0) { Direction direction = player.getDirection(); direction = Direction.build((direction.get()) % 4 + 1); player.setDirection(direction); sandbox.modify(player); if (direction == Direction.DOWN) { switch (textCounter) { case 0: engine.say("Across the land,"); inversedSpeed--; break; case 1: engine.say("Across the sea."); inversedSpeed--; break; case 2: engine.say("Friends forever,"); break; case 3: engine.say("We will always be."); break; default: // Teleport to a near by spot final StendhalRPZone zone = sandbox.getZone(player); final int x = player.getX(); final int y = player.getY(); final int[][] tele_offsets = { { 7, 7 }, { 7, -7 }, { -7, 7 }, { -7, -7 } }; final Random random = new Random(); for (int i = 0; i < 3; i++) { final int r = random.nextInt(tele_offsets.length); if (player.teleport(zone, x + tele_offsets[r][0], y + tele_offsets[r][1], null, null)) { break; } } inversedSpeed = 1; beamed = true; 
break; } textCounter++; } } } if (keepRunning) { SingletonRepository.getTurnNotifier().notifyInTurns(0, this); } } } public class SightseeingAction implements ChatAction, TurnListener { private Player player; private final List<String> zones; private int counter; public SightseeingAction(final StendhalRPWorld world) { // this.sandbox = sandbox; zones = new ArrayList<String>(); for (final IRPZone irpZone : world) { final StendhalRPZone zone = (StendhalRPZone) irpZone; zones.add(zone.getName()); } } @Override public void fire(final Player player, final Sentence sentence, final EventRaiser raiser) { this.player = player; counter = 0; player.sendPrivateText("Let's start"); SingletonRepository.getTurnNotifier().notifyInTurns(10, this); } @Override public void onTurnReached(final int currentTurn) { try { final String zoneName = zones.get(counter); final StendhalRPZone zone = SingletonRepository.getRPWorld().getZone(zoneName); final int[][] tele_xy = { { 5, 5 }, { 50, 50 }, { 20, 20 }, { 100, 100 }, { 100, 5 } }; boolean foundSpot = false; for (int i = 0; i < tele_xy.length; i++) { if (player.teleport(zone, tele_xy[i][0], tele_xy[i][1], null, null)) { player.sendPrivateText("Welcome in " + zoneName); foundSpot = true; break; } } if (!foundSpot) { player.sendPrivateText("Sorry, did not find a free spot in " + zoneName); } } catch (final Exception e) { Logger.getLogger(SightseeingAction.class).error(e, e); } counter++; if (counter < zones.size() - 1) { SingletonRepository.getTurnNotifier().notifyInTurns(10, this); } } } @Override public void load(final Player admin, final List<String> args, final ScriptingSandbox sandbox) { super.load(admin, args, sandbox); // Create NPC final ScriptingNPC npc = new ScriptingNPC("Debuggera"); npc.setEntityClass("girlnpc"); // Place NPC in int_admin_playground on server start final String myZone = "int_admin_playground"; sandbox.setZone(myZone); int x = 4; int y = 11; // If this script is executed by an admin, Debuggera will be placed next 
// to him/her. if (admin != null) { sandbox.setZone(sandbox.getZone(admin)); x = admin.getX() + 1; y = admin.getY(); } // Set zone and position npc.setPosition(x, y); npc.setDirection(Direction.DOWN); sandbox.add(npc); // npc.add(ConversationStates.IDLE, Arrays.asList("hi", "hello", "greetings", "hola"), null, ConversationStates.IDLE, "My mom said, i am not allowed to talk to strangers.", null); npc.behave("bye", "Bye."); // Greating and admins may enable or disable her npc.add(ConversationStates.IDLE, Arrays.asList("hi", "hello", "greetings", "hola"), new AdminCondition(), ConversationStates.ATTENDING, "Hi, game master. Do you think i am #crazy?", null); // npc.add(ConversationStates.IDLE, [ "hi","hello","greetings","hola" ], // new AdminCondition(), ConversationStates.QUESTION_1, // "May I talk to strangers?", null); // npc.add(ConversationStates.QUESTION_1, SpeakerNPC.YES_MESSAGES, new AdminCondition(), // ConversationStates.ATTENDING, null, new DebuggeraEnablerAction(true)); // npc.add(ConversationStates.QUESTION_1, ConversationPhrases.NO_MESSAGES, new AdminCondition(), // ConversationStates.ATTENDING, null, new DebuggeraEnablerAction(false)); npc.behave(Arrays.asList("insane", "crazy", "mad"), "Why are you so mean? I AM NOT INSANE. My mummy says, I am a #special child."); npc.behave( Arrays.asList("special", "special child"), "I can see another world in my dreams. That are more thans dreams. There the people are sitting in front of machines called computers. This are realy strange people. They cannot use telepathy without something they call inter-network. But these people and machines are somehow connected to our world. If I concentrate, I can #change thinks in our world."); // npc.behave("verschmelzung", "\r\nYou have one hand,\r\nI have the // other.\r\nPut them together,\r\nWe have each other."); npc.add( ConversationStates.ATTENDING, Arrays.asList("susi"), null, ConversationStates.ATTENDING, "Yes, she is my twin sister. 
People consider her normal because she hides her special abilities.", null); // change npc.add(ConversationStates.ATTENDING, Arrays.asList("change", "change"), new QuestInStateCondition( "debuggera", "friends"), ConversationStates.ATTENDING, "I can teleport you.", null); npc.add(ConversationStates.ATTENDING, Arrays.asList("change", "change"), new QuestNotInStateCondition("debuggera", "friends"), ConversationStates.ATTENDING, "Do you want to become my #friend?", null); // friends npc.add(ConversationStates.ATTENDING, Arrays.asList("friend", "friends"), new QuestInStateCondition( "debuggera", "friends"), ConversationStates.ATTENDING, "We are friends.", null); npc.add( ConversationStates.ATTENDING, Arrays.asList("friend", "friends"), new QuestNotInStateCondition("debuggera", "friends"), ConversationStates.INFORMATION_1, "Please repeat:\r\n \"A circle is round,\"", null); npc.add(ConversationStates.INFORMATION_1, Arrays.asList( "A circle is round,", "A circle is round"), null, ConversationStates.INFORMATION_2, "\"it has no end.\"", null); npc.add(ConversationStates.INFORMATION_2, Arrays.asList( "it has no end.", "it has no end"), null, ConversationStates.INFORMATION_3, "\"That's how long,\"", null); npc.add(ConversationStates.INFORMATION_3, Arrays.asList( "That's how long,", "That's how long", "Thats how long,", "Thats how long"), null, ConversationStates.INFORMATION_4, "\"I will be your friend.\"", null); npc.add(ConversationStates.INFORMATION_4, Arrays.asList( "I will be your friend.", "I will be your friend"), null, ConversationStates.ATTENDING, "Cool. 
We are friends now.", new SetQuestAction("debuggera", "friends")); // quests npc.add(ConversationStates.ATTENDING, "quest", new AdminCondition(), ConversationStates.ATTENDING, null, new QuestsAction(sandbox)); // teleport npc.add(ConversationStates.ATTENDING, Arrays.asList("teleport", "teleportme"), new AdminCondition(), ConversationStates.IDLE, null, new TeleportNPCAction(sandbox)); final StendhalRPWorld world = SingletonRepository.getRPWorld(); npc.add(ConversationStates.ATTENDING, Arrays.asList("sightseeing", "memory", "memoryhole"), new AdminCondition(), ConversationStates.IDLE, null, new SightseeingAction(world)); } /* * Make new friends, but keep the old. One is silver, And the other gold, * * You help me, And I'll help you. And together, We will see it through. * * The sky is blue, The Earth Earth is green. I can help, To keep it clean. */ }
gpl-2.0
SWGANHServices/SWGANHJava
documents/SWGCombined/src/Waypoint.java
2250
/**
 * Container for a single waypoint: the planet it is on, its owner, its type
 * (mission waypoint, player-created waypoint, quest waypoint, etc.) and whether
 * it is currently active in the world.
 *
 * @author Darryl
 */
public class Waypoint extends SOEObject {
    // Fixed serialization version. Was "1l" -- the lowercase 'l' suffix is easily
    // misread as the digit 1, so it is written as "1L" here. Kept public for
    // compatibility with any existing external references, although the usual
    // convention is "private static final".
    public static final long serialVersionUID = 1L;

    // True while this waypoint is shown/active in the world.
    private boolean bActivated;
    // Discriminator for the waypoint kind (mission, player-created, quest, ...).
    private byte iWaypointType;
    // Display name; lazily defaulted to "" by getName() so callers never see null.
    private String sName;
    // CRC identifying the planet this waypoint belongs to.
    private int planetCRC;
    // Object ID of the player who owns this waypoint.
    private long ownerID;

    /**
     * Creates a new, default waypoint.
     */
    public Waypoint() {
        super();
    }

    /**
     * Gets the owner of this waypoint.
     * @return The owner's Object ID.
     */
    public long getOwnerID() {
        return ownerID;
    }

    /**
     * Sets the owner of this waypoint.
     * @param id -- The owner's Object ID.
     */
    public void setOwnerID(long id) {
        ownerID = id;
    }

    /**
     * Gets the Planet CRC for this waypoint.
     * @return The Planet CRC.
     */
    public int getPlanetCRC() {
        return planetCRC;
    }

    /**
     * Sets the Planet CRC for this waypoint.
     * @param crc -- The Planet CRC.
     */
    public void setPlanetCRC(int crc) {
        planetCRC = crc;
    }

    /**
     * Gets the name of this waypoint. Never returns null; an unset name is
     * normalized to the empty string on first access.
     * @return The name.
     */
    public String getName() {
        if (sName == null) {
            sName = "";
        }
        return sName;
    }

    /**
     * Sets the name of this waypoint.
     * @param sName -- The name.
     */
    public void setName(String sName) {
        this.sName = sName;
    }

    /**
     * Gets the waypoint type.
     * @return The waypoint type.
     */
    public byte getWaypointType() {
        return iWaypointType;
    }

    /**
     * Sets the waypoint type.
     * @param b -- The waypoint type.
     */
    public void setWaypointType(byte b) {
        iWaypointType = b;
    }

    /**
     * Gets whether this waypoint is currently active in the world.
     * @return True if the waypoint is active, false if it isn't.
     */
    public boolean getIsActivated() {
        return bActivated;
    }

    /**
     * Sets the active status of this waypoint.
     * @param b -- The waypoint active status.
     */
    public void setIsActivated(boolean b) {
        bActivated = b;
    }

    /**
     * Simply toggles the active status of this waypoint. If it was on, now it
     * is off. If it was off, now it is on.
     */
    public void toggleIsActivated() {
        bActivated = !bActivated;
    }
}
gpl-3.0
dotCMS/core-2.x
src/com/dotmarketing/portlets/virtuallinks/struts/VirtualLinkForm.java
2642
package com.dotmarketing.portlets.virtuallinks.struts; import javax.servlet.http.HttpServletRequest; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionMapping; import org.apache.struts.validator.ValidatorForm; import com.dotmarketing.util.InodeUtils; import com.liferay.portal.util.Constants; /** @author Hibernate CodeGenerator */ public class VirtualLinkForm extends ValidatorForm { /** identifier field */ private String inode; /** nullable persistent field */ private String title; /** nullable persistent field */ private String url; /** nullable persistent field */ private String uri; /** nullable persistent field */ private boolean active; /** nullable persistent field */ private String htmlInode; /** nullable persistent field */ private String hostId; /** default constructor */ public VirtualLinkForm() { active = true; htmlInode = ""; } public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) { if(request.getParameter("cmd")!=null && request.getParameter("cmd").equals(Constants.ADD)) { return super.validate(mapping, request); } return null; } /** * @return Returns the active. */ public boolean isActive() { return active; } /** * @param active The active to set. */ public void setActive(boolean active) { this.active = active; } /** * @return Returns the inode. */ public String getInode() { if(InodeUtils.isSet(inode)) return inode; return ""; } /** * @param inode The inode to set. */ public void setInode(String inode) { this.inode = inode; } /** * @return Returns the uri. */ public String getUri() { return uri; } /** * @param uri The uri to set. */ public void setUri(String uri) { this.uri = uri; } /** * @return Returns the url. */ public String getUrl() { return url; } /** * @param url The url to set. */ public void setUrl(String url) { this.url = url; } /** * @return Returns the htmlInode. */ public String getHtmlInode() { return htmlInode; } /** * @param htmlInode The htmlInode to set. 
*/ public void setHtmlInode(String htmlInode) { this.htmlInode = htmlInode; } /** * @return Returns the title. */ public String getTitle() { return title; } /** * @param title The title to set. */ public void setTitle(String title) { this.title = title; } public String getHostId() { return hostId; } public void setHostId(String hostId) { this.hostId = hostId; } }
gpl-3.0
HostileNetworks/AncientWarfare2_ReignModpack
src/main/java/net/shadowmage/ancientwarfare/npc/entity/faction/NpcNativeMountedSoldier.java
359
package net.shadowmage.ancientwarfare.npc.entity.faction;

import net.minecraft.world.World;

/**
 * Mounted soldier NPC for the "native" faction. All behaviour is inherited
 * from {@link NpcFactionMountedSoldier}; this subclass only supplies the
 * faction-specific type key.
 */
public class NpcNativeMountedSoldier extends NpcFactionMountedSoldier {

    public NpcNativeMountedSoldier(final World world) {
        super(world);
    }

    /** @return the type key identifying this NPC variant ("native.cavalry"). */
    @Override
    public String getNpcType() {
        return "native.cavalry";
    }
}
gpl-3.0
Mordenkainen/CompactMachines
src/main/java/org/dave/CompactMachines/proxy/IProxy.java
290
package org.dave.CompactMachines.proxy;

/**
 * Sided-proxy contract. Client and server proxy implementations provide the
 * side-specific registration steps so common mod code can invoke them without
 * referencing side-only classes directly.
 *
 * Interface members are implicitly {@code public abstract}; the redundant
 * modifiers from the original declaration have been removed.
 */
public interface IProxy {

    /** Registers the mod's tile entities. */
    void registerTileEntities();

    /** Registers event/packet handlers. */
    void registerHandlers();

    /** Registers villager skins (implementation is side-specific). */
    void registerVillagerSkins();

    /** Registers renderers (implementation is side-specific). */
    void registerRenderers();

    /** @return {@code true} when this proxy runs on the client side. */
    boolean isClient();
}
gpl-3.0
VegasGoat/TFCraft
src/Common/com/bioxx/tfc/Handlers/ServerTickHandler.java
1420
package com.bioxx.tfc.Handlers;

import net.minecraft.server.MinecraftServer;
import net.minecraft.world.World;

import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.TickEvent.Phase;
import cpw.mods.fml.common.gameevent.TickEvent.WorldTickEvent;

import com.bioxx.tfc.Core.TFC_Core;
import com.bioxx.tfc.Core.TFC_Time;
import com.bioxx.tfc.api.TFCOptions;

/**
 * Server-side world tick hook. On each tick START it (1) performs one-time
 * TFC world setup when the overworld's seed changes, (2) advances TFC's own
 * calendar, and (3) slows the world clock down while no players are online.
 */
public class ServerTickHandler
{
	// Seed of the world we last ran TFC_Core.setupWorld() for.
	// Long.MIN_VALUE acts as the "not yet initialised" sentinel.
	private long wSeed = Long.MIN_VALUE;
	// Rolling counter for the empty-server slowdown below; counts ticks modulo
	// TFCOptions.simSpeedNoPlayers.
	public int ticks;

	/**
	 * Handles a world tick. Only the START phase does any work; the END phase
	 * branch was intentionally left disabled (see commented block below).
	 */
	@SubscribeEvent
	public void onServerWorldTick(WorldTickEvent event)
	{
		World world = event.world;
		if(event.phase == Phase.START)
		{
			// One-time world setup: dimension 0 is the overworld; a seed change
			// means a different world was loaded since we last initialised.
			if(world.provider.dimensionId == 0 && world.getWorldInfo().getSeed() != wSeed)
			{
				TFC_Core.setupWorld(world);
				wSeed = world.getWorldInfo().getSeed();
			}
			TFC_Time.updateTime(world);
			/*if(ServerOverrides.isServerEmpty()) return;*/
			// Empty-server slowdown: with N = simSpeedNoPlayers > 0 and nobody
			// online, rewind both clocks by one tick on N-1 out of every N ticks,
			// so in-game time advances at roughly 1/N of normal speed.
			if(MinecraftServer.getServer().getCurrentPlayerCount() == 0 && TFCOptions.simSpeedNoPlayers > 0)
			{
				ticks++;
				long t = world.getWorldInfo().getWorldTotalTime();
				long w = world.getWorldInfo().getWorldTime();
				if(ticks < TFCOptions.simSpeedNoPlayers)
				{
					// NOTE(review): despite its name, incrementTotalWorldTime(t-1L)
					// appears to be used here as a setter (current total minus one),
					// mirroring setWorldTime(w-1L) -- confirm against the vanilla
					// WorldInfo implementation for this MC version.
					world.getWorldInfo().incrementTotalWorldTime(t-1L);
					world.getWorldInfo().setWorldTime(w-1L);
				}
				else
				{
					// Every N-th tick passes without a rewind; restart the cycle.
					ticks = 0;
				}
			}
		}
		/*else if(event.phase == Phase.END)
		{
		}*/
	}
}
gpl-3.0
Scrik/Cauldron-1
eclipse/cauldron/src/main/java/net/minecraft/client/model/ModelWitch.java
3183
package net.minecraft.client.model;

import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.entity.Entity;
import net.minecraft.util.MathHelper;

/**
 * Render model for the witch. Extends the villager model and attaches extra
 * geometry in the constructor: a small wart box on the nose and a four-part
 * stacked witch hat (brim plus three progressively smaller, tilted segments)
 * on the head. setRotationAngles() animates a per-entity nose wiggle.
 */
@SideOnly(Side.CLIENT)
public class ModelWitch extends ModelVillager
{
    // Flag toggled externally; when true, the nose is pinned in a fixed tilted
    // pose (see end of setRotationAngles). Presumably set while the witch is
    // drinking/holding an item -- TODO confirm at the call site.
    public boolean field_82900_g;
    // Wart box attached to the villager nose.
    private ModelRenderer field_82901_h = (new ModelRenderer(this)).setTextureSize(64, 128);
    // Base (brim) of the hat; the three upper segments are chained as children.
    private ModelRenderer witchHat;
    // MCP/Forge obfuscation marker; not used at runtime.
    private static final String __OBFID = "CL_00000866";

    public ModelWitch(float p_i1166_1_)
    {
        super(p_i1166_1_, 0.0F, 64, 128);

        // Wart: a 1x1x1 box on the nose, shrunk by 0.25F.
        this.field_82901_h.setRotationPoint(0.0F, -2.0F, 0.0F);
        this.field_82901_h.setTextureOffset(0, 0).addBox(0.0F, 3.0F, -6.75F, 1, 1, 1, -0.25F);
        this.villagerNose.addChild(this.field_82901_h);

        // Hat brim: 10x2x10 box parented to the head.
        this.witchHat = (new ModelRenderer(this)).setTextureSize(64, 128);
        this.witchHat.setRotationPoint(-5.0F, -10.03125F, -5.0F);
        this.witchHat.setTextureOffset(0, 64).addBox(0.0F, 0.0F, 0.0F, 10, 2, 10);
        this.villagerHead.addChild(this.witchHat);

        // First hat segment: 7x4x7, slightly tilted.
        // Rotation constants are radians: 0.05235988 = 3 deg, 0.02617994 = 1.5 deg.
        ModelRenderer modelrenderer = (new ModelRenderer(this)).setTextureSize(64, 128);
        modelrenderer.setRotationPoint(1.75F, -4.0F, 2.0F);
        modelrenderer.setTextureOffset(0, 76).addBox(0.0F, 0.0F, 0.0F, 7, 4, 7);
        modelrenderer.rotateAngleX = -0.05235988F;
        modelrenderer.rotateAngleZ = 0.02617994F;
        this.witchHat.addChild(modelrenderer);

        // Second segment: 4x4x4, tilted further (0.10471976 = 6 deg).
        ModelRenderer modelrenderer1 = (new ModelRenderer(this)).setTextureSize(64, 128);
        modelrenderer1.setRotationPoint(1.75F, -4.0F, 2.0F);
        modelrenderer1.setTextureOffset(0, 87).addBox(0.0F, 0.0F, 0.0F, 4, 4, 4);
        modelrenderer1.rotateAngleX = -0.10471976F;
        modelrenderer1.rotateAngleZ = 0.05235988F;
        modelrenderer.addChild(modelrenderer1);

        // Hat tip: 1x2x1 expanded by 0.25F, tilted most (0.20943952 = 12 deg).
        ModelRenderer modelrenderer2 = (new ModelRenderer(this)).setTextureSize(64, 128);
        modelrenderer2.setRotationPoint(1.75F, -2.0F, 2.0F);
        modelrenderer2.setTextureOffset(0, 95).addBox(0.0F, 0.0F, 0.0F, 1, 2, 1, 0.25F);
        modelrenderer2.rotateAngleX = -0.20943952F;
        modelrenderer2.rotateAngleZ = 0.10471976F;
        modelrenderer1.addChild(modelrenderer2);
    }

    /**
     * Sets the pose for the current frame. Delegates the body pose to the
     * villager model, then overlays a slow sinusoidal nose wiggle whose speed
     * depends on the entity id (so different witches are out of phase).
     * Parameter names are obfuscated MCP placeholders; p_78087_7_ is the
     * entity being rendered.
     */
    public void setRotationAngles(float p_78087_1_, float p_78087_2_, float p_78087_3_, float p_78087_4_, float p_78087_5_, float p_78087_6_, Entity p_78087_7_)
    {
        super.setRotationAngles(p_78087_1_, p_78087_2_, p_78087_3_, p_78087_4_, p_78087_5_, p_78087_6_, p_78087_7_);
        this.villagerNose.offsetX = this.villagerNose.offsetY = this.villagerNose.offsetZ = 0.0F;
        // Per-entity wiggle frequency: one of 10 speeds picked by entity id.
        float f6 = 0.01F * (float)(p_78087_7_.getEntityId() % 10);
        this.villagerNose.rotateAngleX = MathHelper.sin((float)p_78087_7_.ticksExisted * f6) * 4.5F * (float)Math.PI / 180.0F;
        this.villagerNose.rotateAngleY = 0.0F;
        this.villagerNose.rotateAngleZ = MathHelper.cos((float)p_78087_7_.ticksExisted * f6) * 2.5F * (float)Math.PI / 180.0F;

        if (this.field_82900_g)
        {
            // Fixed pose overrides the wiggle while the flag is set.
            this.villagerNose.rotateAngleX = -0.9F;
            this.villagerNose.offsetZ = -0.09375F;
            this.villagerNose.offsetY = 0.1875F;
        }
    }
}
gpl-3.0
batmancn/MyLife
learn/demos/java_demo/Thinking in Java Fourth Edition Code/TIJ4-code/access/ChocolateChip2.java
436
//: access/ChocolateChip2.java import access.cookie2.*; public class ChocolateChip2 extends Cookie { public ChocolateChip2() { System.out.println("ChocolateChip2 constructor"); } public void chomp() { bite(); } // Protected method public static void main(String[] args) { ChocolateChip2 x = new ChocolateChip2(); x.chomp(); } } /* Output: Cookie constructor ChocolateChip2 constructor bite *///:~
gpl-3.0
eethomas/eucalyptus
clc/modules/msgs/src/main/java/com/eucalyptus/util/dns/DomainNames.java
11570
/************************************************************************* * Copyright 2009-2015 Eucalyptus Systems, Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see http://www.gnu.org/licenses/. * * Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta * CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need * additional information or have any questions. * * This file may incorporate work covered under the following copyright * and permission notice: * * Software License Agreement (BSD License) * * Copyright (c) 2008, Regents of the University of California * All rights reserved. * * Redistribution and use of this software in source and binary forms, * with or without modification, are permitted provided that the * following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. USERS OF THIS SOFTWARE ACKNOWLEDGE * THE POSSIBLE PRESENCE OF OTHER OPEN SOURCE LICENSED MATERIAL, * COPYRIGHTED MATERIAL OR PATENTED MATERIAL IN THIS SOFTWARE, * AND IF ANY SUCH MATERIAL IS DISCOVERED THE PARTY DISCOVERING * IT MAY INFORM DR. RICH WOLSKI AT THE UNIVERSITY OF CALIFORNIA, * SANTA BARBARA WHO WILL THEN ASCERTAIN THE MOST APPROPRIATE REMEDY, * WHICH IN THE REGENTS' DISCRETION MAY INCLUDE, WITHOUT LIMITATION, * REPLACEMENT OF THE CODE SO IDENTIFIED, LICENSING OF THE CODE SO * IDENTIFIED, OR WITHDRAWAL OF THE CODE CAPABILITY TO THE EXTENT * NEEDED TO COMPLY WITH ANY SUCH LICENSES OR RIGHTS. 
 ************************************************************************/

package com.eucalyptus.util.dns;

import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
import org.apache.log4j.Logger;
import org.xbill.DNS.DClass;
import org.xbill.DNS.NSRecord;
import org.xbill.DNS.Name;
import org.xbill.DNS.NameTooLongException;
import org.xbill.DNS.TextParseException;
import com.eucalyptus.component.ComponentId;
import com.eucalyptus.component.ComponentIds;
import com.eucalyptus.component.Components;
import com.eucalyptus.component.ServiceConfiguration;
import com.eucalyptus.component.id.Dns;
import com.eucalyptus.util.Exceptions;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Supplier;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import edu.ucsb.eucalyptus.cloud.entities.SystemConfiguration;

/**
 * Facade for interacting w/ the internal domain name handling.
 *
 * Note that there are no methods in this API which take a String. That is intentional -- if you
 * feel the need to munge strings do so on the calling side.
 *
 * @author chris grzegorczyk <grze@eucalyptus.com>
 */
public class DomainNames {
  /** The DNS root name "." -- the default origin used when absolutizing names. */
  private static final Name ROOT_NAME = Name.fromConstantString( "." );
  private static Logger LOG = Logger.getLogger( DomainNames.class );

  /**
   * @return Subdomain representing the cloud internal DNS subdomain for {@code ComponentId}
   */
  public static Name internalSubdomain( Class<? extends ComponentId> componentId ) {
    return SystemSubdomain.INTERNAL.apply( componentId );
  }

  /**
   * @return Subdomains representing the cloud internal DNS subdomain for {@code ComponentId}
   */
  public static Set<Name> internalSubdomains( Class<? extends ComponentId> componentId ) {
    return SystemSubdomain.INTERNAL.names( componentId );
  }

  /**
   * @return Subdomain representing the external system DNS subdomain for {@code ComponentId}
   */
  public static Name externalSubdomain( Class<? extends ComponentId> componentId ) {
    return SystemSubdomain.EXTERNAL.apply( componentId );
  }

  /**
   * @return Subdomains representing the external system DNS subdomains for {@code ComponentId}
   */
  public static Set<Name> externalSubdomains( Class<? extends ComponentId> componentId ) {
    return SystemSubdomain.EXTERNAL.names( componentId );
  }

  /**
   * @return Subdomain representing the cloud internal DNS subdomain
   */
  public static Name internalSubdomain( ) {
    return SystemSubdomain.INTERNAL.get( );
  }

  /**
   * @return Subdomain representing the external system DNS subdomain
   */
  public static Name externalSubdomain( ) {
    return SystemSubdomain.EXTERNAL.get( );
  }

  /**
   * Determines whether the given {@code name} is a subdomain of some zone under the control of
   * the system.
   *
   * @param name Name to test
   * @return true if the name is a system subdomain name.
   */
  public static boolean isSystemSubdomain( Name name ) {
    return isInternalSubdomain( name ) || isExternalSubdomain( name );
  }

  /**
   * Determines whether the given {@code name} is a subdomain of the external DNS subdomain.
   *
   * @param name Name to test
   * @return true if the name is an external subdomain name.
   */
  public static boolean isExternalSubdomain( Name name ) {
    return name.subdomain( SystemSubdomain.EXTERNAL.get( ) );
  }

  /**
   * Determines whether the given {@code name} is a subdomain of the internal DNS subdomain.
   *
   * @param name Name to test
   * @return true if the name is an internal subdomain name.
   */
  public static boolean isInternalSubdomain( Name name ) {
    return name.subdomain( SystemSubdomain.INTERNAL.get( ) );
  }

  /**
   * Get the system domain for which the given name is a subdomain.
   *
   * Checks external domains first, then internal ones; a name equal to a system
   * domain itself (rather than a strict subdomain of it) does not match.
   *
   * @param componentId The component to check names for
   * @param perhapsSystemSubdomain The name to check
   * @return The optional system domain (internal or external)
   */
  public static Optional<Name> systemDomainFor( final Class<? extends ComponentId> componentId,
                                                final Name perhapsSystemSubdomain ) {
    Optional<Name> systemDomainResult = Optional.absent( );
    for ( final Name systemDomain : Iterables.concat(
        DomainNames.externalSubdomains( componentId ),
        DomainNames.internalSubdomains( componentId ) ) ) {
      if ( perhapsSystemSubdomain.subdomain( systemDomain )
           && !perhapsSystemSubdomain.equals( systemDomain ) ) {
        systemDomainResult = Optional.of( systemDomain );
        break;
      }
    }
    return systemDomainResult;
  }

  /**
   * Get the list of Name Server Records for the given Name if we are authoritative. That is, only
   * ever return Names which refer to our internal DNS server.
   *
   * @param systemDomain the name for which to return our nameserver set for if we are authoritative
   * @return List of Name's of the local systems if we are authoritative
   * @throws NoSuchElementException if the name is not authoritatively served by us
   */
  public static List<NSRecord> nameServerRecords( Name systemDomain ) throws NoSuchElementException {
    return SystemSubdomain.lookup( systemDomain ).getNameServers( );
  }

  /**
   * @return the system zone apex (internal or external) that is authoritative for {@code name}
   * @throws NoSuchElementException if no system zone covers the name
   */
  public static Name sourceOfAuthority( Name name ) throws NoSuchElementException {
    return SystemSubdomain.lookup( name ).get( );
  }

  /**
   * The two zones the system serves: the fixed "internal." zone and the
   * operator-configured external DNS domain. Each constant maps a component
   * class to its name under that zone (Function) and supplies the zone apex
   * itself (Supplier).
   */
  private enum SystemSubdomain implements Function<Class<? extends ComponentId>, Name>, Supplier<Name> {
    INTERNAL {
      @Override
      public Name get( ) {
        return DomainNames.absolute( Name.fromConstantString( INTERNAL_SUBDOMAIN ) );
      }
    },
    EXTERNAL {
      @Override
      public Name get( ) {
        try {
          // The external domain comes from runtime system configuration and may
          // be malformed; fall back to "localhost" rather than propagate.
          return DomainNames.absolute( Name.fromString( SystemConfiguration.getSystemConfiguration().getDnsDomain() ) );
        } catch ( final TextParseException e ) {
          return DomainNames.absolute( Name.fromConstantString( "localhost" ) );
        }
      }
    };

    private static final String INTERNAL_SUBDOMAIN = "internal."; //GRZE: this is constant per the AWS spec

    @Override
    public Name apply( Class<? extends ComponentId> input ) {
      Name compName = Name.fromConstantString( ComponentIds.lookup( input ).name( ) );
      return absolute( compName, this.get( ) );
    }

    /** All names for the component's services under this zone, in insertion order. */
    public Set<Name> names( final Class<? extends ComponentId> input ) {
      final Set<Name> names = Sets.newLinkedHashSet( );
      final ComponentId componentId = ComponentIds.lookup( input );
      final Name domain = get( );
      for ( final String name : componentId.getAllServiceNames( ) ) {
        names.add( absolute( Name.fromConstantString( name ), domain ) );
      }
      return names;
    }

    /** NS records (TTL 60s) naming ns1..nsN under this zone, one per registered Dns service. */
    public List<NSRecord> getNameServers( ) {
      List<NSRecord> nsRecs = Lists.newArrayList( );
      int idx = 1;
      for ( ServiceConfiguration conf : Components.lookup( Dns.class ).services( ) ) {
        nsRecs.add( new NSRecord( this.get( ), DClass.IN, 60,
                                  Name.fromConstantString( "ns" + idx++ + "." + this.get( ).toString( ) ) ) );
      }
      return nsRecs;
    }

    /** Finds which system zone covers {@code name}, or throws if neither does. */
    public static SystemSubdomain lookup( Name name ) {
      for ( SystemSubdomain s : SystemSubdomain.values( ) ) {
        if ( name.subdomain( s.get( ) ) ) {
          return s;
        }
      }
      throw new NoSuchElementException( "Failed to lookup SystemSubdomain for the name: " + name );
    }
  }

  /** @return the DNS root name "." */
  public static Name root( ) {
    return ROOT_NAME;
  }

  /** Absolutizes {@code name} against the root origin. */
  public static Name absolute( Name name ) {
    return absolute( name, ROOT_NAME );
  }

  /** Absolutizes {@code name} against {@code origin}; already-absolute names pass through. */
  public static Name absolute( Name name, Name origin ) {
    if ( name.isAbsolute( ) ) {
      return name;
    } else {
      return concatenateConstant( name, origin );
    }
  }

  /** Relativizes {@code name} against {@code origin} (inverse of absolute). */
  public static Name relativize( Name name, Name origin ) {
    return name.relativize( origin );
  }

  /**
   * Concatenates two names, converting the checked length overflow into an
   * unchecked exception (names here are system-controlled and expected to fit).
   */
  public static Name concatenateConstant( final Name one, final Name two ) {
    try {
      return Name.concatenate( one, two );
    } catch ( NameTooLongException ex ) {
      LOG.error( ex );
      throw Exceptions.toUndeclared( ex );
    }
  }
}
gpl-3.0
wicky-info/CloudCoder
CloudCoder/src/org/cloudcoder/app/client/view/CourseAdminProblemListView.java
14408
// CloudCoder - a web-based pedagogical programming environment // Copyright (C) 2011-2014, Jaime Spacco <jspacco@knox.edu> // Copyright (C) 2011-2014, David H. Hovemeyer <david.hovemeyer@gmail.com> // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. package org.cloudcoder.app.client.view; import java.util.Arrays; import java.util.Collection; import java.util.Set; import org.cloudcoder.app.client.model.Session; import org.cloudcoder.app.client.page.CloudCoderPage; import org.cloudcoder.app.client.page.ProblemAdminPage; import org.cloudcoder.app.client.page.SessionObserver; import org.cloudcoder.app.client.rpc.RPC; import org.cloudcoder.app.shared.model.CloudCoderAuthenticationException; import org.cloudcoder.app.shared.model.Course; import org.cloudcoder.app.shared.model.CourseSelection; import org.cloudcoder.app.shared.model.ICallback; import org.cloudcoder.app.shared.model.Module; import org.cloudcoder.app.shared.model.Problem; import org.cloudcoder.app.shared.model.ProblemAndModule; import org.cloudcoder.app.shared.model.ProblemAndSubmissionReceipt; import org.cloudcoder.app.shared.model.ProblemAuthorship; import org.cloudcoder.app.shared.model.User; import org.cloudcoder.app.shared.util.Publisher; import org.cloudcoder.app.shared.util.Subscriber; import org.cloudcoder.app.shared.util.SubscriptionRegistrar; import com.google.gwt.cell.client.AbstractCell; 
import com.google.gwt.cell.client.EditTextCell; import com.google.gwt.cell.client.FieldUpdater; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.safehtml.shared.SafeHtmlBuilder; import com.google.gwt.user.cellview.client.Column; import com.google.gwt.user.cellview.client.DataGrid; import com.google.gwt.user.cellview.client.TextColumn; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.ResizeComposite; import com.google.gwt.view.client.MultiSelectionModel; import com.google.gwt.view.client.SelectionChangeEvent; import com.google.gwt.view.client.SelectionChangeEvent.Handler; /** * View to show problems in the {@link ProblemAdminPage}. * * @author David Hovemeyer */ public class CourseAdminProblemListView extends ResizeComposite implements Subscriber, SessionObserver { private static final String CHECKMARK_URL = GWT.getModuleBaseForStaticFiles() + "images/check-mark-icon-sm.png"; private CloudCoderPage page; private DataGrid<ProblemAndModule> grid; private Session session; private MultiSelectionModel<ProblemAndModule> selectionModel; private ICallback<ProblemAndModule> editModuleNameCallback; /** * Constructor. 
*/ public CourseAdminProblemListView(CloudCoderPage page) { this.page = page; grid = new DataGrid<ProblemAndModule>(); grid.addColumn(new ProblemIdColumn(), "Id"); grid.addColumn(new ProblemNameColumn(), "Name"); grid.addColumn(new ProblemBriefDescriptionColumn(), "Description"); grid.addColumn(new ProblemTypeColumn(), "Type"); grid.addColumn(new ProblemWhenAssignedColumn(), "Assigned"); grid.addColumn(new ProblemWhenDueColumn(), "Due"); grid.addColumn(new ProblemVisibleColumn(), "Visible"); grid.addColumn(new ProblemLicense(), "License"); grid.addColumn(new ProblemSharedColumn(), "Shared"); grid.setColumnWidth(0, 5.0, Unit.PCT); grid.setColumnWidth(1, 12.5, Unit.PCT); grid.setColumnWidth(2, 22.0, Unit.PCT); grid.setColumnWidth(3, 12.5, Unit.PCT); grid.setColumnWidth(4, 9.0, Unit.PCT); grid.setColumnWidth(5, 9.0, Unit.PCT); grid.setColumnWidth(6, 60.0, Unit.PX); grid.setColumnWidth(7, 10.0, Unit.PCT); grid.setColumnWidth(8, 60.0, Unit.PX); grid.setColumnWidth(9, 20.0, Unit.PCT); // The column displaying the module name allows editing, and invokes // a callback when the module name changes. ProblemModuleNameColumn moduleNameColumn = new ProblemModuleNameColumn(); grid.addColumn(moduleNameColumn, "Module (click to edit)"); moduleNameColumn.setFieldUpdater(new FieldUpdater<ProblemAndModule, String>() { @Override public void update(int index, ProblemAndModule object, String value) { if (!value.equals(object.getModule().getName())) { object.getModule().setName(value); if (editModuleNameCallback != null) { editModuleNameCallback.call(object); } } } }); initWidget(grid); } /** * Set a callback to be invoked when the module name of a problem is changed. 
* * @param callback callback invoked when the module name of a problem is changed */ public void setEditModuleNameCallback(ICallback<ProblemAndModule> callback) { this.editModuleNameCallback = callback; } private static class ProblemIdColumn extends TextColumn<ProblemAndModule> { @Override public String getValue(ProblemAndModule object) { return String.valueOf(object.getProblem().getProblemId()); } } private static class ProblemNameColumn extends TextColumn<ProblemAndModule> { /* (non-Javadoc) * @see com.google.gwt.user.cellview.client.Column#getValue(java.lang.Object) */ @Override public String getValue(ProblemAndModule object) { return object.getProblem().getTestname(); } } private static class ProblemBriefDescriptionColumn extends TextColumn<ProblemAndModule> { /* (non-Javadoc) * @see com.google.gwt.user.cellview.client.Column#getValue(java.lang.Object) */ @Override public String getValue(ProblemAndModule object) { return object.getProblem().getBriefDescription(); } } private static class ProblemTypeColumn extends TextColumn<ProblemAndModule> { /* (non-Javadoc) * @see com.google.gwt.user.cellview.client.Column#getValue(java.lang.Object) */ @Override public String getValue(ProblemAndModule object) { return object.getProblem().getProblemType().toString().replace("_", " "); } } private static class ProblemWhenAssignedColumn extends TextColumn<ProblemAndModule> { /* (non-Javadoc) * @see com.google.gwt.user.cellview.client.Column#getValue(java.lang.Object) */ @Override public String getValue(ProblemAndModule object) { return ViewUtil.formatDate(object.getProblem().getWhenAssignedAsDate()); } } private static class ProblemWhenDueColumn extends TextColumn<ProblemAndModule> { /* (non-Javadoc) * @see com.google.gwt.user.cellview.client.Column#getValue(java.lang.Object) */ @Override public String getValue(ProblemAndModule object) { return ViewUtil.formatDate(object.getProblem().getWhenDueAsDate()); } } private static class ProblemLicense extends 
TextColumn<ProblemAndModule> { /* (non-Javadoc) * @see com.google.gwt.user.cellview.client.Column#getValue(java.lang.Object) */ @Override public String getValue(ProblemAndModule object) { switch (object.getProblem().getLicense()) { case CC_ATTRIB_SHAREALIKE_3_0: return "CC"; case GNU_FDL_1_3_NO_EXCEPTIONS: return "GFDL"; case NOT_REDISTRIBUTABLE: return "not permissive"; default: return "unknown"; } } } private static class ProblemVisibleColumn extends TextColumn<ProblemAndModule> { /* (non-Javadoc) * @see com.google.gwt.user.cellview.client.Column#getValue(java.lang.Object) */ @Override public String getValue(ProblemAndModule object) { return object.getProblem().isVisible() ? "true" : "false"; } } private static class ProblemSharedCell extends AbstractCell<Boolean> { @Override public void render(com.google.gwt.cell.client.Cell.Context context, Boolean value, SafeHtmlBuilder sb) { if (value) { sb.appendHtmlConstant("<img src=\"" + CHECKMARK_URL + "\" alt=\"yes\" />"); } } } private static class ProblemSharedColumn extends Column<ProblemAndModule, Boolean> { public ProblemSharedColumn() { super(new ProblemSharedCell()); } @Override public Boolean getValue(ProblemAndModule object) { return object.getProblem().isShared(); } } private static class ProblemModuleNameColumn extends Column<ProblemAndModule, String> { public ProblemModuleNameColumn() { super(new EditTextCell()); } /* (non-Javadoc) * @see com.google.gwt.user.cellview.client.Column#getValue(java.lang.Object) */ @Override public String getValue(ProblemAndModule object) { return object.getModule().getName(); } } /* (non-Javadoc) * @see org.cloudcoder.app.client.page.SessionObserver#activate(org.cloudcoder.app.client.model.Session, org.cloudcoder.app.shared.util.SubscriptionRegistrar) */ @Override public void activate(final Session session, SubscriptionRegistrar subscriptionRegistrar) { this.session = session; session.subscribe(Session.Event.ADDED_OBJECT, this, subscriptionRegistrar); // Set selection model. 
// When a Problem is selected, it will be added to the Session. this.selectionModel = new MultiSelectionModel<ProblemAndModule>(); selectionModel.addSelectionChangeHandler(new Handler() { @Override public void onSelectionChange(SelectionChangeEvent event) { Set<ProblemAndModule> problemModuleSet=selectionModel.getSelectedSet(); Problem[] problems=getProblemsFromProblemAndModule(problemModuleSet); if (problems!=null) { session.add(problems); if (problems.length==1) { // If there's only one problem, add it by itself to the session // This makes the options that operate on a single problem work better session.add(problems[0]); } } } }); grid.setSelectionModel(selectionModel); // Force loading of problems in course. // This avoids the problem that if a module in a course was selected // in the courses/problems page, some of the problems may not be // in the session (because they weren't in the selected module). CourseSelection courseSelection = session.get(CourseSelection.class); Course course = courseSelection.getCourse(); loadProblems(session, course); } public static Problem[] getProblemsFromProblemAndModule(Collection<ProblemAndModule> collection) { Problem[] problems=new Problem[collection.size()]; int i=0; for (ProblemAndModule pm : collection) { problems[i]=pm.getProblem(); i++; } return problems; } /** * Get the currently-selected {@link Problem}s. * * @return the currently-selected {@link Problem}s */ public Problem[] getSelected() { return getProblemsFromProblemAndModule(selectionModel.getSelectedSet()); } /** * Force {@link Problem}s to be reloaded. 
* * @param session the current {@link Session} * @param course the current {@link Course} */ public void loadProblems(final Session session, final Course course) { RPC.getCoursesAndProblemsService.getProblemAndSubscriptionReceipts(course, session.get(User.class), (Module)null, new AsyncCallback<ProblemAndSubmissionReceipt[]>() { /* (non-Javadoc) * @see com.google.gwt.user.client.rpc.AsyncCallback#onSuccess(java.lang.Object) */ @Override public void onSuccess(ProblemAndSubmissionReceipt[] result) { displayProblems(result); } /* (non-Javadoc) * @see com.google.gwt.user.client.rpc.AsyncCallback#onFailure(java.lang.Throwable) */ @Override public void onFailure(Throwable caught) { if (caught instanceof CloudCoderAuthenticationException) { page.recoverFromServerSessionTimeout(new Runnable() { public void run() { // Try again! loadProblems(session, course); } }); } } }); } /* (non-Javadoc) * @see org.cloudcoder.app.shared.util.Subscriber#eventOccurred(java.lang.Object, org.cloudcoder.app.shared.util.Publisher, java.lang.Object) */ @Override public void eventOccurred(Object key, Publisher publisher, Object hint) { if (key == Session.Event.ADDED_OBJECT && (hint instanceof CourseSelection)) { // Course selected, load its problems. // Note that this isn't really needed by CourseAdminPage (because there // is only one Course which is pre-selected), but if this view is // reused in another page at some point, this might be useful. 
CourseSelection courseSelection = (CourseSelection) hint; Course course = courseSelection.getCourse(); loadProblems(session, course); } else if (key == Session.Event.ADDED_OBJECT && (hint instanceof ProblemAndSubmissionReceipt[])) { // This can happen when these is an explicit reload of problems displayProblems((ProblemAndSubmissionReceipt[]) hint); } } protected void displayProblems(ProblemAndSubmissionReceipt[] problemAndSubmissionReceiptList) { ProblemAndModule[] problems = new ProblemAndModule[problemAndSubmissionReceiptList.length]; int count = 0; for (ProblemAndSubmissionReceipt p : problemAndSubmissionReceiptList) { problems[count++] = new ProblemAndModule(p.getProblem(), p.getModule()); } displayProblems(problems); } protected void displayProblems(ProblemAndModule[] result) { grid.setRowCount(result.length); grid.setRowData(Arrays.asList(result)); grid.setVisibleRange(0, result.length); } /** * @return */ public boolean hasPotentialUnsharedExercises() { for (ProblemAndModule problemAndModule : grid.getVisibleItems()) { Problem p=problemAndModule.getProblem(); if (!p.isShared() && (p.getProblemAuthorship()==ProblemAuthorship.ORIGINAL || p.getProblemAuthorship()==ProblemAuthorship.IMPORTED_AND_MODIFIED)) { // an unshared exercise that is original (i.e. new to this author) // or has been imported and modified can be shared return true; } } return false; } }
agpl-3.0
tdefilip/opennms
opennms-tools/sms-reflector/sms-monitor/src/test/java/org/opennms/sms/monitor/TestCallback.java
3809
/*******************************************************************************
 * This file is part of OpenNMS(R).
 *
 * Copyright (C) 2010-2014 The OpenNMS Group, Inc.
 * OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
 *
 * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
 *
 * OpenNMS(R) is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published
 * by the Free Software Foundation, either version 3 of the License,
 * or (at your option) any later version.
 *
 * OpenNMS(R) is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with OpenNMS(R).  If not, see:
 *      http://www.gnu.org/licenses/
 *
 * For more information contact:
 *     OpenNMS(R) Licensing <license@opennms.org>
 *     http://www.opennms.org/
 *     http://www.opennms.com/
 *******************************************************************************/

package org.opennms.sms.monitor;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

import org.opennms.sms.reflector.smsservice.MobileMsgRequest;
import org.opennms.sms.reflector.smsservice.MobileMsgResponse;
import org.opennms.sms.reflector.smsservice.MobileMsgResponseCallback;
import org.opennms.sms.reflector.smsservice.SmsResponse;
import org.opennms.sms.reflector.smsservice.UssdResponse;
import org.smslib.InboundMessage;
import org.smslib.USSDResponse;

/**
 * Test helper callback that records the outcome of a mobile-message request
 * and lets a test thread block until exactly one outcome (response, error,
 * or timeout) has been delivered.
 */
public class TestCallback implements MobileMsgResponseCallback {

    /** Released once, as soon as any outcome (response/error/timeout) is seen. */
    CountDownLatch m_latch = new CountDownLatch(1);

    /** Set on success; stays {@code null} when the request errors or times out. */
    AtomicReference<MobileMsgResponse> m_response = new AtomicReference<MobileMsgResponse>();

    /**
     * Records the response, wakes any waiter, and tells the sender the
     * response was consumed.
     */
    @Override
    public boolean handleResponse(MobileMsgRequest request, MobileMsgResponse response) {
        m_response.set(response);
        m_latch.countDown();
        return true;
    }

    /**
     * Logs the failure to stderr and wakes any waiter; no response is recorded.
     */
    @Override
    public void handleError(MobileMsgRequest request, Throwable t) {
        System.err.println("Error processing SmsRequest: " + request);
        m_latch.countDown();
    }

    /**
     * Logs the timeout to stderr and wakes any waiter; no response is recorded.
     */
    @Override
    public void handleTimeout(MobileMsgRequest request) {
        System.err.println("Timeout waiting for SmsRequest: " + request);
        m_latch.countDown();
    }

    /**
     * Blocks until an outcome arrives, then returns the recorded response
     * ({@code null} on error or timeout).
     *
     * @throws InterruptedException if interrupted while waiting
     */
    MobileMsgResponse getResponse() throws InterruptedException {
        m_latch.await();
        return m_response.get();
    }

    /**
     * Waits for the outcome and unwraps it as an SMS message.
     *
     * @return the inbound SMS, or {@code null} if the outcome was not an
     *         {@link SmsResponse}
     * @throws InterruptedException if interrupted while waiting
     */
    public InboundMessage getMessage() throws InterruptedException {
        MobileMsgResponse reply = getResponse();
        return (reply instanceof SmsResponse) ? ((SmsResponse) reply).getMessage() : null;
    }

    /**
     * Waits for the outcome and unwraps it as a USSD response.
     *
     * @return the USSD payload, or {@code null} if the outcome was not a
     *         {@link UssdResponse}
     * @throws InterruptedException if interrupted while waiting
     */
    public USSDResponse getUSSDResponse() throws InterruptedException {
        MobileMsgResponse reply = getResponse();
        return (reply instanceof UssdResponse) ? ((UssdResponse) reply).getMessage() : null;
    }
}
agpl-3.0
xwiki-contrib/currikiorg
plugins/asset/src/main/java/org/curriki/xwiki/plugin/asset/other/UnknownAsset.java
489
package org.curriki.xwiki.plugin.asset.other;

import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.doc.XWikiDocument;

import org.curriki.xwiki.plugin.asset.Asset;
import org.curriki.xwiki.plugin.asset.Constants;

/**
 * Fallback {@link Asset} used when no specific asset category applies;
 * it reports the {@code ASSET_CATEGORY_SUBTYPE_UNKNOWN} subtype marker.
 */
public class UnknownAsset extends Asset {

    /**
     * Wraps the given document as an asset of unknown category.
     *
     * @param doc     the underlying XWiki document
     * @param context the current XWiki execution context
     */
    public UnknownAsset(XWikiDocument doc, XWikiContext context) {
        super(doc, context);
    }

    /** @return the constant subtype marker identifying an unknown asset */
    @Override
    public String getCategorySubtype() {
        return Constants.ASSET_CATEGORY_SUBTYPE_UNKNOWN;
    }
}
lgpl-2.1
zardam/yajsw
src/yajsw/src/main/java/org/rzo/yajsw/os/ProcessManager.java
1488
/* This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * <p/> * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. */ package org.rzo.yajsw.os; import java.util.List; // TODO: Auto-generated Javadoc /** * The Interface ProcessManager. */ public interface ProcessManager { /** * Creates the process. * * @return the process */ public Process createProcess(); /** * Gets the process. * * @param pid * the pid * * @return the process */ public Process getProcess(int pid); /** * Current process id. * * @return the int */ public int currentProcessId(); /** * Process id of active window. * * @return the int */ public int processIdOfActiveWindow(); /** * Gets the process tree. * * @param pid * the pid * * @return the process tree */ public List getProcessTree(int pid); /** * Task list instance. * * @return the task list */ public TaskList taskListInstance(); public List getProcessIds(); }
lgpl-2.1
4ment/beast-mcmc
src/dr/evomodelxml/treelikelihood/BalancedBeagleTreeLikelihoodParser.java
14366
/*
 * BalancedBeagleTreeLikelihoodParser.java
 *
 * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
 *
 * This file is part of BEAST.
 * See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership and licensing.
 *
 * BEAST is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * BEAST is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with BEAST; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA  02110-1301  USA
 */

package dr.evomodelxml.treelikelihood;

import dr.evolution.tree.TreeUtils;
import dr.evomodel.branchmodel.BranchModel;
import dr.evomodel.branchmodel.HomogeneousBranchModel;
import dr.evomodel.siteratemodel.GammaSiteRateModel;
import dr.evomodel.substmodel.FrequencyModel;
import dr.evomodel.substmodel.SubstitutionModel;
import dr.evomodel.treelikelihood.AbstractTreeLikelihood;
import dr.evomodel.treelikelihood.BeagleTreeLikelihood;
import dr.evomodel.treelikelihood.PartialsRescalingScheme;
import dr.evolution.alignment.PatternList;
import dr.evolution.alignment.Patterns;
import dr.evolution.alignment.SitePatterns;
import dr.evolution.util.TaxonList;
import dr.evomodel.branchratemodel.BranchRateModel;
import dr.evomodel.tree.TreeModel;
import dr.evomodel.tipstatesmodel.TipStatesModel;
import dr.inference.model.CompoundLikelihood;
import dr.inference.model.Likelihood;
import dr.inference.model.Parameter;
import dr.xml.*;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * XML parser for the {@code balancedTreeLikelihood} element.
 * <p>
 * Unlike the plain tree-likelihood parser, this one empirically auto-tunes the
 * number of BEAGLE instances: it first times a single-instance likelihood
 * ({@code TEST_RUNS} makeDirty/getLogLikelihood cycles), then repeatedly splits
 * the site patterns over {@code instanceCount} sub-likelihoods wrapped in a
 * {@link CompoundLikelihood}, increasing the count as long as each step is more
 * than {@code TEST_CUTOFF} times faster than the previous best. All timing
 * progress is printed to {@code System.err}.
 *
 * @author Guy Baele
 */
public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser {

    //public static final String BEAGLE_INSTANCE_COUNT = "beagle.instance.count";

    public static final String TREE_LIKELIHOOD = "balancedTreeLikelihood";
    public static final String INSTANCE_COUNT = "instanceCount";
    public static final String PARTIALS_RESTRICTION = "partialsRestriction";

    // Number of makeDirty()/getLogLikelihood() cycles used for each timing probe.
    public final int TEST_RUNS = 100;
    // Required speedup ratio (previous time / new time) to accept a higher instance count.
    public final double TEST_CUTOFF = 1.30;

    public String getParserName() {
        return TREE_LIKELIHOOD;
    }

    /**
     * Factory hook: builds one {@link BeagleTreeLikelihood} from the parsed
     * components. Subclasses can override to substitute a different
     * likelihood implementation.
     *
     * @throws XMLParseException declared for subclasses; this implementation does not throw
     */
    protected BeagleTreeLikelihood createTreeLikelihood(PatternList patternList, TreeModel treeModel,
                                                        BranchModel branchModel,
                                                        GammaSiteRateModel siteRateModel,
                                                        BranchRateModel branchRateModel,
                                                        TipStatesModel tipStatesModel,
                                                        boolean useAmbiguities,
                                                        PartialsRescalingScheme scalingScheme,
                                                        boolean delayScaling,
                                                        Map<Set<String>, Parameter> partialsRestrictions,
                                                        XMLObject xo) throws XMLParseException {
        return new BeagleTreeLikelihood(
                patternList,
                treeModel,
                branchModel,
                siteRateModel,
                branchRateModel,
                tipStatesModel,
                useAmbiguities,
                scalingScheme,
                delayScaling,
                partialsRestrictions
        );
    }

    /**
     * Parses the element, then benchmarks instance counts 1, 2, 3, ... and
     * returns the fastest configuration: either implicitly a single
     * {@link BeagleTreeLikelihood} wrapped via the final rebuild, or a
     * {@link CompoundLikelihood} over pattern subsets.
     */
    public Object parseXMLObject(XMLObject xo) throws XMLParseException {

        boolean useAmbiguities = xo.getAttribute(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, false);

        /*int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
        if (instanceCount < 1) {
            instanceCount = 1;
        }

        String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
        if (ic != null && ic.length() > 0) {
            instanceCount = Integer.parseInt(ic);
        }*/

        // Required child elements: pattern list, tree and site-rate model.
        PatternList patternList = (PatternList) xo.getChild(PatternList.class);
        TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
        GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);

        FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);

        // Fall back to a homogeneous branch model built from the substitution
        // model (explicit child, or the one attached to the site-rate model).
        BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
        if (branchModel == null) {
            SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
            if (substitutionModel == null) {
                substitutionModel = siteRateModel.getSubstitutionModel();
            }
            if (substitutionModel == null) {
                throw new XMLParseException("No substitution model available for TreeLikelihood: "+xo.getId());
            }
            branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
        }

        BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);

        TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
        //        if (xo.getChild(TipStatesModel.class) != null) {
        //            throw new XMLParseException("Sequence Error Models are not supported under BEAGLE yet. Please use Native BEAST Likelihood.");
        //        }

        PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
        if (xo.hasAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)) {
            // NOTE(review): the parse call below is commented out, so the attribute
            // value is silently ignored and scalingScheme stays DEFAULT; the null
            // check that follows can therefore never fire. Also, the error message
            // is missing a closing quote after getId(). Confirm whether disabling
            // the parse was intentional before re-enabling.
//            scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME));
            if (scalingScheme == null)
                throw new XMLParseException("Unknown scaling scheme '"+xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)+"' in "+
                        "OldBeagleTreeLikelihood object '"+xo.getId());
        }
        boolean delayScaling = true;

        // Partials restriction is parsed for validation only; using it is rejected below.
        Map<Set<String>, Parameter> partialsRestrictions = null;

        if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
            XMLObject cxo = xo.getChild(PARTIALS_RESTRICTION);
            TaxonList taxonList = (TaxonList) cxo.getChild(TaxonList.class);
            //            Parameter parameter = (Parameter) cxo.getChild(Parameter.class);
            try {
                TreeUtils.getLeavesForTaxa(treeModel, taxonList);
            } catch (TreeUtils.MissingTaxonException e) {
                throw new XMLParseException("Unable to parse taxon list: " + e.getMessage());
            }
            throw new XMLParseException("Restricting internal nodes is not yet implemented. Contact Marc");
        }

        /*if (instanceCount == 1 || patternList.getPatternCount() < instanceCount) {
            return createTreeLikelihood(
                    patternList,
                    treeModel,
                    branchModel,
                    siteRateModel,
                    branchRateModel,
                    tipStatesModel,
                    useAmbiguities,
                    scalingScheme,
                    partialsRestrictions,
                    xo
            );
        }*/

        //first run a test for instanceCount == 1
        System.err.println("\nTesting instanceCount == 1");
        Likelihood baseLikelihood = createTreeLikelihood(
                patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel,
                useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
        // Baseline timing: full re-evaluation TEST_RUNS times.
        double start = System.nanoTime();
        for (int i = 0; i < TEST_RUNS; i++) {
            baseLikelihood.makeDirty();
            baseLikelihood.getLogLikelihood();
        }
        double end = System.nanoTime();
        double baseResult = end - start;
        System.err.println("Evaluation took: " + baseResult);

        // using multiple instances of BEAGLE...
        // Pattern splitting below relies on the SitePatterns sub-range constructor.
        if (!(patternList instanceof SitePatterns)) {
            throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with BEAUti-selected codon partitioning.");
        }

        if (tipStatesModel != null) {
            throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
        }

        //List<Likelihood> likelihoods = new ArrayList<Likelihood>();
        List<Likelihood> likelihoods = null;
        CompoundLikelihood compound = null;
        int instanceCount = 2;
        boolean optimal = false;

        // Keep raising the instance count while each step beats the previous
        // best time by more than TEST_CUTOFF; on the first non-improving step,
        // rebuild the previous (best) configuration and stop.
        while (optimal == false) {

            System.err.println("\nCreating instanceCount == " + instanceCount);
            likelihoods = new ArrayList<Likelihood>();
            for (int i = 0; i < instanceCount; i++) {
                // Every-instanceCount-th pattern, offset by i, forms one subset.
                Patterns subPatterns = new Patterns((SitePatterns)patternList, 0, 0, 1, i, instanceCount);
                AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(
                        subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null,
                        useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
                treeLikelihood.setId(xo.getId() + "_" + instanceCount);
                likelihoods.add(treeLikelihood);
            }
            //construct compoundLikelihood
            compound = new CompoundLikelihood(instanceCount, likelihoods);

            //test timings
            System.err.println("\nTesting instanceCount == " + instanceCount);
            start = System.nanoTime();
            for (int i = 0; i < TEST_RUNS; i++) {
                compound.makeDirty();
                compound.getLogLikelihood();
            }
            end = System.nanoTime();
            double newResult = end - start;
            System.err.println("Evaluation took: " + newResult);

            if (baseResult/newResult > TEST_CUTOFF) {
                // Meaningful speedup: accept and try one more instance.
                instanceCount++;
                baseResult = newResult;
            } else {
                // No meaningful speedup: back off to the previous count and
                // rebuild that configuration as the final result.
                optimal = true;
                instanceCount--;
                System.err.println("\nCreating final BeagleTreeLikelihood with instanceCount: " + instanceCount);
                likelihoods = new ArrayList<Likelihood>();
                for (int i = 0; i < instanceCount; i++) {
                    Patterns subPatterns = new Patterns((SitePatterns)patternList, 0, 0, 1, i, instanceCount);
                    AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(
                            subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null,
                            useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
                    treeLikelihood.setId(xo.getId() + "_" + instanceCount);
                    likelihoods.add(treeLikelihood);
                }
                //construct compoundLikelihood
                compound = new CompoundLikelihood(instanceCount, likelihoods);
            }

        }

        return compound;

        /*for (int i = 0; i < instanceCount; i++) {
            Patterns subPatterns = new Patterns((SitePatterns)patternList, 0, 0, 1, i, instanceCount);

            AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(
                    subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null,
                    useAmbiguities, scalingScheme, partialsRestrictions,
                    xo);
            treeLikelihood.setId(xo.getId() + "_" + instanceCount);
            likelihoods.add(treeLikelihood);
        }

        return new CompoundLikelihood(likelihoods);*/

    }

    //************************************************************************
    // AbstractXMLObjectParser implementation
    //************************************************************************

    public String getParserDescription() {
        return "This element represents the likelihood of a patternlist on a tree given the site model, with an automated detection of instanceCount.";
    }

    public Class getReturnType() {
        return Likelihood.class;
    }

    public static final XMLSyntaxRule[] rules = {
            AttributeRule.newBooleanRule(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, true),
            new ElementRule(PatternList.class),
            new ElementRule(TreeModel.class),
            new ElementRule(GammaSiteRateModel.class),
            new ElementRule(BranchModel.class, true),
            new ElementRule(SubstitutionModel.class, true),
            new ElementRule(BranchRateModel.class, true),
            new ElementRule(TipStatesModel.class, true),
            AttributeRule.newStringRule(BeagleTreeLikelihoodParser.SCALING_SCHEME,true),
            AttributeRule.newBooleanRule(BeagleTreeLikelihoodParser.DELAY_SCALING,true),
            // NOTE(review): TipStatesModel appears twice in this rule set (above and
            // below) — presumably harmless duplication, but verify.
            new ElementRule(PARTIALS_RESTRICTION, new XMLSyntaxRule[] {
                    new ElementRule(TaxonList.class),
                    new ElementRule(Parameter.class),
            }, true),
            new ElementRule(TipStatesModel.class, true),
            new ElementRule(FrequencyModel.class, true),
    };

    public XMLSyntaxRule[] getSyntaxRules() {
        return rules;
    }
}
lgpl-2.1
Bhavik3/checkstyle
src/test/resources/com/puppycrawl/tools/checkstyle/checks/indentation/InputInvalidIfIndent.java
10174
package com.puppycrawl.tools.checkstyle.checks.indentation; //indent:0 exp:0 /** //indent:0 exp:0 * This test-input is intended to be checked using following configuration: //indent:1 exp:1 * //indent:1 exp:1 * arrayInitIndent = 4 //indent:1 exp:1 * basicOffset = 4 //indent:1 exp:1 * braceAdjustment = 0 //indent:1 exp:1 * caseIndent = 4 //indent:1 exp:1 * forceStrictCondition = false //indent:1 exp:1 * lineWrappingIndentation = 4 //indent:1 exp:1 * tabWidth = 4 //indent:1 exp:1 * throwsIndent = 4 //indent:1 exp:1 * //indent:1 exp:1 * //indent:1 exp:1 */ //indent:1 exp:1 public class InputInvalidIfIndent { //indent:0 exp:0 // ctor with rcurly on same line //indent:4 exp:4 public InputInvalidIfIndent() { //indent:4 exp:4 } //indent:4 exp:4 // ctor with rcurly on next line //indent:4 exp:4 public InputInvalidIfIndent(int dummy) //indent:4 exp:4 { //indent:4 exp:4 } //indent:4 exp:4 // method with rcurly on same line //indent:4 exp:4 public void method() { //indent:4 exp:4 } //indent:4 exp:4 // method with rcurly on next line //indent:4 exp:4 public void method2() //indent:4 exp:4 { //indent:4 exp:4 } //indent:4 exp:4 // method with a bunch of params //indent:4 exp:4 public void method2(int x, int y, int w, int h) //indent:4 exp:4 { //indent:4 exp:4 } //indent:4 exp:4 // params on multiple lines //indent:4 exp:4 public void method2(int x, int y, int w, int h, //indent:4 exp:4 int x1, int y1, int w1, int h1) //indent:8 exp:>=8 { //indent:4 exp:4 } //indent:4 exp:4 // test ifs //indent:4 exp:4 public void emptyIfTest() //indent:4 exp:4 { //indent:4 exp:4 boolean test = true; //indent:8 exp:8 // lcurly on same line //indent:8 exp:8 if (test) { //indent:1 exp:8 warn } //indent:8 exp:8 // lcurly on next line -- if, rcurly indented too far, lcurly not far enough //indent:8 exp:8 // //indent:8 exp:8 if (test) //indent:9 exp:8 warn { //indent:9 exp:8 warn } //indent:7 exp:8 warn if (test) //indent:6 exp:8 warn { //indent:5 exp:8 warn } //indent:5 exp:8 warn // lcurly for if 
and else on same line -- too much space after if on same line -- ALLOWED //indent:8 exp:8 if (test) { //indent:8 exp:8 } else { // this is not allowed //indent:10 exp:8 warn } //indent:7 exp:8 warn // lcurly for if and else on same line //indent:8 exp:8 if (test) //indent:9 exp:8 warn { //indent:7 exp:8 warn } //indent:8 exp:8 else //indent:9 exp:8 warn { //indent:8 exp:8 } //indent:9 exp:8 warn // lcurly for if and else on same line -- mixed braces //indent:8 exp:8 if (test) { //indent:10 exp:8 warn } //indent:7 exp:8 warn else //indent:9 exp:8 warn { //indent:7 exp:8 warn } //indent:9 exp:8 warn // lcurly for if and else on same line -- mixed braces //indent:8 exp:8 if (test) //indent:9 exp:8 warn { //indent:9 exp:8 warn } else //indent:9 exp:8 warn { //indent:7 exp:9 warn } //indent:10 exp:8 warn // lcurly for if and else on same line -- mixed braces //indent:8 exp:8 if (test) //indent:6 exp:8 warn { //indent:10 exp:8 warn } else { //indent:10 exp:8 warn } //indent:7 exp:8 warn // lcurly for if and else on same line -- mixed braces, unnested //indent:8 exp:8 if (test) { //indent:5 exp:8 warn } //indent:11 exp:8 warn else { //indent:5 exp:8 warn } //indent:11 exp:8 warn } //indent:4 exp:4 ///// same as above, with statements //indent:4 exp:4 public void populatedIfTest() //indent:4 exp:4 { //indent:4 exp:4 boolean test = false; //indent:8 exp:8 // no braces if //indent:8 exp:8 if (test) //indent:8 exp:8 System.getProperty("blah"); //indent:14 exp:>=12 // no braces if/else //indent:8 exp:8 if (test) //indent:8 exp:8 System.getProperty("blah"); //indent:12 exp:>=12 else //indent:8 exp:8 System.getProperty("blah"); //indent:12 exp:>=12 // lcurly on same line, and stmt //indent:8 exp:8 if (test) { //indent:8 exp:8 System.getProperty("blah"); //indent:14 exp:12 warn } //indent:8 exp:8 // lcurly on next line and stmt //indent:8 exp:8 if (test) //indent:8 exp:8 { //indent:10 exp:8 warn System.getProperty("blah"); //indent:10 exp:12 warn } //indent:8 exp:8 // lcurly for 
if and else on same line //indent:8 exp:8 if (test) { //indent:8 exp:8 System. //indent:14 exp:12 warn getProperty("blah"); //indent:10 exp:12 warn } else { //indent:8 exp:8 System. //indent:10 exp:12 warn getProperty("blah"); //indent:8 exp:12 warn } //indent:8 exp:8 // lcurly for if and else on same line //indent:8 exp:8 if (test) //indent:8 exp:8 { //indent:8 exp:8 System.getProperty("blah"); //indent:12 exp:12 System.getProperty("blah"); //indent:16 exp:12 warn } //indent:9 exp:8 warn else //indent:8 exp:8 { //indent:8 exp:8 System.getProperty("blah"); //indent:16 exp:12 warn System.getProperty("blah"); //indent:12 exp:12 } //indent:8 exp:8 // lcurly for if and else on same line -- mixed braces //indent:8 exp:8 if (test) { //indent:8 exp:8 System.getProperty("blah"); //indent:0 exp:12 warn } //indent:8 exp:8 else //indent:8 exp:8 { //indent:8 exp:8 System.getProperty("blah"); //indent:40 exp:12 warn } //indent:8 exp:8 // lcurly for if and else on same line -- mixed braces //indent:8 exp:8 if (test) //indent:8 exp:8 { //indent:8 exp:8 System.getProperty("blah"); //indent:14 exp:12 warn } else //indent:8 exp:8 { //indent:8 exp:8 System.getProperty("blah"); //indent:14 exp:12 warn } //indent:8 exp:8 // lcurly for if and else on same line -- mixed braces //indent:8 exp:8 if (test) //indent:8 exp:8 { //indent:8 exp:8 System.getProperty("blah"); //indent:10 exp:12 warn } else { //indent:8 exp:8 System.getProperty("blah"); //indent:10 exp:12 warn } //indent:8 exp:8 // lcurly for if and else on same line -- mixed braces, unnested //indent:8 exp:8 if (test) { //indent:10 exp:8 warn System.getProperty("blah"); //indent:14 exp:12 warn } //indent:10 exp:8 warn else { //indent:10 exp:8 warn System.getProperty("blah"); //indent:14 exp:12 warn } //indent:10 exp:8 warn if (test //indent:8 exp:8 && 7 < 8 && 8 < 9 //indent:9 exp:12 warn && 10 < 11) { //indent:11 exp:12 warn } //indent:8 exp:8 if (test) //indent:8 exp:8 return; //indent:10 exp:12 warn if (test) { //indent:8 exp:8 
} else if (7 < 8) { //indent:7 exp:8 warn } else if (8 < 9) { //indent:8 exp:8 } //indent:8 exp:8 if (test) { //indent:8 exp:8 System.getProperty("blah"); //indent:12 exp:>=12 } else if (7 < 8) { //indent:8 exp:8 System.getProperty("blah"); //indent:10 exp:>=12 warn } else if (8 < 9) { //indent:8 exp:8 System.getProperty("blah"); //indent:10 exp:>=12 warn } //indent:8 exp:8 if (test) //indent:8 exp:8 System.getProperty("blah"); //indent:12 exp:12 else if (7 < 8) //indent:8 exp:8 System.getProperty("blah"); //indent:10 exp:10 else if (8 < 9) //indent:8 exp:8 System.getProperty("blah"); //indent:12 exp:12 // //indent:8 exp:8 if (test) { //indent:8 exp:8 System.getProperty("blah"); //indent:12 exp:12 } else //indent:8 exp:8 if (7 < 8) { //indent:10 exp:12 warn System.getProperty("blah"); //indent:16 exp:16 } else //indent:12 exp:12 if (8 < 9) { //indent:16 exp:16 System.getProperty("blah"); //indent:18 exp:20 warn } //indent:16 exp:16 if (test) { //indent:8 exp:8 System.getProperty("blah"); } //indent:12 exp:8 warn } //indent:4 exp:4 public void parenIfTest() { //indent:4 exp:4 boolean test = true; //indent:8 exp:8 if (test //indent:8 exp:8 ) { //indent:10 exp:8 warn System.getProperty("blah"); //indent:12 exp:12 } //indent:8 exp:8 if (test //indent:8 exp:8 ) //indent:6 exp:8 warn { //indent:8 exp:8 System.getProperty("blah"); //indent:12 exp:12 } //indent:8 exp:8 if //indent:8 exp:8 ( //indent:6 exp:8,12 warn test //indent:12 exp:12 ) { //indent:6 exp:8 warn System.getProperty("blah"); //indent:12 exp:12 } //indent:8 exp:8 } //indent:4 exp:4 } //indent:0 exp:0
lgpl-2.1
hpehl/testsuite
basic/src/test/java/org/jboss/hal/testsuite/test/configuration/remoting/outboundconnections/LocalOutboundConnectionTestCase.java
6964
package org.jboss.hal.testsuite.test.configuration.remoting.outboundconnections; import org.apache.commons.lang.RandomStringUtils; import org.jboss.arquillian.container.test.api.RunAsClient; import org.jboss.arquillian.drone.api.annotation.Drone; import org.jboss.arquillian.graphene.page.Page; import org.jboss.arquillian.junit.Arquillian; import org.jboss.hal.testsuite.category.Shared; import org.jboss.hal.testsuite.creaper.ManagementClientProvider; import org.jboss.hal.testsuite.creaper.ResourceVerifier; import org.jboss.hal.testsuite.page.config.RemotingSubsystemPage; import org.jboss.hal.testsuite.util.ConfigChecker; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.openqa.selenium.WebDriver; import org.wildfly.extras.creaper.core.CommandFailedException; import org.wildfly.extras.creaper.core.online.ModelNodeResult; import org.wildfly.extras.creaper.core.online.OnlineManagementClient; import org.wildfly.extras.creaper.core.online.operations.Address; import org.wildfly.extras.creaper.core.online.operations.OperationException; import org.wildfly.extras.creaper.core.online.operations.Operations; import org.wildfly.extras.creaper.core.online.operations.Values; import org.wildfly.extras.creaper.core.online.operations.admin.Administration; import java.io.IOException; import java.util.concurrent.TimeoutException; @RunWith(Arquillian.class) @RunAsClient @Category(Shared.class) public class LocalOutboundConnectionTestCase { @Drone private WebDriver browser; @Page private RemotingSubsystemPage page; private static final OnlineManagementClient client = ManagementClientProvider.createOnlineManagementClient(); private static final Administration administration = new Administration(client); private static final Operations operations = new Operations(client); private static final 
OutboundSocketBindingProvider socketBindingProvider = new OutboundSocketBindingProvider .Builder(OutboundSocketBindingProvider.Type.LOCAL) .client(client) .build(); private static final String SOCKET_BINDING_1 = "local-outbound-socket-binding1_" + RandomStringUtils.randomAlphanumeric(5), SOCKET_BINDING_2 = "local-outbound-socket-binding2_" + RandomStringUtils.randomAlphanumeric(5), SOCKET_BINDING_3 = "local-outbound-socket-binding3_" + RandomStringUtils.randomAlphanumeric(5), SOCKET_BINDING_4 = "local-outbound-socket-binding4_" + RandomStringUtils.randomAlphanumeric(5), LOCAL_OUTBOUND_CONNECTION = "local-outbound-connection", OUTBOUND_SOCKET_BINDING_REF = "outbound-socket-binding-ref"; private static final Address REMOTING_SUBSYSTEM_ADDRESS = Address.subsystem("remoting"), LOCAL_OUTBOUND_CONNECTION_ADDRESS = REMOTING_SUBSYSTEM_ADDRESS.and(LOCAL_OUTBOUND_CONNECTION, "local_outbound_" + RandomStringUtils.randomAlphanumeric(5)), LOCAL_OUTBOUND_CONNECTION_TBA_ADDRESS = REMOTING_SUBSYSTEM_ADDRESS.and(LOCAL_OUTBOUND_CONNECTION, "local_outbound_tba_" + RandomStringUtils.randomAlphanumeric(5)), LOCAL_OUTBOUND_CONNECTION_TBR_ADDRESS = REMOTING_SUBSYSTEM_ADDRESS.and(LOCAL_OUTBOUND_CONNECTION, "local_outbound_tbr_" + RandomStringUtils.randomAlphanumeric(5)); @BeforeClass public static void beforeClass() throws CommandFailedException, InterruptedException, TimeoutException, IOException { socketBindingProvider.createOutboundSocketBinding(SOCKET_BINDING_1); socketBindingProvider.createOutboundSocketBinding(SOCKET_BINDING_2); socketBindingProvider.createOutboundSocketBinding(SOCKET_BINDING_3); socketBindingProvider.createOutboundSocketBinding(SOCKET_BINDING_4); operations.add(LOCAL_OUTBOUND_CONNECTION_ADDRESS, Values.of(OUTBOUND_SOCKET_BINDING_REF, SOCKET_BINDING_1)).assertSuccess(); operations.add(LOCAL_OUTBOUND_CONNECTION_TBR_ADDRESS, Values.of(OUTBOUND_SOCKET_BINDING_REF, SOCKET_BINDING_2)).assertSuccess(); administration.reloadIfRequired(); } @Before public void before() { 
page.navigate(); page.switchToOutboundConnectionsTab(); page.getResourceManager().selectByName(LOCAL_OUTBOUND_CONNECTION_ADDRESS.getLastPairValue()); } @AfterClass public static void afterClass() throws CommandFailedException, InterruptedException, TimeoutException, IOException, OperationException { try { operations.removeIfExists(LOCAL_OUTBOUND_CONNECTION_ADDRESS); operations.removeIfExists(LOCAL_OUTBOUND_CONNECTION_TBA_ADDRESS); operations.removeIfExists(LOCAL_OUTBOUND_CONNECTION_TBR_ADDRESS); administration.reloadIfRequired(); socketBindingProvider.clean(); administration.reloadIfRequired(); } finally { client.close(); } } @Test public void addLocalOutboundConnection() throws Exception { page.addLocalOutboundConnection() .name(LOCAL_OUTBOUND_CONNECTION_TBA_ADDRESS.getLastPairValue()) .outboundSocketBindingRef(SOCKET_BINDING_3) .saveAndDismissReloadRequiredWindow(); Assert.assertTrue(page.getResourceManager().isResourcePresent(LOCAL_OUTBOUND_CONNECTION_TBA_ADDRESS.getLastPairValue())); new ResourceVerifier(LOCAL_OUTBOUND_CONNECTION_TBA_ADDRESS, client).verifyExists(); } @Test public void removeLocalOutboundConnection() throws Exception { page.getResourceManager() .removeResource(LOCAL_OUTBOUND_CONNECTION_TBR_ADDRESS.getLastPairValue()) .confirmAndDismissReloadRequiredMessage(); Assert.assertFalse(page.getResourceManager().isResourcePresent(LOCAL_OUTBOUND_CONNECTION_TBR_ADDRESS.getLastPairValue())); new ResourceVerifier(LOCAL_OUTBOUND_CONNECTION_TBR_ADDRESS, client).verifyDoesNotExist(); } @Test public void editOutboundSocketBindingRef() throws Exception { final ModelNodeResult originalModelNodeResult = operations.readAttribute(LOCAL_OUTBOUND_CONNECTION_ADDRESS, OUTBOUND_SOCKET_BINDING_REF); originalModelNodeResult.assertSuccess(); try { new ConfigChecker.Builder(client, LOCAL_OUTBOUND_CONNECTION_ADDRESS) .configFragment(page.getConfigFragment()) .editAndSave(ConfigChecker.InputType.TEXT, OUTBOUND_SOCKET_BINDING_REF, SOCKET_BINDING_4) .verifyFormSaved() 
.verifyAttribute(OUTBOUND_SOCKET_BINDING_REF, SOCKET_BINDING_4); } finally { operations.writeAttribute(LOCAL_OUTBOUND_CONNECTION_ADDRESS, OUTBOUND_SOCKET_BINDING_REF, SOCKET_BINDING_4); } } }
lgpl-2.1
PaytmLabs/cassandra
src/java/org/apache/cassandra/service/ActiveRepairService.java
19588
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.service; import java.io.File; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.concurrent.JMXConfigurableThreadPoolExecutor; import org.apache.cassandra.concurrent.NamedThreadFactory; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.compaction.CompactionManager; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import org.apache.cassandra.gms.FailureDetector; import org.apache.cassandra.gms.Gossiper; import org.apache.cassandra.io.sstable.Component; import org.apache.cassandra.io.sstable.SSTableReader; import org.apache.cassandra.locator.TokenMetadata; import org.apache.cassandra.net.IAsyncCallbackWithFailure; import 
org.apache.cassandra.net.MessageIn; import org.apache.cassandra.net.MessageOut; import org.apache.cassandra.net.MessagingService; import org.apache.cassandra.repair.*; import org.apache.cassandra.repair.messages.PrepareMessage; import org.apache.cassandra.repair.messages.RepairMessage; import org.apache.cassandra.repair.messages.SyncComplete; import org.apache.cassandra.repair.messages.ValidationComplete; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.UUIDGen; import org.apache.cassandra.utils.concurrent.Ref; import org.apache.cassandra.utils.concurrent.Refs; /** * ActiveRepairService is the starting point for manual "active" repairs. * * Each user triggered repair will correspond to one or multiple repair session, * one for each token range to repair. On repair session might repair multiple * column families. For each of those column families, the repair session will * request merkle trees for each replica of the range being repaired, diff those * trees upon receiving them, schedule the streaming ofthe parts to repair (based on * the tree diffs) and wait for all those operation. See RepairSession for more * details. * * The creation of a repair session is done through the submitRepairSession that * returns a future on the completion of that session. */ public class ActiveRepairService { private static final Logger logger = LoggerFactory.getLogger(ActiveRepairService.class); // singleton enforcement public static final ActiveRepairService instance = new ActiveRepairService(); public static final long UNREPAIRED_SSTABLE = 0; private static final ThreadPoolExecutor executor; static { executor = new JMXConfigurableThreadPoolExecutor(4, 60, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), new NamedThreadFactory("AntiEntropySessions"), "internal"); } public static enum Status { STARTED, SESSION_SUCCESS, SESSION_FAILED, FINISHED } /** * A map of active coordinator session. 
*/ private final ConcurrentMap<UUID, RepairSession> sessions; private final ConcurrentMap<UUID, ParentRepairSession> parentRepairSessions; /** * Protected constructor. Use ActiveRepairService.instance. */ protected ActiveRepairService() { sessions = new ConcurrentHashMap<>(); parentRepairSessions = new ConcurrentHashMap<>(); } /** * Requests repairs for the given keyspace and column families. * * @return Future for asynchronous call or null if there is no need to repair */ public RepairFuture submitRepairSession(UUID parentRepairSession, Range<Token> range, String keyspace, RepairParallelism parallelismDegree, Set<InetAddress> endpoints, String... cfnames) { if (cfnames.length == 0) return null; RepairSession session = new RepairSession(parentRepairSession, range, keyspace, parallelismDegree, endpoints, cfnames); if (session.endpoints.isEmpty()) return null; RepairFuture futureTask = new RepairFuture(session); executor.execute(futureTask); return futureTask; } public void addToActiveSessions(RepairSession session) { sessions.put(session.getId(), session); Gossiper.instance.register(session); FailureDetector.instance.registerFailureDetectionEventListener(session); } public void removeFromActiveSessions(RepairSession session) { Gossiper.instance.unregister(session); sessions.remove(session.getId()); } public synchronized void terminateSessions() { for (RepairSession session : sessions.values()) { session.forceShutdown(); } parentRepairSessions.clear(); } // for testing only. 
Create a session corresponding to a fake request and // add it to the sessions (avoid NPE in tests) RepairFuture submitArtificialRepairSession(RepairJobDesc desc) { Set<InetAddress> neighbours = new HashSet<>(); neighbours.addAll(ActiveRepairService.getNeighbors(desc.keyspace, desc.range, null, null)); RepairSession session = new RepairSession(desc.parentSessionId, desc.sessionId, desc.range, desc.keyspace, RepairParallelism.PARALLEL, neighbours, new String[]{desc.columnFamily}); sessions.put(session.getId(), session); RepairFuture futureTask = new RepairFuture(session); executor.execute(futureTask); return futureTask; } /** * Return all of the neighbors with whom we share the provided range. * * @param keyspaceName keyspace to repair * @param toRepair token to repair * @param dataCenters the data centers to involve in the repair * * @return neighbors with whom we share the provided range */ public static Set<InetAddress> getNeighbors(String keyspaceName, Range<Token> toRepair, Collection<String> dataCenters, Collection<String> hosts) { StorageService ss = StorageService.instance; Map<Range<Token>, List<InetAddress>> replicaSets = ss.getRangeToAddressMap(keyspaceName); Range<Token> rangeSuperSet = null; for (Range<Token> range : ss.getLocalRanges(keyspaceName)) { if (range.contains(toRepair)) { rangeSuperSet = range; break; } else if (range.intersects(toRepair)) { throw new IllegalArgumentException("Requested range intersects a local range but is not fully contained in one; this would lead to imprecise repair"); } } if (rangeSuperSet == null || !replicaSets.containsKey(rangeSuperSet)) return Collections.emptySet(); Set<InetAddress> neighbors = new HashSet<>(replicaSets.get(rangeSuperSet)); neighbors.remove(FBUtilities.getBroadcastAddress()); if (dataCenters != null) { TokenMetadata.Topology topology = ss.getTokenMetadata().cloneOnlyTokenMap().getTopology(); Set<InetAddress> dcEndpoints = Sets.newHashSet(); Multimap<String,InetAddress> dcEndpointsMap = 
topology.getDatacenterEndpoints(); for (String dc : dataCenters) { Collection<InetAddress> c = dcEndpointsMap.get(dc); if (c != null) dcEndpoints.addAll(c); } return Sets.intersection(neighbors, dcEndpoints); } else if (hosts != null) { Set<InetAddress> specifiedHost = new HashSet<>(); for (final String host : hosts) { try { final InetAddress endpoint = InetAddress.getByName(host.trim()); if (endpoint.equals(FBUtilities.getBroadcastAddress()) || neighbors.contains(endpoint)) specifiedHost.add(endpoint); } catch (UnknownHostException e) { throw new IllegalArgumentException("Unknown host specified " + host, e); } } if (!specifiedHost.contains(FBUtilities.getBroadcastAddress())) throw new IllegalArgumentException("The current host must be part of the repair"); if (specifiedHost.size() <= 1) { String msg = "Repair requires at least two endpoints that are neighbours before it can continue, the endpoint used for this repair is %s, " + "other available neighbours are %s but these neighbours were not part of the supplied list of hosts to use during the repair (%s)."; throw new IllegalArgumentException(String.format(msg, specifiedHost, neighbors, hosts)); } specifiedHost.remove(FBUtilities.getBroadcastAddress()); return specifiedHost; } return neighbors; } public synchronized UUID prepareForRepair(Set<InetAddress> endpoints, Collection<Range<Token>> ranges, List<ColumnFamilyStore> columnFamilyStores) { UUID parentRepairSession = UUIDGen.getTimeUUID(); registerParentRepairSession(parentRepairSession, columnFamilyStores, ranges); final CountDownLatch prepareLatch = new CountDownLatch(endpoints.size()); final AtomicBoolean status = new AtomicBoolean(true); final Set<String> failedNodes = Collections.synchronizedSet(new HashSet<String>()); IAsyncCallbackWithFailure callback = new IAsyncCallbackWithFailure() { public void response(MessageIn msg) { prepareLatch.countDown(); } public boolean isLatencyForSnitch() { return false; } public void onFailure(InetAddress from) { 
status.set(false); failedNodes.add(from.getHostAddress()); prepareLatch.countDown(); } }; List<UUID> cfIds = new ArrayList<>(columnFamilyStores.size()); for (ColumnFamilyStore cfs : columnFamilyStores) cfIds.add(cfs.metadata.cfId); for(InetAddress neighbour : endpoints) { PrepareMessage message = new PrepareMessage(parentRepairSession, cfIds, ranges); MessageOut<RepairMessage> msg = message.createMessage(); MessagingService.instance().sendRR(msg, neighbour, callback, TimeUnit.HOURS.toMillis(1), true); } try { prepareLatch.await(1, TimeUnit.HOURS); } catch (InterruptedException e) { parentRepairSessions.remove(parentRepairSession); throw new RuntimeException("Did not get replies from all endpoints. List of failed endpoint(s): " + failedNodes.toString(), e); } if (!status.get()) { parentRepairSessions.remove(parentRepairSession); throw new RuntimeException("Did not get positive replies from all endpoints. List of failed endpoint(s): " + failedNodes.toString()); } return parentRepairSession; } public synchronized void registerParentRepairSession(UUID parentRepairSession, List<ColumnFamilyStore> columnFamilyStores, Collection<Range<Token>> ranges) { parentRepairSessions.put(parentRepairSession, new ParentRepairSession(columnFamilyStores, ranges, System.currentTimeMillis())); } public Set<SSTableReader> currentlyRepairing(UUID cfId, UUID parentRepairSession) { Set<SSTableReader> repairing = new HashSet<>(); for (Map.Entry<UUID, ParentRepairSession> entry : parentRepairSessions.entrySet()) { Collection<SSTableReader> sstables = entry.getValue().sstableMap.get(cfId); if (sstables != null && !entry.getKey().equals(parentRepairSession)) repairing.addAll(sstables); } return repairing; } /** * Run final process of repair. * This removes all resources held by parent repair session, after performing anti compaction if necessary. 
* * @param parentSession Parent session ID * @param neighbors Repair participants (not including self) * @throws InterruptedException * @throws ExecutionException */ public synchronized ListenableFuture<?> finishParentSession(UUID parentSession, Set<InetAddress> neighbors, boolean doAntiCompaction) throws InterruptedException, ExecutionException { // We want to remove parent repair session whether we succeeded or not, so send AnticompactionRequest anyway. // Each replica node determines if anticompaction is needed. List<ListenableFuture<?>> tasks = new ArrayList<>(neighbors.size() + 1); for (InetAddress neighbor : neighbors) { AnticompactionTask task = new AnticompactionTask(parentSession, neighbor, doAntiCompaction); tasks.add(task); task.run(); // 'run' is just sending message } if (doAntiCompaction) { tasks.add(doAntiCompaction(parentSession)); } else { removeParentRepairSession(parentSession); } return Futures.successfulAsList(tasks); } public ParentRepairSession getParentRepairSession(UUID parentSessionId) { return parentRepairSessions.get(parentSessionId); } public synchronized ParentRepairSession removeParentRepairSession(UUID parentSessionId) { return parentRepairSessions.remove(parentSessionId); } /** * Submit anti-compaction jobs to CompactionManager. * When all jobs are done, parent repair session is removed whether those are suceeded or not. * * @param parentRepairSession parent repair session ID * @return Future result of all anti-compaction jobs. 
*/ public ListenableFuture<List<Object>> doAntiCompaction(final UUID parentRepairSession) { assert parentRepairSession != null; ParentRepairSession prs = getParentRepairSession(parentRepairSession); List<ListenableFuture<?>> futures = new ArrayList<>(); for (Map.Entry<UUID, ColumnFamilyStore> columnFamilyStoreEntry : prs.columnFamilyStores.entrySet()) { Refs<SSTableReader> sstables = prs.getAndReferenceSSTables(columnFamilyStoreEntry.getKey()); ColumnFamilyStore cfs = columnFamilyStoreEntry.getValue(); futures.add(CompactionManager.instance.submitAntiCompaction(cfs, prs.ranges, sstables, prs.repairedAt)); } ListenableFuture<List<Object>> allAntiCompactionResults = Futures.successfulAsList(futures); allAntiCompactionResults.addListener(new Runnable() { @Override public void run() { removeParentRepairSession(parentRepairSession); } }, MoreExecutors.sameThreadExecutor()); return allAntiCompactionResults; } public void handleMessage(InetAddress endpoint, RepairMessage message) { RepairJobDesc desc = message.desc; RepairSession session = sessions.get(desc.sessionId); if (session == null) return; switch (message.messageType) { case VALIDATION_COMPLETE: ValidationComplete validation = (ValidationComplete) message; session.validationComplete(desc, endpoint, validation.tree); break; case SYNC_COMPLETE: // one of replica is synced. 
SyncComplete sync = (SyncComplete) message; session.syncComplete(desc, sync.nodes, sync.success); break; default: break; } } public static class ParentRepairSession { public final Map<UUID, ColumnFamilyStore> columnFamilyStores = new HashMap<>(); public final Collection<Range<Token>> ranges; public final Map<UUID, Set<SSTableReader>> sstableMap; public final long repairedAt; public ParentRepairSession(List<ColumnFamilyStore> columnFamilyStores, Collection<Range<Token>> ranges, long repairedAt) { for (ColumnFamilyStore cfs : columnFamilyStores) this.columnFamilyStores.put(cfs.metadata.cfId, cfs); this.ranges = ranges; this.sstableMap = new HashMap<>(); this.repairedAt = repairedAt; } public synchronized Refs<SSTableReader> getAndReferenceSSTables(UUID cfId) { Set<SSTableReader> sstables = sstableMap.get(cfId); Iterator<SSTableReader> sstableIterator = sstables.iterator(); ImmutableMap.Builder<SSTableReader, Ref<SSTableReader>> references = ImmutableMap.builder(); while (sstableIterator.hasNext()) { SSTableReader sstable = sstableIterator.next(); if (!new File(sstable.descriptor.filenameFor(Component.DATA)).exists()) { sstableIterator.remove(); } else { Ref<SSTableReader> ref = sstable.tryRef(); if (ref == null) sstableIterator.remove(); else references.put(sstable, ref); } } return new Refs<>(references.build()); } public void addSSTables(UUID cfId, Collection<SSTableReader> sstables) { Set<SSTableReader> existingSSTables = this.sstableMap.get(cfId); if (existingSSTables == null) existingSSTables = new HashSet<>(); existingSSTables.addAll(sstables); this.sstableMap.put(cfId, existingSSTables); } @Override public String toString() { return "ParentRepairSession{" + "columnFamilyStores=" + columnFamilyStores + ", ranges=" + ranges + ", sstableMap=" + sstableMap + ", repairedAt=" + repairedAt + '}'; } } }
apache-2.0
gradle/gradle
subprojects/launcher/src/main/java/org/gradle/launcher/daemon/bootstrap/ForegroundDaemonAction.java
2500
/* * Copyright 2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.launcher.daemon.bootstrap; import org.gradle.internal.classpath.DefaultClassPath; import org.gradle.internal.logging.LoggingManagerInternal; import org.gradle.internal.service.ServiceRegistry; import org.gradle.launcher.daemon.configuration.DaemonServerConfiguration; import org.gradle.launcher.daemon.registry.DaemonRegistry; import org.gradle.launcher.daemon.server.Daemon; import org.gradle.launcher.daemon.server.DaemonServices; import org.gradle.launcher.daemon.server.MasterExpirationStrategy; import org.gradle.launcher.daemon.server.expiry.DaemonExpirationStrategy; import static org.gradle.launcher.daemon.server.api.DaemonStateControl.State.Idle; public class ForegroundDaemonAction implements Runnable { private final ServiceRegistry loggingRegistry; private final DaemonServerConfiguration configuration; public ForegroundDaemonAction(ServiceRegistry loggingRegistry, DaemonServerConfiguration configuration) { this.loggingRegistry = loggingRegistry; this.configuration = configuration; } @Override public void run() { LoggingManagerInternal loggingManager = loggingRegistry.newInstance(LoggingManagerInternal.class); loggingManager.start(); DaemonServices daemonServices = new DaemonServices(configuration, loggingRegistry, loggingManager, DefaultClassPath.of()); Daemon daemon = daemonServices.get(Daemon.class); DaemonRegistry daemonRegistry = 
daemonServices.get(DaemonRegistry.class); DaemonExpirationStrategy expirationStrategy = daemonServices.get(MasterExpirationStrategy.class); daemon.start(); try { daemonRegistry.markState(daemon.getAddress(), Idle); daemon.stopOnExpiration(expirationStrategy, configuration.getPeriodicCheckIntervalMs()); } finally { daemon.stop(); } } }
apache-2.0
code-disaster/VisEditor
Plugins/SpineRuntime/src/com/kotcrab/vis/plugin/spine/runtime/SpineAssetDescriptor.java
2656
/* * Spine Runtimes Software License * Version 2.3 * * Copyright (c) 2013-2015, Esoteric Software * All rights reserved. * * You are granted a perpetual, non-exclusive, non-sublicensable and * non-transferable license to use, install, execute and perform the Spine * Runtimes Software (the "Software") and derivative works solely for personal * or internal use. Without the written permission of Esoteric Software (see * Section 2 of the Spine Software License Agreement), you may not (a) modify, * translate, adapt or otherwise create derivative works, improvements of the * Software or develop new applications using the Software or (b) remove, * delete, alter or obscure any trademarks or any copyright, trademark, patent * or other intellectual property or proprietary rights notices on or in the * Software, including any copy thereof. Redistributions in binary or source * form must include this license and terms. * * THIS SOFTWARE IS PROVIDED BY ESOTERIC SOFTWARE "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL ESOTERIC SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.kotcrab.vis.plugin.spine.runtime; import com.kotcrab.vis.runtime.assets.VisAssetDescriptor; public class SpineAssetDescriptor implements VisAssetDescriptor { private String atlasPath; private String skeletonPath; private float scale; //ignored in compare public SpineAssetDescriptor () { } public SpineAssetDescriptor (String atlasPath, String skeletonPath, float scale) { this.atlasPath = atlasPath; this.skeletonPath = skeletonPath; this.scale = scale; } public String getAtlasPath () { return atlasPath; } public String getSkeletonPath () { return skeletonPath; } @Override public boolean compare (VisAssetDescriptor asset) { if (asset instanceof SpineAssetDescriptor == false) return false; SpineAssetDescriptor desc = (SpineAssetDescriptor) asset; if (atlasPath.equals(desc.atlasPath) == false) return false; return skeletonPath.equals(desc.skeletonPath); } public float getScale () { return scale; } }
apache-2.0
vhalbert/modeshape
connectors/modeshape-connector-git/src/test/java/org/modeshape/connector/git/GitConnectorTest.java
18526
/* * ModeShape (http://www.modeshape.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.modeshape.connector.git; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsInstanceOf.instanceOf; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsSame.sameInstance; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import javax.jcr.Binary; import javax.jcr.Item; import javax.jcr.Node; import javax.jcr.NodeIterator; import javax.jcr.Property; import javax.jcr.PropertyIterator; import javax.jcr.Value; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.ListBranchCommand; import org.eclipse.jgit.storage.file.WindowCacheConfig; import org.eclipse.jgit.storage.pack.PackConfig; import org.eclipse.jgit.transport.RefSpec; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.modeshape.common.FixFor; import org.modeshape.common.util.IoUtil; import org.modeshape.jcr.MultiUseAbstractTest; import org.modeshape.jcr.RepositoryConfiguration; import org.modeshape.jcr.api.Session; import 
org.modeshape.jcr.api.federation.FederationManager;

/**
 * Unit test for {@link org.modeshape.connector.git.GitConnector}
 */
public class GitConnectorTest extends MultiUseAbstractTest {

    // Parent node under which both git projections are mounted; re-read before each test.
    private Node testRoot;

    /**
     * Starts the repository from the git-federation configuration and projects the
     * local clone twice: once as "git-modeshape-remote" and once as "git-modeshape-local".
     */
    @BeforeClass
    public static void beforeAll() throws Exception {
        // Make sure the clone has the branches/tags the tests rely on before starting.
        loadGitRepositoryData();
        RepositoryConfiguration config = RepositoryConfiguration.read("config/repo-config-git-federation.json");
        startRepository(config);
        Session session = getSession();
        Node testRoot = session.getRootNode().addNode("repos");
        session.save();
        FederationManager fedMgr = session.getWorkspace().getFederationManager();
        fedMgr.createProjection(testRoot.getPath(), "remote-git-repo", "/", "git-modeshape-remote");
        fedMgr.createProjection(testRoot.getPath(), "local-git-repo", "/", "git-modeshape-local");
    }

    /**
     * Ensures that the surrounding clone (two directories up — assumed to be the project's
     * own git checkout) contains every tag in {@link #expectedTagNames()} and every branch in
     * {@link #expectedRemoteBranchNames()}, fetching any that are missing from origin.
     */
    private static void loadGitRepositoryData() throws Exception {
        try (Git git = Git.open(new File("../.."))) {
            // the tests expect a series of remote branches and tags from origin, so if they're not present in the clone
            // where the test is running, we need to load them...
            List<RefSpec> refsToFetch = new ArrayList<>();
            // Collect the short names of all tags currently present in the clone.
            List<String> tagNames = git.tagList().call().stream()
                                       .map(ref -> ref.getName().replace("refs/tags/", ""))
                                       .collect(Collectors.toList());
            // Queue a fetch refspec for each expected tag that is missing.
            Arrays.stream(expectedTagNames())
                  .filter(tagName -> !tagNames.contains(tagName))
                  .map(tagName -> new RefSpec("+refs/tags/" + tagName + ":refs/remotes/origin/" + tagName))
                  .forEach(refsToFetch::add);
            // Collect the short names of all local and remote branches.
            List<String> branchNames = git.branchList().setListMode(ListBranchCommand.ListMode.ALL).call()
                                          .stream()
                                          .map(ref -> ref.getName()
                                                         .replace("refs/heads/", "")
                                                         .replace("refs/remotes/origin/", ""))
                                          .collect(Collectors.toList());
            // Queue a fetch refspec for each expected branch that is missing.
            Arrays.stream(expectedRemoteBranchNames())
                  .filter(branchName -> !branchNames.contains(branchName))
                  .map(branchName -> new RefSpec("+refs/heads/" + branchName + ":refs/remotes/origin/" + branchName))
                  .forEach(refsToFetch::add);
            if (!refsToFetch.isEmpty()) {
                // next fetch all the remote refs which we need for the test
                git.fetch().setRefSpecs(refsToFetch).call();
            }
        }
    }

    @AfterClass
    public static void afterAll() throws Exception {
        MultiUseAbstractTest.afterAll();
    }

    // Re-resolve the projection root before every test, since sessions may change.
    @Before
    public void before() throws Exception {
        testRoot = getSession().getRootNode().getNode("repos");
    }

    /** @return the root node of the "remote" git projection */
    protected Node gitRemoteNode() throws Exception {
        return testRoot.getNode("git-modeshape-remote");
    }

    /** @return the root node of the "local" git projection */
    protected Node gitLocalNode() throws Exception {
        return testRoot.getNode("git-modeshape-local");
    }

    @Test
    public void shouldReadFederatedNodeInProjection() throws Exception {
        Node git = gitRemoteNode();
        assertThat(git, is(notNullValue()));
        assertThat(git.getParent(), is(sameInstance(testRoot)));
        assertThat(git.getPath(), is(testRoot.getPath() + "/git-modeshape-remote"));
        assertThat(git.getName(), is("git-modeshape-remote"));
    }

    @Test
    public void shouldReadTags() throws Exception {
        Node git = gitRemoteNode();
        Node tags = git.getNode("tags");
        assertChildrenInclude("Make sure you run <git fetch --tags>", tags, expectedTagNames());
    }

    @Test
    public void shouldReadRemoteBranches() throws Exception {
        Node git = gitRemoteNode();
        Node branches = git.getNode("branches");
        assertChildrenInclude(branches, expectedRemoteBranchNames());
    }

    @Test
    @FixFor( "MODE-2426" )
    public void shouldReadLocalBranches() throws Exception {
        Node git = gitLocalNode();
        Node branches = git.getNode("branches");
        assertChildrenInclude(branches, "master");
    }

    @Test
    public void shouldReadTreeSubgraph() throws Exception {
        Node git = gitRemoteNode();
        Node tree = git.getNode("tree");
        navigate(tree, false, 100, 2);
    }

    @Test
    public void shouldReadCommitSubgraph() throws Exception {
        Node git = gitRemoteNode();
        Node commit = git.getNode("commit");
        navigate(commit, false, 100, 2);
    }

    @FixFor( "MODE-1732" )
    @Test
    public void shouldFollowReferenceFromRecentTagToCommit() throws Exception {
        Node git = gitRemoteNode();
        Node tag = git.getNode("tags/modeshape-3.0.0.Final");
        assertThat(tag.getProperty("git:objectId").getString(), is(notNullValue()));
        assertThat(tag.getProperty("git:tree").getString(), is(notNullValue()));
        assertThat(tag.getProperty("git:history").getString(), is(notNullValue()));
        // Follow the reference from the tag to the tree it points at.
        Node tagTree = tag.getProperty("git:tree").getNode();
        assertThat(tagTree.getPath(), is(treePathFor(tag)));
        assertChildrenInclude(tagTree, expectedTopLevelFileAndFolderNames());

        // Load some of the child nodes ...
        Node pomFile = tagTree.getNode("pom.xml");
        assertThat(pomFile.getPrimaryNodeType().getName(), is("git:file"));
        assertNodeHasObjectIdProperty(pomFile);
        assertNodeHasCommittedProperties(pomFile);
        Node pomContent = pomFile.getNode("jcr:content");
        assertNodeHasCommittedProperties(pomContent);
        assertThat(pomContent.getProperty("jcr:data").getString(), is(notNullValue()));

        Node readmeFile = tagTree.getNode("README.md");
        assertThat(readmeFile.getPrimaryNodeType().getName(), is("git:file"));
        assertNodeHasObjectIdProperty(readmeFile);
        assertNodeHasCommittedProperties(readmeFile);
        Node readmeContent = readmeFile.getNode("jcr:content");
        assertNodeHasCommittedProperties(readmeContent);
        assertThat(readmeContent.getProperty("jcr:data").getString(), is(notNullValue()));

        Node parentModule = tagTree.getNode("modeshape-parent");
        assertThat(parentModule.getPrimaryNodeType().getName(), is("git:folder"));
        assertNodeHasObjectIdProperty(parentModule);
        assertNodeHasCommittedProperties(parentModule);
    }

    /**
     * Builds the expected path of the tree node referenced by the given node's
     * "git:objectId" property, i.e. {@code <git-root>/tree/<objectId>}.
     */
    protected String treePathFor( Node node ) throws Exception {
        Node git = gitRemoteNode();
        String commitId = node.getProperty("git:objectId").getString();
        return git.getPath() + "/tree/" + commitId;
    }

    @Test
    public void shouldFollowReferenceFromOldTagToCommit() throws Exception {
        Node git = gitRemoteNode();
        Node tag = git.getNode("tags/dna-0.2");
        assertThat(tag.getProperty("git:objectId").getString(), is(notNullValue()));
        assertThat(tag.getProperty("git:tree").getString(), is(notNullValue()));
        assertThat(tag.getProperty("git:history").getString(), is(notNullValue()));
        Node tagTree = tag.getProperty("git:tree").getNode();
        assertThat(tagTree.getPath(), is(treePathFor(tag)));
        assertChildrenInclude(tagTree, "pom.xml", "dna-jcr", "dna-common", ".project");
    }

    @Test
    public void shouldContainTagsAndBranchNamesAndCommitsUnderTreeNode() throws Exception {
        Node git = gitRemoteNode();
        Node tree = git.getNode("tree");
        assertThat(tree.getPrimaryNodeType().getName(), is("git:trees"));
        assertChildrenInclude(tree, expectedRemoteBranchNames());
        assertChildrenInclude("Make sure you run <git fetch --tags>", tree, expectedTagNames());
    }

    @Test
    public void shouldFindMasterBranchAsPrimaryItemUnderBranchNode() throws Exception {
        Node git = gitRemoteNode();
        Node branches = git.getNode("branches");
        Item primaryItem = branches.getPrimaryItem();
        assertThat(primaryItem, is(notNullValue()));
        assertThat(primaryItem, is(instanceOf(Node.class)));
        Node primaryNode = (Node)primaryItem;
        assertThat(primaryNode.getName(), is("master"));
        assertThat(primaryNode.getParent(), is(sameInstance(branches)));
        assertThat(primaryNode, is(sameInstance(branches.getNode("master"))));
    }

    @Test
    public void shouldFindMasterBranchAsPrimaryItemUnderTreeNode() throws Exception {
        Node git = gitRemoteNode();
        Node tree = git.getNode("tree");
        Item primaryItem = tree.getPrimaryItem();
        assertThat(primaryItem, is(notNullValue()));
        assertThat(primaryItem, is(instanceOf(Node.class)));
        Node primaryNode = (Node)primaryItem;
        assertThat(primaryNode.getName(), is("master"));
        assertThat(primaryNode.getParent(), is(sameInstance(tree)));
        assertThat(primaryNode, is(sameInstance(tree.getNode("master"))));
    }

    @Test
    public void shouldFindTreeBranchAsPrimaryItemUnderGitRoot() throws Exception {
        Node git = gitRemoteNode();
        Node tree = git.getNode("tree");
        assertThat(tree, is(notNullValue()));
        Item primaryItem = git.getPrimaryItem();
        assertThat(primaryItem, is(notNullValue()));
        assertThat(primaryItem, is(instanceOf(Node.class)));
        Node primaryNode = (Node)primaryItem;
        assertThat(primaryNode.getName(), is(tree.getName()));
        assertThat(primaryNode.getParent(), is(sameInstance(git)));
        assertThat(primaryNode, is(sameInstance(tree)));
    }

    @Test
    public void shouldFindLatestCommitInMasterBranch() throws Exception {
        Node git = gitRemoteNode();
        Node commits = git.getNode("commits");
        Node master = commits.getNode("master");
        Node commit = master.getNodes().nextNode(); // the first commit in the history of the 'master' branch ...
        // print = true;
        printDetails(commit);
        assertNodeHasObjectIdProperty(commit, commit.getName());
        assertNodeHasCommittedProperties(commit);
        assertThat(commit.getProperty("git:title").getString(), is(notNullValue()));
        // The commit's tree and detail references must resolve to the matching projection paths.
        assertThat(commit.getProperty("git:tree").getNode().getPath(), is(git.getPath() + "/tree/" + commit.getName()));
        assertThat(commit.getProperty("git:detail").getNode().getPath(), is(git.getPath() + "/commit/" + commit.getName()));
    }

    @Test
    public void shouldFindLatestCommitDetailsInMasterBranch() throws Exception {
        Node git = gitRemoteNode();
        Node commits = git.getNode("commit");
        Node commit = commits.getNodes().nextNode(); // the first commit ...
        // print = true;
        printDetails(commit);
        assertNodeHasObjectIdProperty(commit);
        assertNodeHasCommittedProperties(commit);
        assertThat(commit.getProperty("git:parents").isMultiple(), is(true));
        // Every parent id stored on the commit must resolve to an existing node.
        for (Value parentValue : commit.getProperty("git:parents").getValues()) {
            String identifier = parentValue.getString();
            Node parent = getSession().getNodeByIdentifier(identifier);
            assertThat(parent, is(notNullValue()));
        }
        assertThat(commit.getProperty("git:diff").getString(), is(notNullValue()));
        assertThat(commit.getProperty("git:tree").getNode().getPath(), is(treePathFor(commit)));
    }

    @Test
    @FixFor( "MODE-2352" )
    public void shouldReadTreeObjectProperties() throws Exception {
        Node tree = session.getNode("/repos/git-modeshape-remote/tree/72ea74be3b3a50345a1b2f543f78fd6be00caa35");
        assertNotNull(tree);
        PropertyIterator propertyIterator = tree.getProperties();
        while (propertyIterator.hasNext()) {
            Property property = propertyIterator.nextProperty();
            assertNotNull(property.getName());
            assertNotNull(property.getValue());
        }
    }

    @Test
    @FixFor( "MODE-2352" )
    public void shouldReadBranchObjectProperties() throws Exception {
        Node branch = session.getNode("/repos/git-modeshape-remote/branches/master");
        assertNotNull(branch);
        PropertyIterator propertyIterator = branch.getProperties();
        while (propertyIterator.hasNext()) {
            Property property = propertyIterator.nextProperty();
            assertNotNull(property.getName());
            assertNotNull(property.getValue());
        }
    }

    @Test
    @FixFor( "MODE-2352" )
    public void shouldNavigateCommitWithMultiplePages() throws Exception {
        // Simply iterating all children exercises the connector's paging support.
        Node commit = session.getNode("/repos/git-modeshape-remote/commits/d1f7daf32bd67edded7545221cd5c79d94813310");
        assertNotNull(commit);
        NodeIterator childrenIterator = commit.getNodes();
        while (childrenIterator.hasNext()) {
            childrenIterator.nextNode();
        }
    }

    @Test
    @FixFor( "MODE-2643")
    public void shouldReadBinaryNodeAsLargeFile() throws Exception {
        //use some JGit API magic to reconfigure the default threshold which is around 50MB
        //so that when we attempt to read a larger binary, it will be seen as a large file by JGit
        WindowCacheConfig newConfig = new WindowCacheConfig();
        newConfig.setStreamFileThreshold(2 * WindowCacheConfig.MB);
        newConfig.install();
        try {
            readLargeBinary();
        } finally {
            // Restore JGit's default threshold so later tests see the standard behavior.
            newConfig.setStreamFileThreshold(PackConfig.DEFAULT_BIG_FILE_THRESHOLD);
            newConfig.install();
        }
    }

    @Test
    @FixFor( "MODE-2643")
    public void shouldReadBinaryNodeAsRegularFile() throws Exception {
        readLargeBinary();
    }

    /**
     * Reads a large PDF checked into the repository and verifies that the binary
     * stream is non-empty and its readable length matches the reported size.
     */
    private void readLargeBinary() throws Exception {
        Node commit = session.getNode("/repos/git-modeshape-remote/tree/master/modeshape-jcr/src/test/resources/docs/postgresql-8.4.1-US.pdf");
        assertNotNull(commit);
        Binary data = commit.getNode("jcr:content").getProperty("jcr:data").getBinary();
        long size = data.getSize();
        assertTrue(size > 0);
        //simply read the stream to make sure it's valid
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        BufferedOutputStream bos = new BufferedOutputStream(baos);
        IoUtil.write(data.getStream(), bos);
        assertEquals("invalid binary stream", size, baos.toByteArray().length);
    }

    /** Asserts the node has a non-null "git:objectId" property. */
    protected void assertNodeHasObjectIdProperty( Node node ) throws Exception {
        assertThat(node.getProperty("git:objectId").getString(), is(notNullValue()));
    }

    /** Asserts the node's "git:objectId" property equals the given commit id. */
    protected void assertNodeHasObjectIdProperty( Node node, String commitId ) throws Exception {
        assertThat(node.getProperty("git:objectId").getString(), is(commitId));
    }

    /** Asserts the node carries the author/committer/committed/title commit properties. */
    protected void assertNodeHasCommittedProperties( Node node ) throws Exception {
        assertThat(node.getProperty("git:author").getString(), is(notNullValue()));
        assertThat(node.getProperty("git:committer").getString(), is(notNullValue()));
        assertThat(node.getProperty("git:committed").getDate(), is(notNullValue()));
        assertThat(node.getProperty("git:title").getString(), is(notNullValue()));
    }

    /**
     * The <i>minimal</i> names of the files and/or folders that are expected to exist at the top-level of the Git repository.
     * Additional file and folder names will be acceptable.
     *
     * @return the file and folder names; never null
     */
    protected static String[] expectedTopLevelFileAndFolderNames() {
        return new String[]{"modeshape-parent", "pom.xml"};
    }

    /**
     * The <i>minimal</i> names of the branches that are expected to exist. Additional branch names will be acceptable.
     * Note that if any of these branches do not exist at startup, the tests will attempt to retrieve them from remote/origin
     *
     * @return the branch names; never null
     */
    protected static String[] expectedRemoteBranchNames() {
        return new String[]{"master", "2.x", "3.x", "4.x"};
    }

    /**
     * The <i>minimal</i> names of the tags that are expected to exist. Additional tag names will be acceptable.
     * Note that if any of these tags do not exist at startup, the tests will attempt to retrieve them from remote/origin
     *
     * @return the tag names; never null
     */
    protected static String[] expectedTagNames() {
        return new String[]{"modeshape-3.0.0.Final", "dna-0.2"};
    }
}
apache-2.0
ind9/gocd
api/api-agents-v5/src/main/java/com/thoughtworks/go/apiv5/agents/AgentsControllerV5.java
9481
/*
 * Copyright 2019 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.apiv5.agents;

import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.thoughtworks.go.api.ApiController;
import com.thoughtworks.go.api.ApiVersion;
import com.thoughtworks.go.api.CrudController;
import com.thoughtworks.go.api.base.OutputWriter;
import com.thoughtworks.go.api.representers.JsonReader;
import com.thoughtworks.go.api.spring.ApiAuthenticationHelper;
import com.thoughtworks.go.api.util.GsonTransformer;
import com.thoughtworks.go.api.util.MessageJson;
import com.thoughtworks.go.apiv5.agents.model.AgentBulkUpdateRequest;
import com.thoughtworks.go.apiv5.agents.model.AgentUpdateRequest;
import com.thoughtworks.go.apiv5.agents.representers.AgentBulkUpdateRequestRepresenter;
import com.thoughtworks.go.apiv5.agents.representers.AgentRepresenter;
import com.thoughtworks.go.apiv5.agents.representers.AgentUpdateRequestRepresenter;
import com.thoughtworks.go.apiv5.agents.representers.AgentsRepresenter;
import com.thoughtworks.go.config.exceptions.EntityType;
import com.thoughtworks.go.config.exceptions.HttpException;
import com.thoughtworks.go.domain.AgentInstance;
import com.thoughtworks.go.domain.NullAgentInstance;
import com.thoughtworks.go.server.service.AgentService;
import com.thoughtworks.go.server.service.EnvironmentConfigService;
import com.thoughtworks.go.server.service.SecurityService;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.server.service.result.HttpOperationResult;
import com.thoughtworks.go.spark.Routes;
import com.thoughtworks.go.spark.spring.SparkSpringController;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import spark.Request;
import spark.Response;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.function.Consumer;

import static java.util.Collections.singletonList;
import static spark.Spark.*;

/**
 * Version 5 of the agents API: lists, shows, updates (single and bulk) and
 * deletes (single and bulk) GoCD agents. Read requests require an authenticated
 * user; mutating requests require an admin user (see {@link #checkSecurityOr403}).
 */
@Component
public class AgentsControllerV5 extends ApiController implements SparkSpringController, CrudController<AgentInstance> {
    private final AgentService agentService;
    private final ApiAuthenticationHelper apiAuthenticationHelper;
    private final SecurityService securityService;
    private final EnvironmentConfigService environmentConfigService;

    @Autowired
    public AgentsControllerV5(AgentService agentService, ApiAuthenticationHelper apiAuthenticationHelper,
                              SecurityService securityService, EnvironmentConfigService environmentConfigService) {
        super(ApiVersion.v5);
        this.agentService = agentService;
        this.apiAuthenticationHelper = apiAuthenticationHelper;
        this.securityService = securityService;
        this.environmentConfigService = environmentConfigService;
    }

    @Override
    public String controllerBasePath() {
        return Routes.AgentsAPI.BASE;
    }

    @Override
    public void setupRoutes() {
        path(controllerBasePath(), () -> {
            before("", mimeType, this::setContentType);
            before("/*", mimeType, this::setContentType);
            before("", mimeType, this::checkSecurityOr403);
            before("/*", mimeType, this::checkSecurityOr403);

            get("", mimeType, this::index);
            get(Routes.AgentsAPI.UUID, mimeType, this::show);

            patch(Routes.AgentsAPI.UUID, mimeType, this::update);
            patch("", mimeType, this::bulkUpdate);

            delete(Routes.AgentsAPI.UUID, mimeType, this::deleteAgent);
            delete("", mimeType, this::bulkDeleteAgents);

            exception(HttpException.class, this::httpException);
        });
    }

    /** Renders all agents (with their environment configs) visible to the current user. */
    public String index(Request request, Response response) throws IOException {
        return writerForTopLevelObject(request, response,
                outputWriter -> AgentsRepresenter.toJSON(outputWriter, agentService.agentEnvironmentConfigsMap(), securityService, currentUsername()));
    }

    /** Renders a single agent identified by the "uuid" path parameter. */
    public String show(Request request, Response response) throws IOException {
        final AgentInstance agentInstance = fetchEntityFromConfig(request.params("uuid"));
        return writerForTopLevelObject(request, response,
                outputWriter -> AgentRepresenter.toJSON(outputWriter, agentInstance, environmentConfigService.environmentConfigsFor(request.params("uuid")), securityService, currentUsername()));
    }

    /**
     * Patches a single agent's attributes (hostname, resources, environments, config state)
     * from the request body and renders either the updated agent or an error message.
     */
    public String update(Request request, Response response) throws IOException {
        final String uuid = request.params("uuid");
        final AgentUpdateRequest agentUpdateRequest = AgentUpdateRequestRepresenter.fromJSON(request.body());
        final HttpOperationResult result = new HttpOperationResult();
        final AgentInstance updatedAgentInstance = agentService.updateAgentAttributes(
                currentUsername(),
                result,
                uuid,
                agentUpdateRequest.getHostname(),
                agentUpdateRequest.getResources(),
                agentUpdateRequest.getEnvironments(),
                agentUpdateRequest.getAgentConfigState()
        );

        return handleCreateOrUpdateResponse(request, response, updatedAgentInstance, result);
    }

    /** Applies add/remove operations for resources and environments to many agents at once. */
    public String bulkUpdate(Request request, Response response) throws IOException {
        final AgentBulkUpdateRequest bulkUpdateRequest = AgentBulkUpdateRequestRepresenter.fromJSON(request.body());
        final HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
        agentService.bulkUpdateAgentAttributes(currentUsername(),
                result,
                bulkUpdateRequest.getUuids(),
                bulkUpdateRequest.getOperations().getResources().toAdd(),
                bulkUpdateRequest.getOperations().getResources().toRemove(),
                bulkUpdateRequest.getOperations().getEnvironments().toAdd(),
                bulkUpdateRequest.getOperations().getEnvironments().toRemove(),
                bulkUpdateRequest.getAgentConfigState()
        );

        return renderHTTPOperationResult(result, request, response);
    }

    /** Deletes the single agent identified by the "uuid" path parameter. */
    public String deleteAgent(Request request, Response response) throws IOException {
        final HttpOperationResult result = new HttpOperationResult();
        agentService.deleteAgents(currentUsername(), result, singletonList(request.params("uuid")));
        return renderHTTPOperationResult(result, request, response);
    }

    /** Deletes every agent whose uuid appears in the request body's "uuids" array. */
    public String bulkDeleteAgents(Request request, Response response) throws IOException {
        final JsonReader reader = GsonTransformer.getInstance().jsonReaderFrom(request.body());
        // A missing "uuids" array is treated as an empty list rather than an error.
        final List<String> uuids = toList(reader.optJsonArray("uuids").orElse(new JsonArray()));
        final HttpOperationResult result = new HttpOperationResult();
        agentService.deleteAgents(currentUsername(), result, uuids);
        return renderHTTPOperationResult(result, request, response);
    }

    @Override
    public String etagFor(AgentInstance entityFromServer) {
        // Agents are not etag-versioned in this API version.
        throw new UnsupportedOperationException();
    }

    @Override
    public EntityType getEntityType() {
        return EntityType.Agent;
    }

    @Override
    public AgentInstance doFetchEntityFromConfig(String uuid) {
        final AgentInstance agentInstance = agentService.findAgent(uuid);
        // Normalize the null-object returned by the service to a real null so the
        // CrudController base class raises the proper 404.
        return agentInstance instanceof NullAgentInstance ? null : agentInstance;
    }

    @Override
    public AgentInstance buildEntityFromRequestBody(Request req) {
        return null;
    }

    @Override
    public Consumer<OutputWriter> jsonWriter(AgentInstance agentInstance) {
        return outputWriter -> AgentRepresenter.toJSON(outputWriter, agentInstance, environmentConfigService.environmentConfigsFor(agentInstance.getUuid()), securityService, currentUsername());
    }

    /** Read requests need any authenticated user; everything else needs an admin. */
    private void checkSecurityOr403(Request request, Response response) {
        // Use Locale.ROOT so the comparison is independent of the JVM's default locale
        // (e.g. "get".toUpperCase() does not produce "GET" under a Turkish locale).
        if (Arrays.asList("GET", "HEAD").contains(request.requestMethod().toUpperCase(Locale.ROOT))) {
            apiAuthenticationHelper.checkUserAnd403(request, response);
            return;
        }
        apiAuthenticationHelper.checkAdminUserAnd403(request, response);
    }

    /** Converts a Gson array of primitives to a list of their string values. */
    private List<String> toList(JsonArray jsonArray) {
        final List<String> list = new ArrayList<>();
        for (JsonElement element : jsonArray) {
            list.add(element.getAsString());
        }
        return list;
    }

    /** Renders the agent on success; otherwise sets the error status and renders the message. */
    private String handleCreateOrUpdateResponse(Request req, Response res, AgentInstance agentInstance, HttpOperationResult result) {
        if (result.isSuccess()) {
            return jsonize(req, agentInstance);
        } else {
            res.status(result.httpCode());
            String errorMessage = result.message();
            return null == agentInstance ? MessageJson.create(errorMessage) : MessageJson.create(errorMessage, jsonWriter(agentInstance));
        }
    }
}
apache-2.0
JavierJia/vxquery
vxquery-core/src/main/java/org/apache/vxquery/runtime/functions/sequence/FnRemoveScalarEvaluatorFactory.java
4630
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.vxquery.runtime.functions.sequence;

import java.io.IOException;

import org.apache.vxquery.datamodel.accessors.SequencePointable;
import org.apache.vxquery.datamodel.accessors.TaggedValuePointable;
import org.apache.vxquery.datamodel.builders.sequence.SequenceBuilder;
import org.apache.vxquery.datamodel.values.ValueTag;
import org.apache.vxquery.exceptions.ErrorCode;
import org.apache.vxquery.exceptions.SystemException;
import org.apache.vxquery.runtime.functions.base.AbstractTaggedValueArgumentScalarEvaluator;
import org.apache.vxquery.runtime.functions.base.AbstractTaggedValueArgumentScalarEvaluatorFactory;

import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluator;
import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.data.std.api.IPointable;
import edu.uci.ics.hyracks.data.std.primitive.LongPointable;
import edu.uci.ics.hyracks.data.std.primitive.VoidPointable;
import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;

/**
 * Evaluator factory for the XQuery function fn:remove(target, position):
 * produces the items of args[0] with the item at the 1-based position given
 * by args[1] removed. A position outside the sequence leaves the target
 * unchanged; args[1] must be an xs:integer or FORG0006 is raised.
 */
public class FnRemoveScalarEvaluatorFactory extends AbstractTaggedValueArgumentScalarEvaluatorFactory {
    private static final long serialVersionUID = 1L;

    public FnRemoveScalarEvaluatorFactory(IScalarEvaluatorFactory[] args) {
        super(args);
    }

    @Override
    protected IScalarEvaluator createEvaluator(IHyracksTaskContext ctx, IScalarEvaluator[] args)
            throws AlgebricksException {
        // Scratch objects reused across evaluate() calls; the evaluator instance is
        // therefore stateful (NOTE: assumes one evaluator per task, as is typical for
        // Hyracks scalar evaluators — not safe to share across threads).
        final ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
        final SequenceBuilder sb = new SequenceBuilder();
        final SequencePointable seq = (SequencePointable) SequencePointable.FACTORY.createPointable();
        final VoidPointable p = (VoidPointable) VoidPointable.FACTORY.createPointable();
        final LongPointable longp = (LongPointable) LongPointable.FACTORY.createPointable();
        return new AbstractTaggedValueArgumentScalarEvaluator(args) {
            @Override
            protected void evaluate(TaggedValuePointable[] args, IPointable result) throws SystemException {
                try {
                    // Second argument: the 1-based position to remove; must be an integer.
                    TaggedValuePointable tvp2 = args[1];
                    if (tvp2.getTag() != ValueTag.XS_INTEGER_TAG) {
                        throw new SystemException(ErrorCode.FORG0006);
                    }
                    tvp2.getValue(longp);
                    // Start a fresh output sequence in the reusable storage.
                    abvs.reset();
                    sb.reset(abvs);
                    TaggedValuePointable tvp1 = args[0];
                    if (tvp1.getTag() == ValueTag.SEQUENCE_TAG) {
                        tvp1.getValue(seq);
                        int seqLen = seq.getEntryCount();
                        if (longp.getLong() < 1 || longp.getLong() > seqLen) {
                            // Position is outside the sequence. Return target.
                            result.set(tvp1);
                            return;
                        } else {
                            // Copy every entry except the one at the requested position.
                            for (int j = 0; j < seqLen; ++j) {
                                if (longp.getLong() != j + 1) {
                                    seq.getEntry(j, p);
                                    sb.addItem(p);
                                }
                            }
                        }
                    } else if (longp.getLong() != 1) {
                        // Position does not match the item. Return target.
                        result.set(tvp1);
                        return;
                    }
                    // Either the filtered sequence, or an empty sequence when a
                    // singleton target's only item (position 1) was removed.
                    sb.finish();
                    result.set(abvs);
                } catch (IOException e) {
                    throw new SystemException(ErrorCode.SYSE0001);
                }
            }
        };
    }
}
apache-2.0
dhalperi/batfish
projects/batfish/src/main/java/org/batfish/dataplane/rib/OspfIntraAreaRib.java
478
package org.batfish.dataplane.rib; import javax.annotation.ParametersAreNonnullByDefault; import org.batfish.datamodel.OspfIntraAreaRoute; @ParametersAreNonnullByDefault public class OspfIntraAreaRib extends AbstractRib<OspfIntraAreaRoute> { public OspfIntraAreaRib() { super(); } @Override public int comparePreference(OspfIntraAreaRoute lhs, OspfIntraAreaRoute rhs) { // reversed on purpose return Long.compare(rhs.getMetric(), lhs.getMetric()); } }
apache-2.0
forGGe/kaa
server/node/src/main/java/org/kaaproject/kaa/server/admin/client/mvp/place/SystemCtlSchemasPlace.java
1756
/* * Copyright 2014-2016 CyberVision, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaaproject.kaa.server.admin.client.mvp.place; import org.kaaproject.kaa.server.admin.client.util.Utils; import com.google.gwt.place.shared.PlaceTokenizer; import com.google.gwt.place.shared.Prefix; public class SystemCtlSchemasPlace extends TreePlace { public SystemCtlSchemasPlace() { } @Prefix(value = "sysCtlSchemas") public static class Tokenizer implements PlaceTokenizer<SystemCtlSchemasPlace> { @Override public SystemCtlSchemasPlace getPlace(String token) { return new SystemCtlSchemasPlace(); } @Override public String getToken(SystemCtlSchemasPlace place) { PlaceParams.clear(); return PlaceParams.generateToken(); } } @Override public boolean equals(Object obj) { return obj != null && (obj instanceof SystemCtlSchemasPlace); } @Override public String getName() { return Utils.constants.systemCtl(); } @Override public boolean isLeaf() { return true; } @Override public TreePlace createDefaultPreviousPlace() { return null; } }
apache-2.0
dfish3r/cas-x509-crl-ldaptive
cas-server-core/src/main/java/org/jasig/cas/remoting/server/RemoteCentralAuthenticationService.java
7107
/*
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License.  You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.cas.remoting.server;

import org.apache.commons.collections.Predicate;
import org.jasig.cas.CentralAuthenticationService;
import org.jasig.cas.authentication.AuthenticationException;
import org.jasig.cas.authentication.Credential;
import org.jasig.cas.authentication.principal.Service;
import org.jasig.cas.logout.LogoutRequest;
import org.jasig.cas.ticket.TicketException;
import org.jasig.cas.ticket.InvalidTicketException;
import org.jasig.cas.ticket.ServiceTicket;
import org.jasig.cas.ticket.Ticket;
import org.jasig.cas.ticket.TicketGrantingTicket;
import org.jasig.cas.validation.Assertion;
import org.springframework.util.Assert;

import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.constraints.NotNull;
import java.util.Collection;
import java.util.List;
import java.util.Set;

/**
 * Wrapper implementation around a CentralAuthenticationService that
 * completes the marshalling of parameters from the web-service layer to the
 * service layer. Typically the only thing that is done is to validate the
 * parameters (as you would in the web tier) and then delegate to the service
 * layer.
 * <p>
 * The following properties are required:
 * </p>
 * <ul>
 * <li>centralAuthenticationService - the service layer we are delegating to.</li>
 * </ul>
 *
 * @author Scott Battaglia
 * @deprecated As of 4.1. No longer required. The default implementation can be used to delegate calls to the service layer from WS.
 * @since 3.0.0
 */
@Deprecated
public final class RemoteCentralAuthenticationService implements CentralAuthenticationService {

    /** The CORE to delegate to. */
    @NotNull
    private CentralAuthenticationService centralAuthenticationService;

    /** The validators to check the Credential. */
    // Defaults to the JSR-303 bean-validation default factory; replaceable via setValidator().
    @NotNull
    private Validator validator = Validation.buildDefaultValidatorFactory().getValidator();

    /**
     * {@inheritDoc}
     * @throws IllegalArgumentException if the Credentials are null or if given
     * invalid credentials.
     */
    @Override
    public TicketGrantingTicket createTicketGrantingTicket(final Credential... credentials)
            throws AuthenticationException, TicketException {
        Assert.notNull(credentials, "credentials cannot be null");
        // Bean-validate each credential before handing off to the service layer.
        checkForErrors(credentials);

        return this.centralAuthenticationService.createTicketGrantingTicket(credentials);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ServiceTicket grantServiceTicket(final String ticketGrantingTicketId, final Service service)
            throws TicketException {
        return this.centralAuthenticationService.grantServiceTicket(ticketGrantingTicketId, service);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Collection<Ticket> getTickets(@NotNull final Predicate predicate) {
        return this.centralAuthenticationService.getTickets(predicate);
    }

    /**
     * {@inheritDoc}
     * @throws IllegalArgumentException if given invalid credentials
     */
    @Override
    public ServiceTicket grantServiceTicket(
            final String ticketGrantingTicketId, final Service service, final Credential... credentials)
            throws AuthenticationException, TicketException {
        // Credentials are optional here; checkForErrors() tolerates null.
        checkForErrors(credentials);

        return this.centralAuthenticationService.grantServiceTicket(ticketGrantingTicketId, service, credentials);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public <T extends Ticket> T getTicket(final String ticketId, final Class<? extends Ticket> clazz)
            throws InvalidTicketException {
        return this.centralAuthenticationService.getTicket(ticketId, clazz);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Assertion validateServiceTicket(final String serviceTicketId, final Service service) throws TicketException {
        return this.centralAuthenticationService.validateServiceTicket(serviceTicketId, service);
    }

    /**
     * {@inheritDoc}
     * <p>Destroy a TicketGrantingTicket and perform back channel logout. This has the effect of invalidating any
     * Ticket that was derived from the TicketGrantingTicket being destroyed. May throw an
     * {@link IllegalArgumentException} if the TicketGrantingTicket ID is null.
     *
     * @param ticketGrantingTicketId the id of the ticket we want to destroy
     * @return the logout requests.
     */
    @Override
    public List<LogoutRequest> destroyTicketGrantingTicket(final String ticketGrantingTicketId) {
        return this.centralAuthenticationService.destroyTicketGrantingTicket(ticketGrantingTicketId);
    }

    /**
     * {@inheritDoc}
     * @throws IllegalArgumentException if the credentials are invalid.
     */
    @Override
    public TicketGrantingTicket delegateTicketGrantingTicket(final String serviceTicketId,
            final Credential... credentials) throws AuthenticationException, TicketException {
        checkForErrors(credentials);

        return this.centralAuthenticationService.delegateTicketGrantingTicket(serviceTicketId, credentials);
    }

    /**
     * Check for errors by asking the validator to review each credential.
     * A null credentials array is silently accepted; any constraint violation
     * is surfaced as an IllegalArgumentException listing all violations.
     *
     * @param credentials the credentials
     */
    private void checkForErrors(final Credential... credentials) {
        if (credentials == null) {
            return;
        }

        for (final Credential c : credentials) {
            final Set<ConstraintViolation<Credential>> errors = this.validator.validate(c);
            if (!errors.isEmpty()) {
                throw new IllegalArgumentException("Error validating credentials: " + errors.toString());
            }
        }
    }

    /**
     * Set the CentralAuthenticationService.
     *
     * @param centralAuthenticationService The CentralAuthenticationService to
     * set.
     */
    public void setCentralAuthenticationService(
        final CentralAuthenticationService centralAuthenticationService) {
        this.centralAuthenticationService = centralAuthenticationService;
    }

    /**
     * Set the list of validators.
     *
     * @param validator The array of validators to use.
     */
    public void setValidator(final Validator validator) {
        this.validator = validator;
    }
}
apache-2.0
gkatsikas/onos
drivers/fujitsu/src/main/java/org/onosproject/drivers/fujitsu/cli/VoltRebootOnuCommand.java
2239
/* * Copyright 2016-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.drivers.fujitsu.cli; import org.apache.karaf.shell.api.action.Argument; import org.apache.karaf.shell.api.action.Command; import org.apache.karaf.shell.api.action.Completion; import org.apache.karaf.shell.api.action.lifecycle.Service; import org.onosproject.cli.AbstractShellCommand; import org.onosproject.cli.net.DeviceIdCompleter; import org.onosproject.net.DeviceId; import org.onosproject.drivers.fujitsu.behaviour.VoltOnuOperConfig; import org.onosproject.net.driver.DriverHandler; import org.onosproject.net.driver.DriverService; /** * Reboots an ONU in vOLT. 
*/ @Service @Command(scope = "onos", name = "volt-rebootonu", description = "Reboots an ONU in vOLT") public class VoltRebootOnuCommand extends AbstractShellCommand { @Argument(index = 0, name = "uri", description = "Device ID", required = true, multiValued = false) @Completion(DeviceIdCompleter.class) String uri = null; @Argument(index = 1, name = "target", description = "PON link ID-ONU ID", required = true, multiValued = false) String target = null; private DeviceId deviceId; @Override protected void doExecute() { DriverService service = get(DriverService.class); deviceId = DeviceId.deviceId(uri); DriverHandler h = service.createHandler(deviceId); VoltOnuOperConfig volt = h.behaviour(VoltOnuOperConfig.class); String reply = volt.rebootOnu(target); if (reply != null) { print("%s", reply); } else { print("No reply from %s", deviceId.toString()); } } }
apache-2.0
darionyaphet/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/async/CompletedOperationCache.java
8343
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.rest.handler.async;

import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.types.Either;
import org.apache.flink.util.AutoCloseableAsync;
import org.apache.flink.util.Preconditions;

import org.apache.flink.shaded.guava18.com.google.common.base.Ticker;
import org.apache.flink.shaded.guava18.com.google.common.cache.Cache;
import org.apache.flink.shaded.guava18.com.google.common.cache.CacheBuilder;
import org.apache.flink.shaded.guava18.com.google.common.cache.RemovalListener;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;

/**
 * Cache to manage ongoing operations.
 *
 * <p>The cache allows to register ongoing operations by calling
 * {@link #registerOngoingOperation(K, CompletableFuture)}, where the
 * {@code CompletableFuture} contains the operation result. Completed operations will be
 * removed from the cache automatically after a fixed timeout.
 */
@ThreadSafe
class CompletedOperationCache<K extends OperationKey, R> implements AutoCloseableAsync {

    // TTL for completed results, and also the upper bound the cache waits for
    // pending results to be accessed during closeAsync().
    private static final long COMPLETED_OPERATION_RESULT_CACHE_DURATION_SECONDS = 300L;

    private static final Logger LOGGER = LoggerFactory.getLogger(CompletedOperationCache.class);

    /**
     * In-progress asynchronous operations.
     */
    private final Map<K, ResultAccessTracker<R>> registeredOperationTriggers = new ConcurrentHashMap<>();

    /**
     * Caches the result of completed operations.
     */
    private final Cache<K, ResultAccessTracker<R>> completedOperations;

    // Guards the transition into the "closing" state (terminationFuture != null).
    private final Object lock = new Object();

    // Non-null once closeAsync() has been called; doubles as the "is closing" flag.
    @Nullable
    private CompletableFuture<Void> terminationFuture;

    CompletedOperationCache() {
        this(Ticker.systemTicker());
    }

    @VisibleForTesting
    CompletedOperationCache(final Ticker ticker) {
        completedOperations = CacheBuilder.newBuilder()
            .expireAfterWrite(COMPLETED_OPERATION_RESULT_CACHE_DURATION_SECONDS, TimeUnit.SECONDS)
            .removalListener((RemovalListener<K, ResultAccessTracker<R>>) removalNotification -> {
                if (removalNotification.wasEvicted()) {
                    Preconditions.checkState(removalNotification.getKey() != null);
                    Preconditions.checkState(removalNotification.getValue() != null);

                    // When shutting down the cache, we wait until all results are accessed.
                    // When a result gets evicted from the cache, it will not be possible to access
                    // it any longer, and we might be in the process of shutting down, so we mark
                    // the result as accessed to avoid waiting indefinitely.
                    removalNotification.getValue().markAccessed();

                    LOGGER.info("Evicted result with trigger id {} because its TTL of {}s has expired.",
                        removalNotification.getKey().getTriggerId(),
                        COMPLETED_OPERATION_RESULT_CACHE_DURATION_SECONDS);
                }
            })
            .ticker(ticker)
            .build();
    }

    /**
     * Registers an ongoing operation with the cache.
     *
     * @param operationResultFuture A future containing the operation result.
     * @throws IllegalStateException if the cache is already shutting down
     */
    public void registerOngoingOperation(
            final K operationKey,
            final CompletableFuture<R> operationResultFuture) {
        final ResultAccessTracker<R> inProgress = ResultAccessTracker.inProgress();

        synchronized (lock) {
            checkState(isRunning(), "The CompletedOperationCache has already been closed.");
            registeredOperationTriggers.put(operationKey, inProgress);
        }

        // On completion, move the tracker from the in-progress map into the TTL
        // cache, storing the outcome as Either: Left = failure, Right = success.
        operationResultFuture.whenComplete((result, error) -> {
            if (error == null) {
                completedOperations.put(operationKey, inProgress.finishOperation(Either.Right(result)));
            } else {
                completedOperations.put(operationKey, inProgress.finishOperation(Either.Left(error)));
            }
            registeredOperationTriggers.remove(operationKey);
        });
    }

    @GuardedBy("lock")
    private boolean isRunning() {
        return terminationFuture == null;
    }

    /**
     * Returns the operation result or a {@code Throwable} if the {@code CompletableFuture}
     * finished, otherwise {@code null}.
     *
     * @throws UnknownOperationKeyException If the operation is not found, and there is no ongoing
     *                                      operation under the provided key.
     */
    @Nullable
    public Either<Throwable, R> get(
            final K operationKey) throws UnknownOperationKeyException {
        ResultAccessTracker<R> resultAccessTracker;
        // Check the in-progress map first, then fall back to the completed cache.
        if ((resultAccessTracker = registeredOperationTriggers.get(operationKey)) == null
                && (resultAccessTracker = completedOperations.getIfPresent(operationKey)) == null) {
            throw new UnknownOperationKeyException(operationKey);
        }

        return resultAccessTracker.accessOperationResultOrError();
    }

    @Override
    public CompletableFuture<Void> closeAsync() {
        synchronized (lock) {
            if (isRunning()) {
                // Wait for every known result to be accessed at least once, but
                // never longer than the cache TTL.
                terminationFuture = FutureUtils.orTimeout(
                    asyncWaitForResultsToBeAccessed(),
                    COMPLETED_OPERATION_RESULT_CACHE_DURATION_SECONDS,
                    TimeUnit.SECONDS);
            }

            return terminationFuture;
        }
    }

    // Completes when every tracked result (in-progress and completed) has been accessed.
    private CompletableFuture<Void> asyncWaitForResultsToBeAccessed() {
        return FutureUtils.waitForAll(
            Stream.concat(registeredOperationTriggers.values().stream(), completedOperations.asMap().values().stream())
                .map(ResultAccessTracker::getAccessedFuture)
                .collect(Collectors.toList()));
    }

    @VisibleForTesting
    void cleanUp() {
        completedOperations.cleanUp();
    }

    /**
     * Stores the result of an asynchronous operation, and tracks accesses to it.
     */
    private static class ResultAccessTracker<R> {

        /** Result of an asynchronous operation. Null if operation is in progress. */
        @Nullable
        private final Either<Throwable, R> operationResultOrError;

        /** Future that completes if a non-null {@link #operationResultOrError} is accessed. */
        private final CompletableFuture<Void> accessed;

        private static <R> ResultAccessTracker<R> inProgress() {
            return new ResultAccessTracker<>();
        }

        private ResultAccessTracker() {
            this.operationResultOrError = null;
            this.accessed = new CompletableFuture<>();
        }

        private ResultAccessTracker(final Either<Throwable, R> operationResultOrError, final CompletableFuture<Void> accessed) {
            this.operationResultOrError = checkNotNull(operationResultOrError);
            this.accessed = checkNotNull(accessed);
        }

        /**
         * Creates a new instance of the tracker with the result of the asynchronous operation set.
         */
        public ResultAccessTracker<R> finishOperation(final Either<Throwable, R> operationResultOrError) {
            checkState(this.operationResultOrError == null);

            // The accessed future carries over so that a waiter registered while the
            // operation was in progress is still completed on first access.
            return new ResultAccessTracker<>(checkNotNull(operationResultOrError), this.accessed);
        }

        /**
         * If present, returns the result of the asynchronous operation, and marks the result as
         * accessed. If the result is not present, this method returns null.
         */
        @Nullable
        public Either<Throwable, R> accessOperationResultOrError() {
            if (operationResultOrError != null) {
                markAccessed();
            }

            return operationResultOrError;
        }

        public CompletableFuture<Void> getAccessedFuture() {
            return accessed;
        }

        private void markAccessed() {
            accessed.complete(null);
        }
    }
}
apache-2.0
SaiNadh001/aws-sdk-for-java
src/main/java/com/amazonaws/services/cloudsearch/model/transform/DescribeIndexFieldsRequestMarshaller.java
2331
/*
 * Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.cloudsearch.model.transform;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.services.cloudsearch.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;

/**
 * Describe Index Fields Request Marshaller
 */
public class DescribeIndexFieldsRequestMarshaller implements Marshaller<Request<DescribeIndexFieldsRequest>, DescribeIndexFieldsRequest> {

    /**
     * Converts a {@link DescribeIndexFieldsRequest} into a query-parameter
     * style {@link Request} for the AmazonCloudSearch service.
     *
     * @param describeIndexFieldsRequest the request to marshall; must not be null
     * @return the marshalled request, carrying Action/Version plus the
     *         optional DomainName and FieldNames member list
     * @throws AmazonClientException if the given request is null
     */
    public Request<DescribeIndexFieldsRequest> marshall(DescribeIndexFieldsRequest describeIndexFieldsRequest) {
        if (describeIndexFieldsRequest == null) {
            throw new AmazonClientException("Invalid argument passed to marshall(...)");
        }

        Request<DescribeIndexFieldsRequest> marshalledRequest =
                new DefaultRequest<DescribeIndexFieldsRequest>(describeIndexFieldsRequest, "AmazonCloudSearch");
        marshalledRequest.addParameter("Action", "DescribeIndexFields");
        marshalledRequest.addParameter("Version", "2011-02-01");

        if (describeIndexFieldsRequest.getDomainName() != null) {
            marshalledRequest.addParameter("DomainName", StringUtils.fromString(describeIndexFieldsRequest.getDomainName()));
        }

        // Field names are serialized as a 1-based member list:
        // FieldNames.member.1, FieldNames.member.2, ...
        // Note the index advances even for null entries, matching the
        // wire-format convention used by the generated marshallers.
        int memberIndex = 1;
        for (String fieldName : describeIndexFieldsRequest.getFieldNames()) {
            if (fieldName != null) {
                marshalledRequest.addParameter("FieldNames.member." + memberIndex, StringUtils.fromString(fieldName));
            }
            memberIndex++;
        }

        return marshalledRequest;
    }
}
apache-2.0
dita-ot/dita-ot
src/main/java/org/dita/dost/writer/AbstractDitaMetaWriter.java
8094
/*
 * This file is part of the DITA Open Toolkit project.
 *
 * Copyright 2004, 2005 IBM Corporation
 *
 * See the accompanying LICENSE file for applicable license.
 */
package org.dita.dost.writer;

import org.dita.dost.util.DitaClass;
import org.dita.dost.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.util.*;

import static org.dita.dost.util.Constants.*;

/**
 * Base class for metadata filter that reads dita files and inserts metadata.
 */
public abstract class AbstractDitaMetaWriter extends AbstractDomFilter {

    /** Metadata classes that may appear at most once; a pushed element replaces the existing one. */
    private static final Set<DitaClass> uniqueSet = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
            TOPIC_CRITDATES, TOPIC_PERMISSIONS, TOPIC_PUBLISHER, TOPIC_SOURCE, MAP_SEARCHTITLE, TOPIC_SEARCHTITLE
    )));

    // Metadata to push into the topic, keyed by DITA class matcher string.
    private Map<String, Element> metaTable;
    // Target topic id; when null, the first topic in the document is used.
    private String topicid = null;

    public void setMetaTable(final Map<String, Element> metaTable) {
        this.metaTable = metaTable;
    }

    public void setTopicId(final String topicid) {
        this.topicid = topicid;
    }

    public abstract Document process(final Document doc);

    /**
     * Insert metadata elements into a container, honoring the required element order.
     *
     * @param metadataContainer element to receive new metadata children
     * @param order required ordering of metadata element classes within the container
     */
    void processMetadata(final Element metadataContainer, final List<DitaClass> order) {
        for (int i = 0; i < order.size(); i++) {
            final DitaClass cls = order.get(i);
            final List<Element> newChildren = getNewChildren(cls, metadataContainer.getOwnerDocument());
            if (!newChildren.isEmpty()) {
                // Insert before the first existing element whose class comes at or
                // after this one in the required order, so ordering is preserved.
                final Element insertPoint = getInsertionRef(metadataContainer, order.subList(i, order.size()));
                for (final Element newChild: newChildren) {
                    if (skipUnlockedNavtitle(metadataContainer, newChild)) {
                        //Navtitle element without locktitle="yes", do not push into topic
                    } else if (insertPoint != null) {
                        if (uniqueSet.contains(cls) && cls.matches(insertPoint)) {
                            metadataContainer.replaceChild(newChild, insertPoint);
                        } else {
                            metadataContainer.insertBefore(newChild, insertPoint);
                        }
                    } else {
                        metadataContainer.appendChild(newChild);
                    }
                }
            }
        }
    }

    /**
     * Test whether any metadata of the given classes is available to push.
     *
     * @param order metadata element classes to check
     * @return {@code true} if the metadata table contains at least one of them
     */
    boolean hasMetadata(final List<DitaClass> order) {
        for (final DitaClass cls: order) {
            if (metaTable.containsKey(cls.matcher)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Check if an element is an unlocked navtitle, which should not be pushed into topics.
     *
     * @param metadataContainer container element
     * @param checkForNavtitle title element
     */
    boolean skipUnlockedNavtitle(final Element metadataContainer, final Element checkForNavtitle) {
        if (!TOPIC_TITLEALTS.matches(metadataContainer) || !TOPIC_NAVTITLE.matches(checkForNavtitle)) {
            return false;
        } else if (checkForNavtitle.getAttributeNodeNS(DITA_OT_NS, ATTRIBUTE_NAME_LOCKTITLE) == null) {
            return false;
        } else if (ATTRIBUTE_NAME_LOCKTITLE_VALUE_YES.matches(checkForNavtitle.getAttributeNodeNS(DITA_OT_NS, ATTRIBUTE_NAME_LOCKTITLE).getValue())) {
            return false;
        }
        return true;
    }

    /**
     * Get metadata elements to add to current document. Elements have been cloned and imported
     * into the current document.
     *
     * @param cls element class of metadata elements
     * @param doc current document
     * @return list of metadata elements, may be empty
     */
    private List<Element> getNewChildren(final DitaClass cls, final Document doc) {
        final List<Element> res = new ArrayList<>();
        if (metaTable.containsKey(cls.matcher)) {
            // FIX: the original performed a redundant metaTable.get(cls.matcher)
            // whose result was discarded; look the element up once and reuse it.
            final NodeList list = metaTable.get(cls.matcher).getChildNodes();
            for (int i = 0; i < list.getLength(); i++) {
                Node item = list.item(i);
                res.add((Element) doc.importNode(item, true));
            }
        }
        // Callers insert each child before the same reference node, so reversing
        // here preserves the original document order after insertion.
        Collections.reverse(res);
        return res;
    }

    /**
     * Find the first existing child whose class appears in the remaining required
     * order, i.e. the node new metadata must be inserted before.
     *
     * @return the insertion reference, or {@code null} to append at the end
     */
    private Element getInsertionRef(final Element metadataContainer, final List<DitaClass> order) {
        if (order.isEmpty()) {
            return null;
        } else {
            final Element elem = getFirstChildElement(metadataContainer, order.get(0));
            if (elem != null) {
                return elem;
            } else {
                return getInsertionRef(metadataContainer, order.subList(1, order.size()));
            }
        }
    }

    /**
     * Find the metadata container child of {@code root}, creating it if absent.
     *
     * <p>A newly created container is placed after the last existing element that
     * precedes it in {@code position}, otherwise at the start of {@code root}.
     *
     * @param root topic or map element
     * @param position classes of elements that must precede the container
     * @param container class of the container element, e.g. prolog or topicmeta
     */
    Element findMetadataContainer(final Element root, List<DitaClass> position, final DitaClass container) {
        Element prolog = getFirstChildElement(root, container);
        if (prolog == null) {
            prolog = root.getOwnerDocument().createElement(container.localName);
            prolog.setAttribute(ATTRIBUTE_NAME_CLASS, container.toString());
            Element insertPoint = null;
            // Walk the preceding-element classes from last to first to find the
            // closest existing sibling to insert after.
            for (int i = position.size() - 1; i >= 0; i--) {
                insertPoint = getLastChildElement(root, position.get(i));
                if (insertPoint != null) {
                    break;
                }
            }
            if (insertPoint != null) {
                insertAfter(prolog, insertPoint);
            } else if (root.hasChildNodes()) {
                root.insertBefore(prolog, root.getFirstChild());
            } else {
                root.appendChild(prolog);
            }
        }
        return prolog;
    }

    /**
     * Get the first child element of {@code root} matching the given class, or {@code null}.
     */
    Element getFirstChildElement(final Element root, final DitaClass cls) {
        final NodeList children = root.getChildNodes();
        for (int i = 0; i < children.getLength(); i++) {
            final Node child = children.item(i);
            if (child.getNodeType() == Node.ELEMENT_NODE) {
                final Element elem = (Element) child;
                if (cls.matches(elem)) {
                    return elem;
                }
            }
        }
        return null;
    }

    /**
     * Get the last child element of {@code root} matching the given class, or {@code null}.
     */
    private Element getLastChildElement(final Element root, final DitaClass cls) {
        Element res = null;
        final NodeList children = root.getChildNodes();
        for (int i = 0; i < children.getLength(); i++) {
            final Node child = children.item(i);
            if (child.getNodeType() == Node.ELEMENT_NODE) {
                final Element elem = (Element) child;
                if (cls.matches(elem)) {
                    res = elem;
                }
            }
        }
        return res;
    }

    /**
     * Get the topic element the metadata should be written to: the topic with the
     * configured id if set and found, otherwise the first topic in the document.
     */
    public Element getMatchingTopicElement(Element root) {
        if (this.topicid != null) {
            final Element res = matchTopicElementById(root);
            if (res != null) {
                return res;
            }
        }
        return matchFirstTopicInDoc(root);
    }

    private Element matchFirstTopicInDoc(Element root) {
        // A <dita> wrapper may hold several topics; unwrap to the first one.
        if (root.getTagName().equals(ELEMENT_NAME_DITA)) {
            return getFirstChildElement(root, TOPIC_TOPIC);
        } else {
            return root;
        }
    }

    /** Depth-first search for the element whose id attribute equals {@link #topicid}. */
    private Element matchTopicElementById(Element topic) {
        if (topic.getAttribute(ATTRIBUTE_NAME_ID).equals(topicid)) {
            return topic;
        } else {
            for (final Element elem : XMLUtils.getChildElements((topic))) {
                final Element res = matchTopicElementById(elem);
                if (res != null) {
                    return res;
                }
            }
        }
        return null;
    }

    /** Insert {@code newChild} as the next sibling of {@code refChild}. */
    private void insertAfter(final Node newChild, final Node refChild) {
        final Node next = refChild.getNextSibling();
        final Node parent = refChild.getParentNode();
        if (next != null) {
            parent.insertBefore(newChild, next);
        } else {
            parent.appendChild(newChild);
        }
    }
}
apache-2.0
AlexMinsk/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/engine/impl/persistence/entity/DeploymentManager.java
10969
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.impl.persistence.entity;

import java.util.List;

import org.camunda.bpm.engine.authorization.Resources;
import org.camunda.bpm.engine.impl.DeploymentQueryImpl;
import org.camunda.bpm.engine.impl.Page;
import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.camunda.bpm.engine.impl.cfg.auth.ResourceAuthorizationProvider;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.dmn.entity.repository.DecisionDefinitionManager;
import org.camunda.bpm.engine.impl.dmn.entity.repository.DecisionRequirementsDefinitionManager;
import org.camunda.bpm.engine.impl.persistence.AbstractManager;
import org.camunda.bpm.engine.impl.persistence.deploy.cache.DeploymentCache;
import org.camunda.bpm.engine.repository.CaseDefinition;
import org.camunda.bpm.engine.repository.DecisionDefinition;
import org.camunda.bpm.engine.repository.DecisionRequirementsDefinition;
import org.camunda.bpm.engine.repository.Deployment;
import org.camunda.bpm.engine.repository.ProcessDefinition;

/**
 * Persistence manager for {@link DeploymentEntity} objects: inserts deployments
 * (including their resources and default authorizations), deletes deployments
 * with optional cascading to process/case/decision definitions and their
 * instances, and runs deployment queries.
 *
 * @author Tom Baeyens
 * @author Deivarayan Azhagappan
 * @author Christopher Zell
 */
public class DeploymentManager extends AbstractManager {

  /**
   * Persists a deployment together with its resources, creates default
   * authorizations for it, and registers it with the deployment cache.
   *
   * @param deployment the deployment to insert
   */
  public void insertDeployment(DeploymentEntity deployment) {
    getDbEntityManager().insert(deployment);
    createDefaultAuthorizations(deployment);

    for (ResourceEntity resource : deployment.getResources().values()) {
      resource.setDeploymentId(deployment.getId());
      getResourceManager().insertResource(resource);
    }

    Context
      .getProcessEngineConfiguration()
      .getDeploymentCache()
      .deploy(deployment);
  }

  /**
   * Deletes a deployment without skipping custom listeners or IO mappings.
   *
   * @param deploymentId id of the deployment to delete
   * @param cascade if {@code true}, also deletes process instances and history
   */
  public void deleteDeployment(String deploymentId, boolean cascade) {
    deleteDeployment(deploymentId, cascade, false, false);
  }

  /**
   * Deletes a deployment: its process definitions (and, when cascading, their
   * instances and historic job logs), case/decision deployments, resources,
   * authorizations, and finally the deployment row itself.
   *
   * @param deploymentId id of the deployment to delete
   * @param cascade if {@code true}, delete running and historic instances as well
   * @param skipCustomListeners if {@code true}, do not invoke custom listeners during instance deletion
   * @param skipIoMappings if {@code true}, do not execute IO mappings during instance deletion
   */
  public void deleteDeployment(String deploymentId, boolean cascade, boolean skipCustomListeners, boolean skipIoMappings) {
    List<ProcessDefinition> processDefinitions = getProcessDefinitionManager().findProcessDefinitionsByDeploymentId(deploymentId);
    if (cascade) {
      // *NOTE*:
      // The process instances of ALL process definitions must be
      // deleted, before every process definition can be deleted!
      //
      // On deletion of all process instances, the task listeners will
      // be deleted as well. Deletion of tasks and listeners needs
      // the redeployment of deployments, which can cause problems if
      // it is done sequentially with deletion of process definitions.
      //
      // For example:
      // A deployment contains two process definitions. The first process
      // definition and its instances are removed, and also cleared from the
      // cache. Then the second process definition and its instances are
      // removed. Deletion of those instances causes a redeployment, which
      // deploys the first definition into the cache again. Only the second
      // is removed from the cache, and the first remains in the cache after
      // the deletion process.
      //
      // That is why we have to clean up all instances first; after that
      // we can cleanly remove the process definitions.
      for (ProcessDefinition processDefinition: processDefinitions) {
        String processDefinitionId = processDefinition.getId();
        getProcessInstanceManager()
          .deleteProcessInstancesByProcessDefinition(processDefinitionId, "deleted deployment", true, skipCustomListeners, skipIoMappings);
      }
      // delete historic job logs (for example for timer start event jobs)
      getHistoricJobLogManager().deleteHistoricJobLogsByDeploymentId(deploymentId);
    }

    for (ProcessDefinition processDefinition : processDefinitions) {
      String processDefinitionId = processDefinition.getId();
      // Process definition cascade true deletes the history and
      // process instances if the instances flag is set to true as well.
      // Problem as described above: it redeploys the deployment.
      // This is no problem if only one process definition is deleted
      // in a transaction! We have to set the instances flag to false.
      getProcessDefinitionManager()
        .deleteProcessDefinition(processDefinition, processDefinitionId, cascade, false, skipCustomListeners);
    }

    deleteCaseDeployment(deploymentId, cascade);

    deleteDecisionDeployment(deploymentId, cascade);
    deleteDecisionRequirementDeployment(deploymentId);

    getResourceManager().deleteResourcesByDeploymentId(deploymentId);

    deleteAuthorizations(Resources.DEPLOYMENT, deploymentId);
    getDbEntityManager().delete(DeploymentEntity.class, "deleteDeployment", deploymentId);
  }

  /**
   * Deletes the CMMN part of a deployment (no-op when CMMN is disabled):
   * case instances (when cascading), case definitions, and cache entries.
   *
   * @param deploymentId id of the deployment being deleted
   * @param cascade if {@code true}, delete case instances as well
   */
  protected void deleteCaseDeployment(String deploymentId, boolean cascade) {
    ProcessEngineConfigurationImpl processEngineConfiguration = Context.getProcessEngineConfiguration();
    if (processEngineConfiguration.isCmmnEnabled()) {
      List<CaseDefinition> caseDefinitions = getCaseDefinitionManager().findCaseDefinitionByDeploymentId(deploymentId);
      if (cascade) {
        // delete case instances
        for (CaseDefinition caseDefinition: caseDefinitions) {
          String caseDefinitionId = caseDefinition.getId();

          getCaseInstanceManager()
            .deleteCaseInstancesByCaseDefinition(caseDefinitionId, "deleted deployment", true);
        }
      }

      // delete case definitions from db
      getCaseDefinitionManager()
        .deleteCaseDefinitionsByDeploymentId(deploymentId);

      for (CaseDefinition caseDefinition : caseDefinitions) {
        // NOTE: local is named processDefinitionId but holds the case definition id
        String processDefinitionId = caseDefinition.getId();

        // remove case definitions from cache:
        Context
          .getProcessEngineConfiguration()
          .getDeploymentCache()
          .removeCaseDefinition(processDefinitionId);
      }
    }
  }

  /**
   * Deletes the DMN decision part of a deployment (no-op when DMN is disabled):
   * historic decision instances (when cascading), decision definitions, and
   * cache entries.
   *
   * @param deploymentId id of the deployment being deleted
   * @param cascade if {@code true}, delete historic decision instances as well
   */
  protected void deleteDecisionDeployment(String deploymentId, boolean cascade) {
    ProcessEngineConfigurationImpl processEngineConfiguration = Context.getProcessEngineConfiguration();
    if (processEngineConfiguration.isDmnEnabled()) {
      DecisionDefinitionManager decisionDefinitionManager = getDecisionDefinitionManager();
      List<DecisionDefinition> decisionDefinitions = decisionDefinitionManager.findDecisionDefinitionByDeploymentId(deploymentId);
      if(cascade) {
        // delete historic decision instances
        for(DecisionDefinition decisionDefinition : decisionDefinitions) {
          getHistoricDecisionInstanceManager().deleteHistoricDecisionInstancesByDecisionDefinitionId(decisionDefinition.getId());
        }
      }

      // delete decision definitions from db
      decisionDefinitionManager
        .deleteDecisionDefinitionsByDeploymentId(deploymentId);

      DeploymentCache deploymentCache = processEngineConfiguration.getDeploymentCache();

      for (DecisionDefinition decisionDefinition : decisionDefinitions) {
        String decisionDefinitionId = decisionDefinition.getId();

        // remove decision definitions from cache:
        deploymentCache
          .removeDecisionDefinition(decisionDefinitionId);
      }
    }
  }

  /**
   * Deletes the DMN decision-requirements part of a deployment (no-op when DMN
   * is disabled): definitions from the database and from the cache.
   *
   * @param deploymentId id of the deployment being deleted
   */
  protected void deleteDecisionRequirementDeployment(String deploymentId) {
    ProcessEngineConfigurationImpl processEngineConfiguration = Context.getProcessEngineConfiguration();
    if (processEngineConfiguration.isDmnEnabled()) {
      DecisionRequirementsDefinitionManager manager = getDecisionRequirementsDefinitionManager();
      List<DecisionRequirementsDefinition> decisionRequirementsDefinitions =
          manager.findDecisionRequirementsDefinitionByDeploymentId(deploymentId);

      // delete decision requirements definitions from db
      manager.deleteDecisionRequirementsDefinitionsByDeploymentId(deploymentId);

      DeploymentCache deploymentCache = processEngineConfiguration.getDeploymentCache();

      for (DecisionRequirementsDefinition decisionRequirementsDefinition : decisionRequirementsDefinitions) {
        String decisionDefinitionId = decisionRequirementsDefinition.getId();

        // remove decision requirements definitions from cache:
        deploymentCache.removeDecisionRequirementsDefinition(decisionDefinitionId);
      }
    }
  }

  /**
   * Finds the most recent deployment with the given name, or {@code null}.
   *
   * @param deploymentName name to look up
   */
  public DeploymentEntity findLatestDeploymentByName(String deploymentName) {
    // query is limited to the first row (offset 0, max 1)
    List<?> list = getDbEntityManager().selectList("selectDeploymentsByName", deploymentName, 0, 1);
    if (list!=null && !list.isEmpty()) {
      return (DeploymentEntity) list.get(0);
    }
    return null;
  }

  /** Finds a deployment by its id, or {@code null} if none exists. */
  public DeploymentEntity findDeploymentById(String deploymentId) {
    return getDbEntityManager().selectById(DeploymentEntity.class, deploymentId);
  }

  /** Finds the deployments with the given ids. */
  @SuppressWarnings("unchecked")
  public List<DeploymentEntity> findDeploymentsByIds(String... deploymentsIds) {
    return getDbEntityManager().selectList("selectDeploymentsByIds", deploymentsIds);
  }

  /**
   * Counts deployments matching the query, after applying authorization and
   * tenant restrictions.
   */
  public long findDeploymentCountByQueryCriteria(DeploymentQueryImpl deploymentQuery) {
    configureQuery(deploymentQuery);
    return (Long) getDbEntityManager().selectOne("selectDeploymentCountByQueryCriteria", deploymentQuery);
  }

  /**
   * Finds deployments matching the query, after applying authorization and
   * tenant restrictions.
   *
   * @param page optional pagination
   */
  @SuppressWarnings("unchecked")
  public List<Deployment> findDeploymentsByQueryCriteria(DeploymentQueryImpl deploymentQuery, Page page) {
    configureQuery(deploymentQuery);
    return getDbEntityManager().selectList("selectDeploymentsByQueryCriteria", deploymentQuery, page);
  }

  /** Returns the names of the resources contained in the given deployment. */
  @SuppressWarnings("unchecked")
  public List<String> getDeploymentResourceNames(String deploymentId) {
    return getDbEntityManager().selectList("selectResourceNamesByDeploymentId", deploymentId);
  }

  /** Returns the deployment ids referenced by the given process instances. */
  @SuppressWarnings("unchecked")
  public List<String> findDeploymentIdsByProcessInstances(List<String> processInstanceIds) {
    return getDbEntityManager().selectList("selectDeploymentIdsByProcessInstances", processInstanceIds);
  }

  @Override
  public void close() {
  }

  @Override
  public void flush() {
  }

  // helper /////////////////////////////////////////////////

  /**
   * Creates the engine's default authorizations for a new deployment when
   * authorization is enabled.
   */
  protected void createDefaultAuthorizations(DeploymentEntity deployment) {
    if(isAuthorizationEnabled()) {
      ResourceAuthorizationProvider provider = getResourceAuthorizationProvider();
      AuthorizationEntity[] authorizations = provider.newDeployment(deployment);
      saveDefaultAuthorizations(authorizations);
    }
  }

  /** Applies authorization and tenant restrictions to a deployment query. */
  protected void configureQuery(DeploymentQueryImpl query) {
    getAuthorizationManager().configureDeploymentQuery(query);
    getTenantManager().configureQuery(query);
  }

}
apache-2.0
ohr/metrics
metrics-core/src/main/java/com/codahale/metrics/Timer.java
4657
package com.codahale.metrics;

import java.io.Closeable;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;

/**
 * A timer metric which aggregates timing durations and provides duration statistics, plus
 * throughput statistics via {@link Meter}.
 */
public class Timer implements Metered, Sampling {

    /**
     * A timing context: captures the clock tick at creation and records the elapsed time
     * into its owning {@link Timer} when stopped.
     *
     * @see Timer#time()
     */
    public static class Context implements Closeable {
        private final Timer timer;
        private final Clock clock;
        private final long startTick;

        private Context(Timer timer, Clock clock) {
            this.timer = timer;
            this.clock = clock;
            this.startTick = clock.getTick();
        }

        /**
         * Updates the timer with the difference between the current and start time. The start
         * time is not reset, so calling this repeatedly records multiple updates.
         *
         * @return the elapsed time in nanoseconds
         */
        public long stop() {
            final long elapsed = clock.getTick() - startTick;
            timer.update(elapsed, TimeUnit.NANOSECONDS);
            return elapsed;
        }

        /** Equivalent to calling {@link #stop()}. */
        @Override
        public void close() {
            stop();
        }
    }

    private final Meter meter;
    private final Histogram histogram;
    private final Clock clock;

    /**
     * Creates a new {@link Timer} using an {@link ExponentiallyDecayingReservoir} and the default
     * {@link Clock}.
     */
    public Timer() {
        this(new ExponentiallyDecayingReservoir());
    }

    /**
     * Creates a new {@link Timer} that uses the given {@link Reservoir}.
     *
     * @param reservoir the {@link Reservoir} implementation the timer should use
     */
    public Timer(Reservoir reservoir) {
        this(reservoir, Clock.defaultClock());
    }

    /**
     * Creates a new {@link Timer} that uses the given {@link Reservoir} and {@link Clock}.
     *
     * @param reservoir the {@link Reservoir} implementation the timer should use
     * @param clock     the {@link Clock} implementation the timer should use
     */
    public Timer(Reservoir reservoir, Clock clock) {
        this.meter = new Meter(clock);
        this.clock = clock;
        this.histogram = new Histogram(reservoir);
    }

    /**
     * Adds a recorded duration.
     *
     * @param duration the length of the duration
     * @param unit     the scale unit of {@code duration}
     */
    public void update(long duration, TimeUnit unit) {
        update(unit.toNanos(duration));
    }

    /**
     * Times and records the duration of an event.
     *
     * @param event a {@link Callable} whose {@link Callable#call()} method implements a process
     *              whose duration should be timed
     * @param <T>   the type of the value returned by {@code event}
     * @return the value returned by {@code event}
     * @throws Exception if {@code event} throws an {@link Exception}
     */
    public <T> T time(Callable<T> event) throws Exception {
        final long begin = clock.getTick();
        try {
            return event.call();
        } finally {
            // Record even when the callable throws.
            update(clock.getTick() - begin);
        }
    }

    /**
     * Times and records the duration of an event.
     *
     * @param event a {@link Runnable} whose {@link Runnable#run()} method implements a process
     *              whose duration should be timed
     */
    public void time(Runnable event) {
        final long begin = clock.getTick();
        try {
            event.run();
        } finally {
            update(clock.getTick() - begin);
        }
    }

    /**
     * Returns a new {@link Context} started at the current clock tick.
     *
     * @return a new {@link Context}
     * @see Context
     */
    public Context time() {
        return new Context(this, clock);
    }

    @Override
    public long getCount() {
        return histogram.getCount();
    }

    @Override
    public double getFifteenMinuteRate() {
        return meter.getFifteenMinuteRate();
    }

    @Override
    public double getFiveMinuteRate() {
        return meter.getFiveMinuteRate();
    }

    @Override
    public double getMeanRate() {
        return meter.getMeanRate();
    }

    @Override
    public double getOneMinuteRate() {
        return meter.getOneMinuteRate();
    }

    @Override
    public Snapshot getSnapshot() {
        return histogram.getSnapshot();
    }

    private void update(long duration) {
        if (duration < 0) {
            // Negative durations (e.g. from a non-monotonic clock) are dropped.
            return;
        }
        histogram.update(duration);
        meter.mark();
    }
}
apache-2.0
pomack/closure-templates
java/src/com/google/template/soy/shared/internal/SharedModule.java
5494
/*
 * Copyright 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.shared.internal;

import com.google.common.collect.ImmutableMap;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Singleton;
import com.google.inject.multibindings.Multibinder;
import com.google.template.soy.coredirectives.CoreDirectivesModule;
import com.google.template.soy.internal.i18n.BidiGlobalDir;
import com.google.template.soy.msgs.SoyMsgBundle;
import com.google.template.soy.shared.restricted.ApiCallScopeBindingAnnotations.ApiCall;
import com.google.template.soy.shared.restricted.ApiCallScopeBindingAnnotations.IsUsingIjData;
import com.google.template.soy.shared.restricted.ApiCallScopeBindingAnnotations.LocaleString;
import com.google.template.soy.shared.restricted.SoyFunction;
import com.google.template.soy.shared.restricted.SoyJavaRuntimeFunction;
import com.google.template.soy.shared.restricted.SoyJavaRuntimePrintDirective;
import com.google.template.soy.shared.restricted.SoyPrintDirective;

import java.util.Map;
import java.util.Set;

/**
 * Guice module for shared classes.
 *
 * <p> Important: Do not use outside of Soy code (treat as superpackage-private).
 *
 */
public class SharedModule extends AbstractModule {

  @Override
  protected void configure() {
    // Install the core directives.
    install(new CoreDirectivesModule());

    // Touch the set binders so an empty set is injected (instead of a missing-binding error)
    // when no functions or print directives are bound.
    Multibinder.newSetBinder(binder(), SoyFunction.class);
    Multibinder.newSetBinder(binder(), SoyPrintDirective.class);

    // Create the API call scope and make the scope instance itself injectable.
    GuiceSimpleScope scope = new GuiceSimpleScope();
    bindScope(ApiCallScope.class, scope);
    bind(GuiceSimpleScope.class).annotatedWith(ApiCall.class)
        .toInstance(scope);

    // Bind unscoped providers for parameters in ApiCallScope (these throw exceptions when
    // requested outside the scope).
    bind(Boolean.class).annotatedWith(IsUsingIjData.class)
        .toProvider(GuiceSimpleScope.<Boolean>getUnscopedProvider())
        .in(ApiCallScope.class);
    bind(SoyMsgBundle.class)
        .toProvider(GuiceSimpleScope.<SoyMsgBundle>getUnscopedProvider())
        .in(ApiCallScope.class);
    bind(String.class).annotatedWith(LocaleString.class)
        .toProvider(GuiceSimpleScope.<String>getUnscopedProvider())
        .in(ApiCallScope.class);
    bind(BidiGlobalDir.class)
        .toProvider(GuiceSimpleScope.<BidiGlobalDir>getUnscopedProvider())
        .in(ApiCallScope.class);
  }

  /**
   * Builds and provides the map of all installed SoyFunctions (name to function).
   * @param soyFunctionsSet The installed set of SoyFunctions (from Guice Multibinder).
   */
  @Provides
  @Singleton
  Map<String, SoyFunction> provideSoyFunctionsMap(Set<SoyFunction> soyFunctionsSet) {
    ImmutableMap.Builder<String, SoyFunction> byName = ImmutableMap.builder();
    for (SoyFunction fn : soyFunctionsSet) {
      byName.put(fn.getName(), fn);
    }
    return byName.build();
  }

  /**
   * Builds and provides the map of all installed SoyPrintDirectives (name to directive).
   * @param soyDirectivesSet The installed set of SoyPrintDirectives (from Guice Multibinder).
   */
  @Provides
  @Singleton
  Map<String, SoyPrintDirective> provideSoyDirectivesMap(Set<SoyPrintDirective> soyDirectivesSet) {
    ImmutableMap.Builder<String, SoyPrintDirective> byName = ImmutableMap.builder();
    for (SoyPrintDirective directive : soyDirectivesSet) {
      byName.put(directive.getName(), directive);
    }
    return byName.build();
  }

  /**
   * Builds and provides the map of SoyJavaRuntimeFunctions (name to function).
   * @param soyFunctionsSet The installed set of SoyFunctions (from Guice Multibinder). Each
   *     SoyFunction may or may not implement SoyJavaRuntimeFunction.
   */
  @Provides
  @Singleton
  Map<String, SoyJavaRuntimeFunction> provideSoyJavaRuntimeFunctionsMap(
      Set<SoyFunction> soyFunctionsSet) {
    return ModuleUtils.buildSpecificSoyFunctionsMap(SoyJavaRuntimeFunction.class, soyFunctionsSet);
  }

  /**
   * Builds and provides the map of SoyJavaRuntimeDirectives (name to directive).
   * @param soyDirectivesSet The installed set of SoyPrintDirectives (from Guice Multibinder). Each
   *     SoyDirective may or may not implement SoyJavaRuntimeDirective.
   */
  @Provides
  @Singleton
  Map<String, SoyJavaRuntimePrintDirective> provideSoyJavaRuntimeDirectivesMap(
      Set<SoyPrintDirective> soyDirectivesSet) {
    return ModuleUtils.buildSpecificSoyDirectivesMap(
        SoyJavaRuntimePrintDirective.class, soyDirectivesSet);
  }

  // All SharedModule instances are interchangeable, so equality is by class.
  @Override
  public boolean equals(Object other) {
    return other != null && this.getClass().equals(other.getClass());
  }

  @Override
  public int hashCode() {
    return this.getClass().hashCode();
  }
}
apache-2.0
gkatsikas/onos
core/api/src/test/java/org/onosproject/net/config/basics/BasicRegionConfigTest.java
7138
/*
 * Copyright 2016-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.onosproject.net.config.basics;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
import org.junit.Test;
import org.onosproject.net.DeviceId;
import org.onosproject.net.config.InvalidFieldException;
import org.onosproject.net.region.Region;
import org.onosproject.ui.topo.LayoutLocation;

import java.util.List;
import java.util.Set;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.onosproject.net.region.RegionId.regionId;

/**
 * Test class for {@link BasicRegionConfig}.
 */
public class BasicRegionConfigTest extends AbstractConfigTest {

    // Test fixture resource and the JSON field names used by the config.
    private static final String REGION_JSON = "configs.regions.1.json";
    private static final String NAME = "name";
    private static final String TYPE = "type";
    private static final String DEVICES = "devices";

    // Region identifiers and friendly names present in the fixture JSON.
    private static final String R1 = "r1";
    private static final String R2 = "r2";
    private static final String R3 = "r3";
    private static final String EUROPE = "Europe";
    private static final String PARIS = "Paris";
    private static final String AUSTRALIA = "Australia";

    // Expected device sets per region. dstr(...) presumably builds a DeviceId from
    // the given hex suffix — defined in AbstractConfigTest; TODO confirm.
    private static final Set<DeviceId> R1_DEVS =
            ImmutableSet.of(dstr("01"), dstr("02"), dstr("03"));
    private static final Set<DeviceId> R2_DEVS =
            ImmutableSet.of(dstr("04"), dstr("05"), dstr("06"));
    private static final Set<DeviceId> R3_DEVS =
            ImmutableSet.of(dstr("07"), dstr("08"), dstr("09"));
    private static final Set<DeviceId> ALT_DEVICES =
            ImmutableSet.of(dstr("0a"), dstr("0b"), dstr("0c"));

    // Parsed fixture JSON and the config instance under test.
    private JsonNode data;
    private BasicRegionConfig cfg;

    @Before
    public void setUp() {
        data = getTestJson(REGION_JSON);
    }

    // Returns the "basic" config node for the region with the given key.
    private JsonNode getR(String key) {
        return data.get("regions").get(key).get("basic");
    }

    // loads a region config from the test resource file
    private void loadRegion(String rid) {
        JsonNode node = getR(rid);
        print(JSON_LOADED, node);

        cfg = new BasicRegionConfig();
        cfg.init(regionId(rid), rid, node, mapper, delegate);
    }

    // Asserts that cfg has the expected name, type, and device set.
    // A null expD means "no device list is expected at all".
    private void checkRegion(String expN, Region.Type expT, Set<DeviceId> expD) {
        print(CHECKING_S, cfg);
        assertEquals("wrong name", expN, cfg.name());
        assertEquals("wrong type", expT, cfg.type());

        List<DeviceId> devs = cfg.devices();
        if (expD == null) {
            assertNull("unexp device list", devs);
        } else {
            assertNotNull(devs);
            assertEquals("wr.size", expD.size(), devs.size());
            for (DeviceId d : expD) {
                assertTrue("missing dev: " + d, devs.contains(d));
            }
        }
    }

    @Test
    public void region1Config() {
        loadRegion(R1);
        checkRegion(EUROPE, Region.Type.CONTINENT, R1_DEVS);
    }

    @Test
    public void region2Config() {
        loadRegion(R2);
        checkRegion(PARIS, Region.Type.METRO, R2_DEVS);
    }

    @Test
    public void region3Config() {
        // r3 has no friendly name or type in the fixture, so name() falls back to the id.
        loadRegion(R3);
        checkRegion(R3, null, R3_DEVS);
    }

    @Test
    public void modifyName() {
        loadRegion(R1);
        cfg.name(AUSTRALIA);
        checkRegion(AUSTRALIA, Region.Type.CONTINENT, R1_DEVS);
    }

    @Test
    public void clearName() {
        loadRegion(R1);
        checkRegion(EUROPE, Region.Type.CONTINENT, R1_DEVS);
        cfg.name(null);
        // if the friendly name is cleared, name() returns the identifier
        checkRegion(R1, Region.Type.CONTINENT, R1_DEVS);
    }

    @Test
    public void modifyType() {
        loadRegion(R2);
        cfg.type(Region.Type.CAMPUS);
        checkRegion(PARIS, Region.Type.CAMPUS, R2_DEVS);
    }

    @Test
    public void clearType() {
        loadRegion(R2);
        cfg.type(null);
        checkRegion(PARIS, null, R2_DEVS);
    }

    @Test
    public void modifyDevices() {
        loadRegion(R3);
        cfg.devices(ALT_DEVICES);
        checkRegion(R3, null, ALT_DEVICES);
    }

    @Test
    public void clearDevices() {
        loadRegion(R3);
        cfg.devices(null);
        checkRegion(R3, null, null);
    }

    @Test
    public void sampleValidConfig() {
        // TmpJson and BASIC come from AbstractConfigTest; builds a node with only
        // the fields the config schema allows.
        ObjectNode node = new TmpJson()
                .props(NAME, TYPE)
                .arrays(DEVICES)
                .node();
        cfg = new BasicRegionConfig();
        cfg.init(regionId(R1), BASIC, node, mapper, delegate);
        assertTrue("not valid: " + cfg, cfg.isValid());
    }

    @Test(expected = InvalidFieldException.class)
    public void sampleInvalidConfig() {
        // The unexpected "foo" property must cause validation to throw.
        ObjectNode node = new TmpJson()
                .props(NAME, TYPE, "foo")
                .arrays(DEVICES)
                .node();
        cfg = new BasicRegionConfig();
        cfg.init(regionId(R1), BASIC, node, mapper, delegate);
        cfg.isValid();
    }

    @Test
    public void testPeerLocMapping() {
        // Two peers with different layout-location types (GRID vs GEO).
        String peer1 = "peer1";
        String loc1 = LayoutLocation.Type.GRID.toString();
        double loc1Y = 22.0;
        double loc1X = 33.0;
        String peer2 = "peer2";
        String loc2 = LayoutLocation.Type.GEO.toString();
        double loc2Y = 222.0;
        double loc2X = 333.0;

        loadRegion(R2);
        cfg.addPeerLocMapping(peer1, loc1, loc1Y, loc1X);
        cfg.addPeerLocMapping(peer2, loc2, loc2Y, loc2X);

        List<LayoutLocation> locs = cfg.getMappings();
        assertThat(locs, hasSize(2));

        // getMappings() gives no ordering guarantee we can see here, so look each
        // peer up by id rather than by index.
        LayoutLocation createdLoc1 = locs.stream().filter(loc ->
                loc.id().equals(peer1)).findFirst().orElse(null);
        LayoutLocation createdLoc2 = locs.stream().filter(loc ->
                loc.id().equals(peer2)).findFirst().orElse(null);
        assertThat(createdLoc1, notNullValue());
        assertThat(createdLoc2, notNullValue());

        assertThat(createdLoc1.locType().toString(), is(loc1));
        assertThat(createdLoc1.longOrX(), is(loc1X));
        assertThat(createdLoc1.latOrY(), is(loc1Y));

        assertThat(createdLoc2.locType().toString(), is(loc2));
        assertThat(createdLoc2.longOrX(), is(loc2X));
        assertThat(createdLoc2.latOrY(), is(loc2Y));
    }
}
apache-2.0
infochimps-forks/ezbake-platform-services
warehaus/tools/src/main/java/ezbake/warehaus/tools/WarehausViewGet.java
3497
/* Copyright (C) 2013-2015 Computer Sciences Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. */

package ezbake.warehaus.tools;

import java.io.IOException;
import java.util.Properties;

import org.apache.thrift.TException;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;

import ezbake.thrift.ThriftClientPool;
import ezbake.base.thrift.EzSecurityToken;
import ezbake.configuration.EzConfiguration;
import ezbake.configuration.EzConfigurationLoaderException;
import ezbake.warehaus.BinaryReplay;
import ezbake.warehaus.ViewId;
import ezbake.warehaus.WarehausService;

/**
 * Command-line tool that fetches a single view from the warehaus and writes its
 * binary payload to a local file.
 *
 * <p>When {@code --version} is supplied (and numeric) the exact version is fetched;
 * otherwise the latest view is retrieved.
 */
public class WarehausViewGet {

    @Option(name="-f", aliases="--file", required=true, usage="The file")
    private String file;

    @Option(name="-u", aliases="--uri", required=true, usage="The uri")
    private String uri;

    @Option(name="-s", aliases="--namespace", required=true, usage="The namespace")
    private String namespace;

    @Option(name="-n", aliases="--viewname", required=true, usage="The view name")
    private String viewname;

    @Option(name="-v", aliases="--version", usage="The version")
    private String version;

    /**
     * Entry point: parses the command line and runs the export.
     * Prints usage to stderr when the arguments are invalid.
     */
    public static void main(String[] args) throws IOException, TException, EzConfigurationLoaderException {
        WarehausViewGet viewGet = new WarehausViewGet();
        CmdLineParser parser = new CmdLineParser(viewGet);
        try {
            parser.parseArgument(args);
            viewGet.process();
            System.out.println("WarehausViewGet started");
        } catch (CmdLineException e) {
            System.err.println(e.getMessage());
            parser.printUsage(System.err);
        }
    }

    /**
     * Fetches the requested view and exports its packet to {@link #file}.
     *
     * <p>BUG FIX: the original version check was inverted — a supplied
     * {@code --version} fetched the <em>latest</em> view, while an absent version
     * hit {@code Long.parseLong(null)} and fell back to latest. As a result the
     * version option was always ignored. The check is now {@code version == null}
     * and the parsed version is passed to {@code getView}.
     */
    public void process() throws TException, IOException, EzConfigurationLoaderException {
        Properties config;
        try {
            config = new EzConfiguration().getProperties();
        } catch (EzConfigurationLoaderException e) {
            throw new RuntimeException(e);
        }
        ThriftClientPool pool = new ThriftClientPool(config);
        WarehausService.Client client = ToolHelper.createClient(pool);
        EzSecurityToken token = ToolHelper.importToken();
        BinaryReplay binary;
        try {
            ViewId viewId = new ViewId(uri, namespace, viewname);
            if (version == null) {
                // No version requested: fetch the most recent view.
                binary = client.getLatestView(viewId, token);
            } else {
                try {
                    binary = client.getView(viewId, Long.parseLong(version), token);
                } catch (NumberFormatException e) {
                    // Non-numeric version: fall back to the latest view (preserves
                    // the tool's historical lenient behavior) rather than failing.
                    binary = client.getLatestView(viewId, token);
                }
            }
            ToolHelper.exportFile(file, binary.getPacket());
        } catch (Exception e) {
            System.out.println(e.getMessage());
        } finally {
            pool.returnToPool(client);
            pool.close();
        }
    }
}
apache-2.0
MichaelNedzelsky/intellij-community
java/java-tests/testSrc/com/intellij/refactoring/ChangeSignatureTest.java
15632
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring; import com.intellij.codeInsight.TargetElementUtil; import com.intellij.lang.java.JavaLanguage; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.refactoring.changeSignature.ChangeSignatureProcessor; import com.intellij.refactoring.changeSignature.JavaThrownExceptionInfo; import com.intellij.refactoring.changeSignature.ParameterInfoImpl; import com.intellij.refactoring.changeSignature.ThrownExceptionInfo; import com.intellij.refactoring.util.CanonicalTypes; import java.util.HashSet; /** * @author dsl */ public class ChangeSignatureTest extends ChangeSignatureBaseTest { private CommonCodeStyleSettings getJavaSettings() { return getCurrentCodeStyleSettings().getCommonSettings(JavaLanguage.INSTANCE); } public void testSimple() { doTest(null, null, null, new ParameterInfoImpl[0], new ThrownExceptionInfo[0], false); } public void testParameterReorder() { doTest(null, new ParameterInfoImpl[]{new ParameterInfoImpl(1), new ParameterInfoImpl(0)}, false); } public void testWarnAboutContract() { try { doTest(null, new ParameterInfoImpl[]{new ParameterInfoImpl(1), new ParameterInfoImpl(0)}, false); fail("Conflict expected"); } catch (BaseRefactoringProcessor.ConflictsInTestsException ignored) { } } public void testGenericTypes() { doTest(null, null, "T", method -> new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, 
"x", myFactory.createTypeFromText("T", method.getParameterList()), "null"), new ParameterInfoImpl(-1, "y", myFactory.createTypeFromText("C<T>", method.getParameterList()), "null") }, false); } public void testGenericTypesInOldParameters() { doTest(null, null, null, method -> new ParameterInfoImpl[]{ new ParameterInfoImpl(0, "t", myFactory.createTypeFromText("T", method), null) }, false); } public void testTypeParametersInMethod() { doTest(null, null, null, method -> new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "t", myFactory.createTypeFromText("T", method.getParameterList()), "null"), new ParameterInfoImpl(-1, "u", myFactory.createTypeFromText("U", method.getParameterList()), "null"), new ParameterInfoImpl(-1, "cu", myFactory.createTypeFromText("C<U>", method.getParameterList()), "null") }, false); } public void testDefaultConstructor() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "j", PsiType.INT, "27") }, false ); } public void testGenerateDelegate() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "i", PsiType.INT, "27") }, true ); } public void testGenerateDelegateForAbstract() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "i", PsiType.INT, "27") }, true ); } public void testGenerateDelegateWithReturn() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "i", PsiType.INT, "27") }, true ); } public void testGenerateDelegateWithParametersReordering() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(1), new ParameterInfoImpl(-1, "c", PsiType.CHAR, "'a'"), new ParameterInfoImpl(0, "j", PsiType.INT) }, true ); } public void testGenerateDelegateConstructor() { doTest(null, new ParameterInfoImpl[0], true); } public void testGenerateDelegateDefaultConstructor() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "i", PsiType.INT, "27") }, true); } public void testSCR40895() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(0, "y", PsiType.INT), new 
ParameterInfoImpl(1, "b", PsiType.BOOLEAN) }, false); } public void testJavadocGenericsLink() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "y", myFactory.createTypeFromText("java.util.List<java.lang.String>", null)), new ParameterInfoImpl(0, "a", PsiType.BOOLEAN) }, false); } public void testParamNameSameAsFieldName() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(0, "fieldName", PsiType.INT) }, false); } public void testParamNameNoConflict() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(0), new ParameterInfoImpl(-1, "b", PsiType.BOOLEAN) }, false); } public void testVarargMethodToNonVarag() throws Exception { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(0, "i", PsiType.INT), new ParameterInfoImpl(-1, "b", PsiType.BOOLEAN) }, false); } public void testParamJavadoc() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(1, "z", PsiType.INT), new ParameterInfoImpl(0, "y", PsiType.INT) }, false); } public void testParamJavadoc0() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(1, "z", PsiType.INT), new ParameterInfoImpl(0, "y", PsiType.INT) }, false); } public void testParamJavadoc1() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(0, "z", PsiType.BOOLEAN) }, false); } public void testParamJavadoc2() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "z", PsiType.BOOLEAN), new ParameterInfoImpl(0, "a", PsiType.BOOLEAN), }, false); } public void testParamJavadoc3() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(0, "a", PsiType.BOOLEAN), new ParameterInfoImpl(-1, "b", PsiType.BOOLEAN), }, false); } public void testJavadocNoNewLineInserted() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(0, "newArgs", PsiType.DOUBLE), }, false); } public void testSuperCallFromOtherMethod() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "nnn", PsiType.INT, "-222"), }, false); } public void testUseAnyVariable() { doTest(null, 
null, null, method -> new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "l", myFactory.createTypeFromText("List", method), "null", true) }, false); } public void testUseThisAsAnyVariable() { doTest(null, null, null, method -> new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "l", myFactory.createTypeFromText("List", method), "null", true) }, false); } public void testUseAnyVariableAndDefault() { doTest(null, null, null, method -> new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "c", myFactory.createTypeFromText("C", method), "null", true) }, false); } public void testRemoveVarargParameter() { doTest(null, null, null, new ParameterInfoImpl[]{new ParameterInfoImpl(0)}, new ThrownExceptionInfo[0], false); } public void testEnumConstructor() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "i", PsiType.INT, "10") }, false); } public void testVarargs1() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(-1, "b", PsiType.BOOLEAN, "true"), new ParameterInfoImpl(0) }, false); } public void testVarargs2() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(1, "i", PsiType.INT), new ParameterInfoImpl(0, "b", new PsiEllipsisType(PsiType.BOOLEAN)) }, false); } public void testJavadocOfDeleted() { doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(0, "role", PsiType.INT), }, false); } public void testCovariantReturnType() { doTest(CommonClassNames.JAVA_LANG_RUNNABLE, new ParameterInfoImpl[0], false); } public void testReorderExceptions() { doTest(null, null, null, new SimpleParameterGen(new ParameterInfoImpl[0]), new SimpleExceptionsGen(new ThrownExceptionInfo[]{new JavaThrownExceptionInfo(1), new JavaThrownExceptionInfo(0)}), false); } public void testAlreadyHandled() { doTest(null, null, null, new SimpleParameterGen(new ParameterInfoImpl[0]), method -> { return new ThrownExceptionInfo[]{ new JavaThrownExceptionInfo(-1, myFactory.createTypeByFQClassName("java.lang.Exception", method.getResolveScope())) }; }, false ); } public 
void testConstructorException() { doTest(null, null, null, new SimpleParameterGen(new ParameterInfoImpl[0]), method -> { return new ThrownExceptionInfo[]{ new JavaThrownExceptionInfo(-1, myFactory.createTypeByFQClassName("java.io.IOException", method.getResolveScope())) }; }, false ); } public void testAddRuntimeException() { doTest(null, null, null, new SimpleParameterGen(new ParameterInfoImpl[0]), method -> { return new ThrownExceptionInfo[]{ new JavaThrownExceptionInfo(-1, myFactory.createTypeByFQClassName("java.lang.RuntimeException", method.getResolveScope())) }; }, false ); } public void testAddException() { doTest(null, null, null, new SimpleParameterGen(new ParameterInfoImpl[0]), method -> { return new ThrownExceptionInfo[]{ new JavaThrownExceptionInfo(-1, myFactory.createTypeByFQClassName("java.lang.Exception", method.getResolveScope())) }; }, false ); } public void testReorderWithVarargs() { // IDEADEV-26977 doTest(null, new ParameterInfoImpl[]{ new ParameterInfoImpl(1), new ParameterInfoImpl(0, "s", myFactory.createTypeFromText("java.lang.String...", getFile())) }, false); } public void testIntroduceParameterWithDefaultValueInHierarchy() { doTest(null, new ParameterInfoImpl[]{new ParameterInfoImpl(-1, "i", PsiType.INT, "0")}, false); } public void testReorderMultilineMethodParameters() { // Inspired by IDEA-54902 doTest(null, new ParameterInfoImpl[]{new ParameterInfoImpl(1), new ParameterInfoImpl(0)}, false); } public void testRemoveFirstParameter() { doTest(null, new ParameterInfoImpl[]{new ParameterInfoImpl(1)}, false); } public void testReplaceVarargWithArray() { doTest(null, null, null, method -> new ParameterInfoImpl[]{ new ParameterInfoImpl(1, "l", myFactory.createTypeFromText("List<T>[]", method.getParameterList()), "null", false), new ParameterInfoImpl(0, "s", myFactory.createTypeFromText("String", method.getParameterList())) }, false); } public void testMethodParametersAlignmentAfterMethodNameChange() { 
getJavaSettings().ALIGN_MULTILINE_PARAMETERS = true; getJavaSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; doTest(null, "test123asd", null, new SimpleParameterGen(), new SimpleExceptionsGen(), false); } public void testMethodParametersAlignmentAfterMethodVisibilityChange() { getJavaSettings().ALIGN_MULTILINE_PARAMETERS = true; getJavaSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; doTest(PsiModifier.PROTECTED, null, null, new SimpleParameterGen(), new SimpleExceptionsGen(), false); } public void testMethodParametersAlignmentAfterMethodReturnTypeChange() { getJavaSettings().ALIGN_MULTILINE_PARAMETERS = true; getJavaSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; doTest(null, null, "Exception", new SimpleParameterGen(), new SimpleExceptionsGen(), false); } public void testVisibilityOfOverriddenMethod() { doTest(PsiModifier.PACKAGE_LOCAL, "foo", "void", new ParameterInfoImpl[0], new ThrownExceptionInfo[0], false); } public void testRemoveExceptions() { doTest(null, null, "void", new SimpleParameterGen(), new SimpleExceptionsGen(), false); } public void testPropagateParameter() { String basePath = getRelativePath() + getTestName(false); configureByFile(basePath + ".java"); final PsiElement targetElement = TargetElementUtil.findTargetElement(getEditor(), TargetElementUtil.ELEMENT_NAME_ACCEPTED); assertTrue("<caret> is not on method name", targetElement instanceof PsiMethod); PsiMethod method = (PsiMethod)targetElement; final PsiClass containingClass = method.getContainingClass(); assertTrue(containingClass != null); final PsiMethod[] callers = containingClass.findMethodsByName("caller", false); assertTrue(callers.length > 0); final PsiMethod caller = callers[0]; final HashSet<PsiMethod> propagateParametersMethods = new HashSet<>(); propagateParametersMethods.add(caller); final PsiParameter[] parameters = method.getParameterList().getParameters(); new ChangeSignatureProcessor(getProject(), method, false, null, method.getName(), 
CanonicalTypes.createTypeWrapper(PsiType.VOID), new ParameterInfoImpl[]{ new ParameterInfoImpl(0, parameters[0].getName(), parameters[0].getType()), new ParameterInfoImpl(-1, "b", PsiType.BOOLEAN)}, null, propagateParametersMethods, null ).run(); checkResultByFile(basePath + "_after.java"); } public void testPropagateParameterWithOverrider() { String basePath = getRelativePath() + getTestName(false); configureByFile(basePath + ".java"); final PsiElement targetElement = TargetElementUtil.findTargetElement(getEditor(), TargetElementUtil.ELEMENT_NAME_ACCEPTED); assertTrue("<caret> is not on method name", targetElement instanceof PsiMethod); PsiMethod method = (PsiMethod)targetElement; final PsiClass containingClass = method.getContainingClass(); assertTrue(containingClass != null); final PsiMethod[] callers = containingClass.findMethodsByName("caller", false); assertTrue(callers.length > 0); final PsiMethod caller = callers[0]; final HashSet<PsiMethod> propagateParametersMethods = new HashSet<>(); propagateParametersMethods.add(caller); final PsiParameter[] parameters = method.getParameterList().getParameters(); new ChangeSignatureProcessor(getProject(), method, false, null, method.getName(), CanonicalTypes.createTypeWrapper(PsiType.VOID), new ParameterInfoImpl[]{ new ParameterInfoImpl(0, parameters[0].getName(), parameters[0].getType()), new ParameterInfoImpl(-1, "b", PsiType.BOOLEAN, "true")}, null, propagateParametersMethods, null ).run(); checkResultByFile(basePath + "_after.java"); } public void testTypeAnnotationsAllAround() { //String[] ps = {"@TA(1) int @TA(2) []", "java.util.@TA(4) List<@TA(5) Class<@TA(6) ?>>", "@TA(7) String @TA(8) ..."}; //String[] ex = {"@TA(42) IllegalArgumentException", "java.lang.@TA(43) IllegalStateException"}; //doTest("java.util.@TA(0) List<@TA(1) C.@TA(1) Inner>", ps, ex, false); String[] ps = {"@TA(2) int @TA(3) []", "@TA(4) List<@TA(5) Class<@TA(6) ?>>", "@TA(7) String @TA(8) ..."}; String[] ex = {}; doTest("@TA(0) List<@TA(1) 
Inner>", ps, ex, false); } /* workers */ }
apache-2.0
apache/aries
blueprint/plugin/blueprint-maven-plugin-annotation/src/main/java/org/apache/aries/blueprint/annotation/bean/package-info.java
951
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ @org.osgi.annotation.bundle.Export @org.osgi.annotation.versioning.Version("1.0.0") package org.apache.aries.blueprint.annotation.bean;
apache-2.0
otheng03/nbase-arc
api/java/src/test/java/com/navercorp/redis/cluster/HashesCommandsTest.java
11828
/*
 * Copyright 2015 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.redis.cluster;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.junit.Test;

/**
 * Integration tests for the Redis hash commands (HSET, HGET, HSETNX, HMSET,
 * HMGET, HINCRBY, HINCRBYFLOAT, HEXISTS, HDEL, HLEN, HKEYS, HVALS, HGETALL)
 * exposed by the cluster client. Every test exercises both the String and the
 * binary ({@code byte[]}) overloads of the command under test, using the
 * key/value fixtures inherited from {@link RedisClusterTestBase}.
 *
 * @author jaehong.kim
 */
public class HashesCommandsTest extends RedisClusterTestBase {

    /** Deletes every key the tests below may have written, before each test. */
    @Override
    public void clear() {
        redis.del(REDIS_KEY_0);
        redis.del(REDIS_KEY_1);
        redis.del(REDIS_BKEY_0);
        redis.del(REDIS_BKEY_1);
        redis.del(REDIS_BVALUE_0);
    }

    @Test
    public void hset() {
        // HSET returns 1 when the field is created, 0 when it is overwritten.
        long status = redis.hset(REDIS_KEY_0, REDIS_KEY_1, REDIS_VALUE_0);
        assertEquals(1, status);
        status = redis.hset(REDIS_KEY_0, REDIS_KEY_1, REDIS_KEY_0);
        assertEquals(0, status);

        // Binary
        long bstatus = redis.hset(REDIS_BKEY_0, REDIS_BKEY_1, REDIS_BVALUE_0);
        assertEquals(1, bstatus);
        bstatus = redis.hset(REDIS_BKEY_0, REDIS_BKEY_1, REDIS_BKEY_0);
        assertEquals(0, bstatus);
    }

    @Test
    public void hget() {
        redis.hset(REDIS_KEY_0, REDIS_KEY_1, REDIS_VALUE_0);
        // A missing key or a missing field both yield null.
        assertEquals(null, redis.hget(REDIS_KEY_1, REDIS_KEY_0));
        assertEquals(null, redis.hget(REDIS_KEY_0, REDIS_VALUE_0));
        assertEquals(REDIS_VALUE_0, redis.hget(REDIS_KEY_0, REDIS_KEY_1));

        // Binary
        redis.hset(REDIS_BKEY_0, REDIS_BKEY_1, REDIS_BVALUE_0);
        assertEquals(null, redis.hget(REDIS_BKEY_1, REDIS_BKEY_0));
        assertEquals(null, redis.hget(REDIS_BKEY_0, REDIS_BVALUE_0));
        assertArrayEquals(REDIS_BVALUE_0, redis.hget(REDIS_BKEY_0, REDIS_BKEY_1));
    }

    @Test
    public void hsetnx() {
        // HSETNX only writes when the field does not exist yet.
        long status = redis.hsetnx(REDIS_KEY_0, REDIS_KEY_1, REDIS_VALUE_0);
        assertEquals(1, status);
        assertEquals(REDIS_VALUE_0, redis.hget(REDIS_KEY_0, REDIS_KEY_1));

        status = redis.hsetnx(REDIS_KEY_0, REDIS_KEY_1, REDIS_KEY_0);
        assertEquals(0, status);
        assertEquals(REDIS_VALUE_0, redis.hget(REDIS_KEY_0, REDIS_KEY_1));

        status = redis.hsetnx(REDIS_KEY_0, REDIS_VALUE_0, REDIS_KEY_1);
        assertEquals(1, status);
        assertEquals(REDIS_KEY_1, redis.hget(REDIS_KEY_0, REDIS_VALUE_0));

        // Binary
        long bstatus = redis.hsetnx(REDIS_BKEY_0, REDIS_BKEY_1, REDIS_BVALUE_0);
        assertEquals(1, bstatus);
        assertArrayEquals(REDIS_BVALUE_0, redis.hget(REDIS_BKEY_0, REDIS_BKEY_1));

        bstatus = redis.hsetnx(REDIS_BKEY_0, REDIS_BKEY_1, REDIS_BKEY_0);
        assertEquals(0, bstatus);
        assertArrayEquals(REDIS_BVALUE_0, redis.hget(REDIS_BKEY_0, REDIS_BKEY_1));

        bstatus = redis.hsetnx(REDIS_BKEY_0, REDIS_BVALUE_0, REDIS_BKEY_1);
        assertEquals(1, bstatus);
        assertArrayEquals(REDIS_BKEY_1, redis.hget(REDIS_BKEY_0, REDIS_BVALUE_0));
    }

    @Test
    public void hmset() {
        Map<String, String> hash = new HashMap<String, String>();
        hash.put(REDIS_KEY_1, REDIS_VALUE_0);
        hash.put(REDIS_VALUE_0, REDIS_KEY_1);
        String status = redis.hmset(REDIS_KEY_0, hash);
        assertEquals("OK", status);
        assertEquals(REDIS_VALUE_0, redis.hget(REDIS_KEY_0, REDIS_KEY_1));
        assertEquals(REDIS_KEY_1, redis.hget(REDIS_KEY_0, REDIS_VALUE_0));

        // Binary
        Map<byte[], byte[]> bhash = new HashMap<byte[], byte[]>();
        bhash.put(REDIS_BKEY_1, REDIS_BVALUE_0);
        bhash.put(REDIS_BVALUE_0, REDIS_BKEY_1);
        String bstatus = redis.hmset(REDIS_BKEY_0, bhash);
        assertEquals("OK", bstatus);
        assertArrayEquals(REDIS_BVALUE_0, redis.hget(REDIS_BKEY_0, REDIS_BKEY_1));
        assertArrayEquals(REDIS_BKEY_1, redis.hget(REDIS_BKEY_0, REDIS_BVALUE_0));
    }

    @Test
    public void hmget() {
        Map<String, String> hash = new HashMap<String, String>();
        hash.put(REDIS_KEY_1, REDIS_VALUE_0);
        hash.put(REDIS_VALUE_0, REDIS_KEY_1);
        redis.hmset(REDIS_KEY_0, hash);

        // Results come back in request order; unknown fields map to null.
        List<String> values = redis.hmget(REDIS_KEY_0, REDIS_KEY_1, REDIS_VALUE_0, REDIS_KEY_0);
        List<String> expected = new ArrayList<String>();
        expected.add(REDIS_VALUE_0);
        expected.add(REDIS_KEY_1);
        expected.add(null);
        assertEquals(expected, values);

        // Binary
        Map<byte[], byte[]> bhash = new HashMap<byte[], byte[]>();
        bhash.put(REDIS_BKEY_1, REDIS_BVALUE_0);
        bhash.put(REDIS_BVALUE_0, REDIS_BKEY_1);
        redis.hmset(REDIS_BKEY_0, bhash);

        List<byte[]> bvalues = redis.hmget(REDIS_BKEY_0, REDIS_BKEY_1, REDIS_BVALUE_0, REDIS_BKEY_0);
        List<byte[]> bexpected = new ArrayList<byte[]>();
        bexpected.add(REDIS_BVALUE_0);
        bexpected.add(REDIS_BKEY_1);
        bexpected.add(null);
        assertEquals(bexpected, bvalues);
    }

    @Test
    public void hincrBy() {
        // An absent field is treated as 0 before the increment.
        long value = redis.hincrBy(REDIS_KEY_0, REDIS_KEY_1, 1);
        assertEquals(1, value);
        value = redis.hincrBy(REDIS_KEY_0, REDIS_KEY_1, -1);
        assertEquals(0, value);
        value = redis.hincrBy(REDIS_KEY_0, REDIS_KEY_1, -10);
        assertEquals(-10, value);

        // Binary
        long bvalue = redis.hincrBy(REDIS_BKEY_0, REDIS_BKEY_1, 1);
        assertEquals(1, bvalue);
        bvalue = redis.hincrBy(REDIS_BKEY_0, REDIS_BKEY_1, -1);
        assertEquals(0, bvalue);
        bvalue = redis.hincrBy(REDIS_BKEY_0, REDIS_BKEY_1, -10);
        assertEquals(-10, bvalue);
    }

    /**
     * HINCRBYFLOAT. The original version only printed the results to stdout,
     * so it could never fail; the returned values are now asserted (with a
     * small delta, since the command operates on floating-point numbers).
     */
    @Test
    public void hincrByAt() {
        long status = redis.hset(REDIS_KEY_0, REDIS_KEY_1, "10.50");
        assertEquals(1, status);
        double value = redis.hincrByFloat(REDIS_KEY_0, REDIS_KEY_1, 0.1);
        assertEquals(10.6, value, 0.0001);

        // Redis accepts exponent notation for the stored value: 5.0e3 + 2.0e2 = 5200.
        redis.hset(REDIS_KEY_0, REDIS_KEY_1, "5.0e3");
        value = redis.hincrByFloat(REDIS_KEY_0, REDIS_KEY_1, 2.0e2);
        assertEquals(5200.0, value, 0.0001);
    }

    @Test
    public void hexists() {
        Map<String, String> hash = new HashMap<String, String>();
        hash.put(REDIS_KEY_1, REDIS_VALUE_0);
        hash.put(REDIS_VALUE_0, REDIS_KEY_1);
        redis.hmset(REDIS_KEY_0, hash);

        assertFalse(redis.hexists(REDIS_KEY_1, REDIS_KEY_0));
        assertFalse(redis.hexists(REDIS_KEY_0, REDIS_KEY_0));
        assertTrue(redis.hexists(REDIS_KEY_0, REDIS_KEY_1));

        // Binary
        Map<byte[], byte[]> bhash = new HashMap<byte[], byte[]>();
        bhash.put(REDIS_BKEY_1, REDIS_BVALUE_0);
        bhash.put(REDIS_BVALUE_0, REDIS_BKEY_1);
        redis.hmset(REDIS_BKEY_0, bhash);

        assertFalse(redis.hexists(REDIS_BKEY_1, REDIS_BKEY_0));
        assertFalse(redis.hexists(REDIS_BKEY_0, REDIS_BKEY_0));
        assertTrue(redis.hexists(REDIS_BKEY_0, REDIS_BKEY_1));
    }

    @Test
    public void hdel() {
        Map<String, String> hash = new HashMap<String, String>();
        hash.put(REDIS_KEY_1, REDIS_VALUE_0);
        hash.put(REDIS_VALUE_0, REDIS_KEY_1);
        redis.hmset(REDIS_KEY_0, hash);

        // HDEL returns the number of fields actually removed.
        assertEquals(0, redis.hdel(REDIS_KEY_1, REDIS_KEY_0).intValue());
        assertEquals(0, redis.hdel(REDIS_KEY_0, REDIS_KEY_0).intValue());
        assertEquals(1, redis.hdel(REDIS_KEY_0, REDIS_KEY_1).intValue());
        assertEquals(null, redis.hget(REDIS_KEY_0, REDIS_KEY_1));

        // Binary
        Map<byte[], byte[]> bhash = new HashMap<byte[], byte[]>();
        bhash.put(REDIS_BKEY_1, REDIS_BVALUE_0);
        bhash.put(REDIS_BVALUE_0, REDIS_BKEY_1);
        redis.hmset(REDIS_BKEY_0, bhash);

        assertEquals(0, redis.hdel(REDIS_BKEY_1, REDIS_BKEY_0).intValue());
        assertEquals(0, redis.hdel(REDIS_BKEY_0, REDIS_BKEY_0).intValue());
        assertEquals(1, redis.hdel(REDIS_BKEY_0, REDIS_BKEY_1).intValue());
        assertEquals(null, redis.hget(REDIS_BKEY_0, REDIS_BKEY_1));
    }

    @Test
    public void hlen() {
        Map<String, String> hash = new HashMap<String, String>();
        hash.put(REDIS_KEY_1, REDIS_VALUE_0);
        hash.put(REDIS_VALUE_0, REDIS_KEY_1);
        redis.hmset(REDIS_KEY_0, hash);

        assertEquals(0, redis.hlen(REDIS_KEY_1).intValue());
        assertEquals(2, redis.hlen(REDIS_KEY_0).intValue());

        // Binary
        Map<byte[], byte[]> bhash = new HashMap<byte[], byte[]>();
        bhash.put(REDIS_BKEY_1, REDIS_BVALUE_0);
        bhash.put(REDIS_BVALUE_0, REDIS_BKEY_1);
        redis.hmset(REDIS_BKEY_0, bhash);

        assertEquals(0, redis.hlen(REDIS_BKEY_1).intValue());
        assertEquals(2, redis.hlen(REDIS_BKEY_0).intValue());
    }

    @Test
    public void hkeys() {
        Map<String, String> hash = new LinkedHashMap<String, String>();
        hash.put(REDIS_KEY_1, REDIS_VALUE_0);
        hash.put(REDIS_VALUE_0, REDIS_KEY_1);
        redis.hmset(REDIS_KEY_0, hash);

        Set<String> keys = redis.hkeys(REDIS_KEY_0);
        Set<String> expected = new LinkedHashSet<String>();
        expected.add(REDIS_KEY_1);
        expected.add(REDIS_VALUE_0);
        assertEquals(expected, keys);

        // Binary
        Map<byte[], byte[]> bhash = new LinkedHashMap<byte[], byte[]>();
        bhash.put(REDIS_BKEY_1, REDIS_BVALUE_0);
        bhash.put(REDIS_BVALUE_0, REDIS_BKEY_1);
        redis.hmset(REDIS_BKEY_0, bhash);

        Set<byte[]> bkeys = redis.hkeys(REDIS_BKEY_0);
        Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
        bexpected.add(REDIS_BKEY_1);
        bexpected.add(REDIS_BVALUE_0);
        assertEquals(bexpected, bkeys);
    }

    @Test
    public void hvals() {
        Map<String, String> hash = new LinkedHashMap<String, String>();
        hash.put(REDIS_KEY_1, REDIS_VALUE_0);
        hash.put(REDIS_VALUE_0, REDIS_KEY_1);
        redis.hmset(REDIS_KEY_0, hash);

        List<String> vals = redis.hvals(REDIS_KEY_0);
        assertEquals(2, vals.size());
        assertTrue(vals.contains(REDIS_KEY_1));
        assertTrue(vals.contains(REDIS_VALUE_0));

        // Binary — arrayContains (from the base class) compares byte[] by content.
        Map<byte[], byte[]> bhash = new LinkedHashMap<byte[], byte[]>();
        bhash.put(REDIS_BKEY_1, REDIS_BVALUE_0);
        bhash.put(REDIS_BVALUE_0, REDIS_BKEY_1);
        redis.hmset(REDIS_BKEY_0, bhash);

        List<byte[]> bvals = redis.hvals(REDIS_BKEY_0);
        assertEquals(2, bvals.size());
        assertTrue(arrayContains(bvals, REDIS_BKEY_1));
        assertTrue(arrayContains(bvals, REDIS_BVALUE_0));
    }

    @Test
    public void hgetAll() {
        Map<String, String> h = new HashMap<String, String>();
        h.put(REDIS_KEY_1, REDIS_VALUE_0);
        h.put(REDIS_VALUE_0, REDIS_KEY_1);
        redis.hmset(REDIS_KEY_0, h);

        Map<String, String> hash = redis.hgetAll(REDIS_KEY_0);
        assertEquals(2, hash.size());
        assertEquals(REDIS_VALUE_0, hash.get(REDIS_KEY_1));
        assertEquals(REDIS_KEY_1, hash.get(REDIS_VALUE_0));

        // Binary
        Map<byte[], byte[]> bh = new HashMap<byte[], byte[]>();
        bh.put(REDIS_BKEY_1, REDIS_BVALUE_0);
        bh.put(REDIS_BVALUE_0, REDIS_BKEY_1);
        redis.hmset(REDIS_BKEY_0, bh);

        Map<byte[], byte[]> bhash = redis.hgetAll(REDIS_BKEY_0);
        assertEquals(2, bhash.size());
        assertArrayEquals(REDIS_BVALUE_0, bhash.get(REDIS_BKEY_1));
        assertArrayEquals(REDIS_BKEY_1, bhash.get(REDIS_BVALUE_0));
    }
}
apache-2.0
tsyma/pinpoint
commons-server/src/main/java/com/navercorp/pinpoint/common/server/bo/stat/AgentStatType.java
1870
/*
 * Copyright 2016 Naver Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.common.server.bo.stat;

/**
 * Enumerates the categories of agent statistics, each identified by a
 * single-byte type code that is serialized into storage rows. Type codes are
 * treated as unsigned (0-255); unrecognized codes decode to {@link #UNKNOWN}.
 *
 * @author HyunGil Jeong
 */
public enum AgentStatType {
    UNKNOWN(0, "Unknown"),
    JVM_GC(1, "JVM GC"),
    JVM_GC_DETAILED(2, "JVM GC Detailed"),
    CPU_LOAD(3, "Cpu Usage"),
    // The (byte) casts the original had on the next two constants were
    // redundant (the constructor takes an int) and inconsistent with the
    // constants above; they have been removed — behavior is identical.
    TRANSACTION(4, "Transaction"),
    ACTIVE_TRACE(5, "Active Trace");

    /** Number of bytes the type code occupies when serialized. */
    public static final int TYPE_CODE_BYTE_LENGTH = 1;

    private final byte typeCode;
    private final String name;

    /**
     * @param typeCode unsigned type code; must fit in one byte (0-255)
     * @param name     human-readable display name
     * @throws IllegalArgumentException if {@code typeCode} is out of range
     */
    AgentStatType(int typeCode, String name) {
        if (typeCode < 0 || typeCode > 255) {
            throw new IllegalArgumentException("type code out of range (0~255)");
        }
        // Stored as a (possibly negative) byte; getTypeCode() restores the
        // unsigned view.
        this.typeCode = (byte) (typeCode & 0xFF);
        this.name = name;
    }

    /** Returns the type code as an unsigned int in [0, 255]. */
    public int getTypeCode() {
        return this.typeCode & 0xFF;
    }

    /** Returns the raw signed byte exactly as it is serialized. */
    public byte getRawTypeCode() {
        return typeCode;
    }

    public String getName() {
        return name;
    }

    @Override
    public String toString() {
        return this.name;
    }

    /**
     * Decodes a raw type code read from storage.
     *
     * @param typeCode raw serialized byte
     * @return the matching constant, or {@link #UNKNOWN} if none matches
     */
    public static AgentStatType fromTypeCode(byte typeCode) {
        for (AgentStatType agentStatType : AgentStatType.values()) {
            if (agentStatType.typeCode == typeCode) {
                return agentStatType;
            }
        }
        return UNKNOWN;
    }
}
apache-2.0
AakashPradeep/phoenix
phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
31671
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.compile; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.phoenix.coprocessor.MetaDataProtocol; import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult; import org.apache.phoenix.expression.Expression; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.parse.AliasedNode; import org.apache.phoenix.parse.BindTableNode; import org.apache.phoenix.parse.ColumnDef; import org.apache.phoenix.parse.CreateTableStatement; import org.apache.phoenix.parse.DMLStatement; import org.apache.phoenix.parse.DerivedTableNode; import org.apache.phoenix.parse.FamilyWildcardParseNode; import org.apache.phoenix.parse.JoinTableNode; import org.apache.phoenix.parse.NamedTableNode; import org.apache.phoenix.parse.ParseNode; import org.apache.phoenix.parse.ParseNodeFactory; import org.apache.phoenix.parse.SelectStatement; import 
org.apache.phoenix.parse.SingleTableStatement; import org.apache.phoenix.parse.TableName; import org.apache.phoenix.parse.TableNode; import org.apache.phoenix.parse.TableNodeVisitor; import org.apache.phoenix.parse.TableWildcardParseNode; import org.apache.phoenix.parse.WildcardParseNode; import org.apache.phoenix.query.ConnectionQueryServices; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.schema.AmbiguousColumnException; import org.apache.phoenix.schema.AmbiguousTableException; import org.apache.phoenix.schema.ColumnFamilyNotFoundException; import org.apache.phoenix.schema.ColumnNotFoundException; import org.apache.phoenix.schema.ColumnRef; import org.apache.phoenix.schema.MetaDataClient; import org.apache.phoenix.schema.PColumn; import org.apache.phoenix.schema.PColumnFamily; import org.apache.phoenix.schema.PColumnFamilyImpl; import org.apache.phoenix.schema.PColumnImpl; import org.apache.phoenix.schema.PName; import org.apache.phoenix.schema.PNameFactory; import org.apache.phoenix.schema.PTable; import org.apache.phoenix.schema.PTableImpl; import org.apache.phoenix.schema.PTableKey; import org.apache.phoenix.schema.PTableType; import org.apache.phoenix.schema.ProjectedColumn; import org.apache.phoenix.schema.SortOrder; import org.apache.phoenix.schema.TableNotFoundException; import org.apache.phoenix.schema.TableRef; import org.apache.phoenix.schema.PTable.IndexType; import org.apache.phoenix.util.Closeables; import org.apache.phoenix.util.IndexUtil; import org.apache.phoenix.util.LogUtil; import org.apache.phoenix.util.SchemaUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; /** * Validates FROM clause and builds a ColumnResolver for resolving column references * * * @since 0.1 */ public class 
FromCompiler { private static final Logger logger = LoggerFactory.getLogger(FromCompiler.class); public static final ColumnResolver EMPTY_TABLE_RESOLVER = new ColumnResolver() { @Override public List<TableRef> getTables() { return Collections.emptyList(); } @Override public TableRef resolveTable(String schemaName, String tableName) throws SQLException { throw new UnsupportedOperationException(); } @Override public ColumnRef resolveColumn(String schemaName, String tableName, String colName) throws SQLException { throw new UnsupportedOperationException(); } }; public static ColumnResolver getResolverForCreation(final CreateTableStatement statement, final PhoenixConnection connection) throws SQLException { TableName baseTable = statement.getBaseTableName(); if (baseTable == null) { return EMPTY_TABLE_RESOLVER; } NamedTableNode tableNode = NamedTableNode.create(null, baseTable, Collections.<ColumnDef>emptyList()); // Always use non-tenant-specific connection here try { SingleTableColumnResolver visitor = new SingleTableColumnResolver(connection, tableNode, true); return visitor; } catch (TableNotFoundException e) { // Used for mapped VIEW, since we won't be able to resolve that. // Instead, we create a table with just the dynamic columns. // A tenant-specific connection may not create a mapped VIEW. 
if (connection.getTenantId() == null && statement.getTableType() == PTableType.VIEW) { ConnectionQueryServices services = connection.getQueryServices(); byte[] fullTableName = SchemaUtil.getTableNameAsBytes(baseTable.getSchemaName(), baseTable.getTableName()); HTableInterface htable = null; try { htable = services.getTable(fullTableName); } catch (UnsupportedOperationException ignore) { throw e; // For Connectionless } finally { if (htable != null) Closeables.closeQuietly(htable); } tableNode = NamedTableNode.create(null, baseTable, statement.getColumnDefs()); return new SingleTableColumnResolver(connection, tableNode, e.getTimeStamp()); } throw e; } } /** * Iterate through the nodes in the FROM clause to build a column resolver used to lookup a column given the name * and alias. * * @param statement * the select statement * @return the column resolver * @throws SQLException * @throws SQLFeatureNotSupportedException * if unsupported constructs appear in the FROM clause * @throws TableNotFoundException * if table name not found in schema */ public static ColumnResolver getResolverForQuery(SelectStatement statement, PhoenixConnection connection) throws SQLException { TableNode fromNode = statement.getFrom(); if (fromNode instanceof NamedTableNode) return new SingleTableColumnResolver(connection, (NamedTableNode) fromNode, true, 1); MultiTableColumnResolver visitor = new MultiTableColumnResolver(connection, 1); fromNode.accept(visitor); return visitor; } public static ColumnResolver getResolver(NamedTableNode tableNode, PhoenixConnection connection) throws SQLException { SingleTableColumnResolver visitor = new SingleTableColumnResolver(connection, tableNode, true); return visitor; } public static ColumnResolver getResolver(SingleTableStatement statement, PhoenixConnection connection) throws SQLException { SingleTableColumnResolver visitor = new SingleTableColumnResolver(connection, statement.getTable(), true); return visitor; } public static ColumnResolver 
getResolverForCompiledDerivedTable(PhoenixConnection connection, TableRef tableRef, RowProjector projector) throws SQLException { List<PColumn> projectedColumns = new ArrayList<PColumn>(); PTable table = tableRef.getTable(); for (PColumn column : table.getColumns()) { Expression sourceExpression = projector.getColumnProjector(column.getPosition()).getExpression(); PColumnImpl projectedColumn = new PColumnImpl(column.getName(), column.getFamilyName(), sourceExpression.getDataType(), sourceExpression.getMaxLength(), sourceExpression.getScale(), sourceExpression.isNullable(), column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr()); projectedColumns.add(projectedColumn); } PTable t = PTableImpl.makePTable(table, projectedColumns); return new SingleTableColumnResolver(connection, new TableRef(tableRef.getTableAlias(), t, tableRef.getLowerBoundTimeStamp(), tableRef.hasDynamicCols())); } public static ColumnResolver getResolver(TableRef tableRef) throws SQLException { SingleTableColumnResolver visitor = new SingleTableColumnResolver(tableRef); return visitor; } public static ColumnResolver getResolverForMutation(DMLStatement statement, PhoenixConnection connection) throws SQLException { /* * We validate the meta data at commit time for mutations, as this allows us to do many UPSERT VALUES calls * without hitting the server each time to check if the meta data is up-to-date. 
*/ SingleTableColumnResolver visitor = new SingleTableColumnResolver(connection, statement.getTable(), false); return visitor; } public static ColumnResolver getResolverForProjectedTable(PTable projectedTable) { return new ProjectedTableColumnResolver(projectedTable); } private static class SingleTableColumnResolver extends BaseColumnResolver { private final List<TableRef> tableRefs; private final String alias; public SingleTableColumnResolver(PhoenixConnection connection, NamedTableNode table, long timeStamp) throws SQLException { super(connection, 0); List<PColumnFamily> families = Lists.newArrayListWithExpectedSize(table.getDynamicColumns().size()); for (ColumnDef def : table.getDynamicColumns()) { if (def.getColumnDefName().getFamilyName() != null) { families.add(new PColumnFamilyImpl(PNameFactory.newName(def.getColumnDefName().getFamilyName()),Collections.<PColumn>emptyList())); } } Long scn = connection.getSCN(); PTable theTable = new PTableImpl(connection.getTenantId(), table.getName().getSchemaName(), table.getName().getTableName(), scn == null ? 
HConstants.LATEST_TIMESTAMP : scn, families); theTable = this.addDynamicColumns(table.getDynamicColumns(), theTable); alias = null; tableRefs = ImmutableList.of(new TableRef(alias, theTable, timeStamp, !table.getDynamicColumns().isEmpty())); } public SingleTableColumnResolver(PhoenixConnection connection, NamedTableNode tableNode, boolean updateCacheImmediately) throws SQLException { this(connection, tableNode, updateCacheImmediately, 0); } public SingleTableColumnResolver(PhoenixConnection connection, NamedTableNode tableNode, boolean updateCacheImmediately, int tsAddition) throws SQLException { super(connection, tsAddition); alias = tableNode.getAlias(); TableRef tableRef = createTableRef(tableNode, updateCacheImmediately); tableRefs = ImmutableList.of(tableRef); } public SingleTableColumnResolver(PhoenixConnection connection, TableRef tableRef) { super(connection, 0); alias = tableRef.getTableAlias(); tableRefs = ImmutableList.of(tableRef); } public SingleTableColumnResolver(TableRef tableRef) throws SQLException { super(null, 0); alias = tableRef.getTableAlias(); tableRefs = ImmutableList.of(tableRef); } @Override public List<TableRef> getTables() { return tableRefs; } @Override public TableRef resolveTable(String schemaName, String tableName) throws SQLException { TableRef tableRef = tableRefs.get(0); /* * The only case we can definitely verify is when both a schemaName and a tableName * are provided. Otherwise, the tableName might be a column family. In this case, * this will be validated by resolveColumn. */ if (schemaName != null || tableName != null) { String resolvedTableName = tableRef.getTable().getTableName().getString(); String resolvedSchemaName = tableRef.getTable().getSchemaName().getString(); if (schemaName != null && tableName != null) { if ( ! ( schemaName.equals(resolvedSchemaName) && tableName.equals(resolvedTableName) ) && ! 
schemaName.equals(alias) ) { throw new TableNotFoundException(schemaName, tableName); } } } return tableRef; } @Override public ColumnRef resolveColumn(String schemaName, String tableName, String colName) throws SQLException { TableRef tableRef = tableRefs.get(0); boolean resolveCF = false; if (schemaName != null || tableName != null) { String resolvedTableName = tableRef.getTable().getTableName().getString(); String resolvedSchemaName = tableRef.getTable().getSchemaName().getString(); if (schemaName != null && tableName != null) { if ( ! ( schemaName.equals(resolvedSchemaName) && tableName.equals(resolvedTableName) )) { if (!(resolveCF = schemaName.equals(alias))) { throw new ColumnNotFoundException(schemaName, tableName, null, colName); } } } else { // schemaName == null && tableName != null if (tableName != null && !tableName.equals(alias) && (!tableName.equals(resolvedTableName) || !resolvedSchemaName.equals(""))) { resolveCF = true; } } } PColumn column = resolveCF ? tableRef.getTable().getColumnFamily(tableName).getColumn(colName) : tableRef.getTable().getColumn(colName); return new ColumnRef(tableRef, column.getPosition()); } } private static abstract class BaseColumnResolver implements ColumnResolver { protected final PhoenixConnection connection; protected final MetaDataClient client; // Fudge factor to add to current time we calculate. We need this when we do a SELECT // on Windows because the millis timestamp granularity is so bad we sometimes won't // get the data back that we just upsert. private final int tsAddition; private BaseColumnResolver(PhoenixConnection connection, int tsAddition) { this.connection = connection; this.client = connection == null ? 
null : new MetaDataClient(connection); this.tsAddition = tsAddition; } protected TableRef createTableRef(NamedTableNode tableNode, boolean updateCacheImmediately) throws SQLException { String tableName = tableNode.getName().getTableName(); String schemaName = tableNode.getName().getSchemaName(); long timeStamp = QueryConstants.UNSET_TIMESTAMP; String fullTableName = SchemaUtil.getTableName(schemaName, tableName); PName tenantId = connection.getTenantId(); PTable theTable = null; if (updateCacheImmediately || connection.getAutoCommit()) { MetaDataMutationResult result = client.updateCache(schemaName, tableName); timeStamp = result.getMutationTime(); theTable = result.getTable(); if (theTable == null) { throw new TableNotFoundException(schemaName, tableName, timeStamp); } } else { try { theTable = connection.getMetaDataCache().getTable(new PTableKey(tenantId, fullTableName)); } catch (TableNotFoundException e1) { if (tenantId != null) { // Check with null tenantId next try { theTable = connection.getMetaDataCache().getTable(new PTableKey(null, fullTableName)); } catch (TableNotFoundException e2) { } } } // We always attempt to update the cache in the event of a TableNotFoundException if (theTable == null) { MetaDataMutationResult result = client.updateCache(schemaName, tableName); if (result.wasUpdated()) { timeStamp = result.getMutationTime(); theTable = result.getTable(); } } if (theTable == null) { throw new TableNotFoundException(schemaName, tableName, timeStamp); } } // Add any dynamic columns to the table declaration List<ColumnDef> dynamicColumns = tableNode.getDynamicColumns(); theTable = addDynamicColumns(dynamicColumns, theTable); if (timeStamp != QueryConstants.UNSET_TIMESTAMP) { timeStamp += tsAddition; } TableRef tableRef = new TableRef(tableNode.getAlias(), theTable, timeStamp, !dynamicColumns.isEmpty()); if (logger.isDebugEnabled() && timeStamp != QueryConstants.UNSET_TIMESTAMP) { logger.debug(LogUtil.addCustomAnnotations("Re-resolved stale table " + 
fullTableName + " with seqNum " + tableRef.getTable().getSequenceNumber() + " at timestamp " + tableRef.getTable().getTimeStamp() + " with " + tableRef.getTable().getColumns().size() + " columns: " + tableRef.getTable().getColumns(), connection)); } return tableRef; } protected PTable addDynamicColumns(List<ColumnDef> dynColumns, PTable theTable) throws SQLException { if (!dynColumns.isEmpty()) { List<PColumn> allcolumns = new ArrayList<PColumn>(); List<PColumn> existingColumns = theTable.getColumns(); // Need to skip the salting column, as it's added in the makePTable call below allcolumns.addAll(theTable.getBucketNum() == null ? existingColumns : existingColumns.subList(1, existingColumns.size())); // Position still based on with the salting columns int position = existingColumns.size(); PName defaultFamilyName = PNameFactory.newName(SchemaUtil.getEmptyColumnFamily(theTable)); for (ColumnDef dynColumn : dynColumns) { PName familyName = defaultFamilyName; PName name = PNameFactory.newName(dynColumn.getColumnDefName().getColumnName()); String family = dynColumn.getColumnDefName().getFamilyName(); if (family != null) { theTable.getColumnFamily(family); // Verifies that column family exists familyName = PNameFactory.newName(family); } allcolumns.add(new PColumnImpl(name, familyName, dynColumn.getDataType(), dynColumn.getMaxLength(), dynColumn.getScale(), dynColumn.isNull(), position, dynColumn.getSortOrder(), dynColumn.getArraySize(), null, false, dynColumn.getExpression())); position++; } theTable = PTableImpl.makePTable(theTable, allcolumns); } return theTable; } } private static class MultiTableColumnResolver extends BaseColumnResolver implements TableNodeVisitor<Void> { protected final ListMultimap<String, TableRef> tableMap; protected final List<TableRef> tables; private MultiTableColumnResolver(PhoenixConnection connection, int tsAddition) { super(connection, tsAddition); tableMap = ArrayListMultimap.<String, TableRef> create(); tables = Lists.newArrayList(); } 
@Override public List<TableRef> getTables() { return tables; } @Override public Void visit(BindTableNode boundTableNode) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public Void visit(JoinTableNode joinNode) throws SQLException { joinNode.getLHS().accept(this); joinNode.getRHS().accept(this); return null; } @Override public Void visit(NamedTableNode tableNode) throws SQLException { String alias = tableNode.getAlias(); TableRef tableRef = createTableRef(tableNode, true); PTable theTable = tableRef.getTable(); if (alias != null) { tableMap.put(alias, tableRef); } String name = theTable.getName().getString(); //avoid having one name mapped to two identical TableRef. if (alias == null || !alias.equals(name)) { tableMap.put(name, tableRef); } tables.add(tableRef); return null; } @Override public Void visit(DerivedTableNode subselectNode) throws SQLException { List<AliasedNode> selectNodes = subselectNode.getSelect().getSelect(); List<PColumn> columns = new ArrayList<PColumn>(); int position = 0; for (AliasedNode aliasedNode : selectNodes) { String alias = aliasedNode.getAlias(); if (alias == null) { ParseNode node = aliasedNode.getNode(); if (node instanceof WildcardParseNode || node instanceof TableWildcardParseNode || node instanceof FamilyWildcardParseNode) throw new SQLException("Encountered wildcard in subqueries."); alias = SchemaUtil.normalizeIdentifier(node.getAlias()); } if (alias == null) { // Use position as column name for anonymous columns, which can be // referenced by an outer wild-card select. 
alias = String.valueOf(position); } PColumnImpl column = new PColumnImpl(PNameFactory.newName(alias), PNameFactory.newName(QueryConstants.DEFAULT_COLUMN_FAMILY), null, 0, 0, true, position++, SortOrder.ASC, null, null, false, null); columns.add(column); } PTable t = PTableImpl.makePTable(null, PName.EMPTY_NAME, PName.EMPTY_NAME, PTableType.SUBQUERY, null, MetaDataProtocol.MIN_TABLE_TIMESTAMP, PTable.INITIAL_SEQ_NUM, null, null, columns, null, null, Collections.<PTable>emptyList(), false, Collections.<PName>emptyList(), null, null, false, false, false, null, null, null); String alias = subselectNode.getAlias(); TableRef tableRef = new TableRef(alias, t, MetaDataProtocol.MIN_TABLE_TIMESTAMP, false); tableMap.put(alias, tableRef); tables.add(tableRef); return null; } private static class ColumnFamilyRef { private final TableRef tableRef; private final PColumnFamily family; ColumnFamilyRef(TableRef tableRef, PColumnFamily family) { this.tableRef = tableRef; this.family = family; } public TableRef getTableRef() { return tableRef; } public PColumnFamily getFamily() { return family; } } @Override public TableRef resolveTable(String schemaName, String tableName) throws SQLException { String fullTableName = SchemaUtil.getTableName(schemaName, tableName); List<TableRef> tableRefs = tableMap.get(fullTableName); if (tableRefs.size() == 0) { throw new TableNotFoundException(fullTableName); } else if (tableRefs.size() > 1) { throw new AmbiguousTableException(tableName); } else { return tableRefs.get(0); } } private ColumnFamilyRef resolveColumnFamily(String tableName, String cfName) throws SQLException { if (tableName == null) { ColumnFamilyRef theColumnFamilyRef = null; Iterator<TableRef> iterator = tables.iterator(); while (iterator.hasNext()) { TableRef tableRef = iterator.next(); try { PColumnFamily columnFamily = tableRef.getTable().getColumnFamily(cfName); if (theColumnFamilyRef != null) { throw new TableNotFoundException(cfName); } theColumnFamilyRef = new 
ColumnFamilyRef(tableRef, columnFamily); } catch (ColumnFamilyNotFoundException e) {} } if (theColumnFamilyRef != null) { return theColumnFamilyRef; } throw new TableNotFoundException(cfName); } else { TableRef tableRef = resolveTable(null, tableName); PColumnFamily columnFamily = tableRef.getTable().getColumnFamily(cfName); return new ColumnFamilyRef(tableRef, columnFamily); } } @Override public ColumnRef resolveColumn(String schemaName, String tableName, String colName) throws SQLException { if (tableName == null) { int theColumnPosition = -1; TableRef theTableRef = null; Iterator<TableRef> iterator = tables.iterator(); while (iterator.hasNext()) { TableRef tableRef = iterator.next(); try { PColumn column = tableRef.getTable().getColumn(colName); if (theTableRef != null) { throw new AmbiguousColumnException(colName); } theTableRef = tableRef; theColumnPosition = column.getPosition(); } catch (ColumnNotFoundException e) { } } if (theTableRef != null) { return new ColumnRef(theTableRef, theColumnPosition); } throw new ColumnNotFoundException(colName); } else { try { TableRef tableRef = resolveTable(schemaName, tableName); PColumn column = tableRef.getTable().getColumn(colName); return new ColumnRef(tableRef, column.getPosition()); } catch (TableNotFoundException e) { // Try using the tableName as a columnFamily reference instead ColumnFamilyRef cfRef = resolveColumnFamily(schemaName, tableName); PColumn column = cfRef.getFamily().getColumn(colName); return new ColumnRef(cfRef.getTableRef(), column.getPosition()); } } } } private static class ProjectedTableColumnResolver extends MultiTableColumnResolver { private final boolean isLocalIndex; private final List<TableRef> theTableRefs; private final Map<ColumnRef, Integer> columnRefMap; private ProjectedTableColumnResolver(PTable projectedTable) { super(null, 0); Preconditions.checkArgument(projectedTable.getType() == PTableType.PROJECTED); this.isLocalIndex = projectedTable.getIndexType() == IndexType.LOCAL; 
this.columnRefMap = new HashMap<ColumnRef, Integer>(); long ts = Long.MAX_VALUE; for (int i = projectedTable.getBucketNum() == null ? 0 : 1; i < projectedTable.getColumns().size(); i++) { PColumn column = projectedTable.getColumns().get(i); ColumnRef colRef = ((ProjectedColumn) column).getSourceColumnRef(); TableRef tableRef = colRef.getTableRef(); if (!tables.contains(tableRef)) { String alias = tableRef.getTableAlias(); if (alias != null) { this.tableMap.put(alias, tableRef); } String name = tableRef.getTable().getName().getString(); if (alias == null || !alias.equals(name)) { tableMap.put(name, tableRef); } tables.add(tableRef); if (tableRef.getLowerBoundTimeStamp() < ts) { ts = tableRef.getLowerBoundTimeStamp(); } } this.columnRefMap.put(new ColumnRef(tableRef, colRef.getColumnPosition()), column.getPosition()); } this.theTableRefs = ImmutableList.of(new TableRef(ParseNodeFactory.createTempAlias(), projectedTable, ts, false)); } @Override public List<TableRef> getTables() { return theTableRefs; } @Override public ColumnRef resolveColumn(String schemaName, String tableName, String colName) throws SQLException { ColumnRef colRef; try { colRef = super.resolveColumn(schemaName, tableName, colName); } catch (ColumnNotFoundException e) { // This could be a ColumnRef for local index data column. TableRef tableRef = isLocalIndex ? super.getTables().get(0) : super.resolveTable(schemaName, tableName); if (tableRef.getTable().getIndexType() == IndexType.LOCAL) { try { TableRef parentTableRef = super.resolveTable( tableRef.getTable().getSchemaName().getString(), tableRef.getTable().getParentTableName().getString()); colRef = new ColumnRef(parentTableRef, IndexUtil.getDataColumnFamilyName(colName), IndexUtil.getDataColumnName(colName)); } catch (TableNotFoundException te) { throw e; } } else { throw e; } } Integer position = columnRefMap.get(colRef); if (position == null) throw new ColumnNotFoundException(colName); return new ColumnRef(theTableRefs.get(0), position); } } }
apache-2.0
NSAmelchev/ignite
modules/core/src/main/java/org/apache/ignite/mxbean/MXBeanParameter.java
1376
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.mxbean; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Provides name and description for MBean method parameter. */ @Documented @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.PARAMETER}) public @interface MXBeanParameter { /** * @return Parameter name. */ String name(); /** * @return Parameter description. */ String description(); }
apache-2.0
john9x/jdbi
core/src/main/java/org/jdbi/v3/core/argument/EnumArgumentFactory.java
3531
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jdbi.v3.core.argument; import java.lang.reflect.Type; import java.sql.Types; import java.util.Optional; import java.util.function.Function; import org.jdbi.v3.core.config.ConfigRegistry; import org.jdbi.v3.core.enums.DatabaseValue; import org.jdbi.v3.core.enums.EnumStrategy; import org.jdbi.v3.core.internal.EnumStrategies; import org.jdbi.v3.core.internal.exceptions.Unchecked; import org.jdbi.v3.core.qualifier.QualifiedType; class EnumArgumentFactory implements QualifiedArgumentFactory { @Override @SuppressWarnings("unchecked") public Optional<Argument> build(QualifiedType<?> givenType, Object value, ConfigRegistry config) { return ifEnum(givenType.getType()) .flatMap(clazz -> makeEnumArgument((QualifiedType<Enum>) givenType, (Enum) value, config)); } @SuppressWarnings("unchecked") static <E extends Enum<E>> Optional<Class<E>> ifEnum(Type type) { if (type instanceof Class<?>) { final Class<?> cast = (Class<?>) type; if (Enum.class.isAssignableFrom(cast)) { return Optional.of((Class<E>) cast); } } return Optional.empty(); } private static <E extends Enum<E>> Optional<Argument> makeEnumArgument(QualifiedType<E> givenType, E value, ConfigRegistry config) { boolean byName = EnumStrategy.BY_NAME == config.get(EnumStrategies.class).findStrategy(givenType); return byName ? 
byName(value, config) : byOrdinal(value, config); } private static <E extends Enum<E>> Optional<Argument> byName(E value, ConfigRegistry config) { return makeArgument(Types.VARCHAR, String.class, value, EnumArgumentFactory::annotatedValue, config); } private static <E extends Enum<E>> String annotatedValue(E e) { return Optional.of(e.getDeclaringClass()) .map(Unchecked.function(type -> type.getField(e.name()))) .map(field -> field.getAnnotation(DatabaseValue.class)) .map(DatabaseValue::value) .orElse(e.name()); } private static <E extends Enum<E>> Optional<Argument> byOrdinal(E value, ConfigRegistry config) { return makeArgument(Types.INTEGER, Integer.class, value, E::ordinal, config); } private static <A, E extends Enum<E>> Optional<Argument> makeArgument(int nullType, Class<A> attributeType, E value, Function<E, A> transform, ConfigRegistry config) { if (value == null) { return Optional.of(new NullArgument(nullType)); } return config.get(Arguments.class).findFor(attributeType, transform.apply(value)); } }
apache-2.0
maksimov/dasein-cloud-core
src/main/java/org/dasein/cloud/InvalidStateException.java
1378
/**
 * Copyright (C) 2009-2016 Dell, Inc.
 * See annotations for authorship information
 *
 * ====================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ====================================================================
 */

package org.dasein.cloud;

import javax.annotation.Nonnull;

/**
 * An error which has been caused by the resource being in an incompatible state for the operation.
 * Both constructors tag the exception with {@link CloudErrorType#INVALID_STATE}.
 * User: daniellemayne
 * Date: 27/11/2015
 * Time: 15:21
 */
public class InvalidStateException extends CloudException {
    /**
     * Constructs an invalid-state error with the given message.
     * @param msg a description of the invalid state encountered
     */
    public InvalidStateException(@Nonnull String msg) {
        // @Nonnull added for consistency with the two-arg constructor below.
        super(msg);
        this.errorType = CloudErrorType.INVALID_STATE;
    }

    /**
     * Constructs an invalid-state error with the given message and underlying cause.
     * @param msg   a description of the invalid state encountered
     * @param cause the exception that triggered this error
     */
    public InvalidStateException(@Nonnull String msg, @Nonnull Throwable cause) {
        super(msg, cause);
        this.errorType = CloudErrorType.INVALID_STATE;
    }
}
apache-2.0
mashuai/Open-Source-Research
Javac2007/流程/parser/Parser/illegal.java
483
/** Report an illegal start of expression/type error at given position. */
JCExpression illegal(int pos) {
    setErrorEndPos(S.pos());
    // Pick the diagnostic key according to the current parsing mode:
    // an expression was expected when the EXPR bit is set, a type otherwise.
    String key = (mode & EXPR) != 0
        ? "illegal.start.of.expr"
        : "illegal.start.of.type";
    return syntaxError(pos, key);
}

/** Report an illegal start of expression/type error at current position. */
JCExpression illegal() {
    return illegal(S.pos());
}
apache-2.0
jskora/nifi
nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncryptContent.java
26716
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.standard; import java.io.ByteArrayInputStream; import java.io.File; import java.io.InputStream; import java.io.IOException; import java.lang.reflect.Method; import java.nio.file.Paths; import java.security.Security; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Objects; import org.apache.commons.codec.binary.Hex; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.security.util.EncryptionMethod; import org.apache.nifi.security.util.KeyDerivationFunction; import org.apache.nifi.security.util.crypto.CipherUtility; import org.apache.nifi.security.util.crypto.PasswordBasedEncryptor; import org.apache.nifi.util.MockFlowFile; import org.apache.nifi.util.MockProcessContext; import org.apache.nifi.util.TestRunner; import org.apache.nifi.util.TestRunners; import org.bouncycastle.bcpg.BCPGInputStream; import org.bouncycastle.bcpg.SymmetricKeyEncSessionPacket; import org.bouncycastle.jce.provider.BouncyCastleProvider; import org.junit.Assert; import org.junit.Assume; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import static org.bouncycastle.openpgp.PGPUtil.getDecoderStream; import static org.junit.Assert.fail; public class TestEncryptContent { private static final Logger logger = LoggerFactory.getLogger(TestEncryptContent.class); private static AllowableValue[] getPGPCipherList() { try{ Method method = EncryptContent.class.getDeclaredMethod("buildPGPSymmetricCipherAllowableValues"); method.setAccessible(true); return ((AllowableValue[]) method.invoke(null)); } catch (Exception e){ logger.error("Cannot access buildPGPSymmetricCipherAllowableValues", e); fail("Cannot access buildPGPSymmetricCipherAllowableValues"); } return null; } @Before public void setUp() { Security.addProvider(new BouncyCastleProvider()); } @Test public void testRoundTrip() throws IOException { final TestRunner testRunner = TestRunners.newTestRunner(new EncryptContent()); testRunner.setProperty(EncryptContent.PASSWORD, "short"); testRunner.setProperty(EncryptContent.KEY_DERIVATION_FUNCTION, KeyDerivationFunction.NIFI_LEGACY.name()); // Must be allowed or short password will cause validation errors testRunner.setProperty(EncryptContent.ALLOW_WEAK_CRYPTO, "allowed"); for (final EncryptionMethod encryptionMethod : EncryptionMethod.values()) { if (encryptionMethod.isUnlimitedStrength()) { continue; // cannot test unlimited strength in unit tests because it's not enabled by the JVM by default. 
} // KeyedCiphers tested in TestEncryptContentGroovy.groovy if (encryptionMethod.isKeyedCipher()) { continue; } logger.info("Attempting {}", encryptionMethod.name()); testRunner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, encryptionMethod.name()); testRunner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); testRunner.enqueue(Paths.get("src/test/resources/hello.txt")); testRunner.clearTransferState(); testRunner.run(); testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1); MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0); testRunner.assertQueueEmpty(); testRunner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE); testRunner.enqueue(flowFile); testRunner.clearTransferState(); testRunner.run(); testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1); logger.info("Successfully decrypted {}", encryptionMethod.name()); flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0); flowFile.assertContentEquals(new File("src/test/resources/hello.txt")); } } @Test public void testPGPCiphersRoundTrip() { final TestRunner testRunner = TestRunners.newTestRunner(new EncryptContent()); testRunner.setProperty(EncryptContent.PASSWORD, "passwordpassword"); // a >=16 characters password testRunner.setProperty(EncryptContent.KEY_DERIVATION_FUNCTION, KeyDerivationFunction.NONE.name()); List<String> pgpAlgorithms = new ArrayList<>(); pgpAlgorithms.add("PGP"); pgpAlgorithms.add("PGP_ASCII_ARMOR"); for (String algorithm : pgpAlgorithms) { testRunner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, algorithm); for (AllowableValue cipher : Objects.requireNonNull(getPGPCipherList())) { testRunner.setProperty(EncryptContent.PGP_SYMMETRIC_ENCRYPTION_CIPHER, cipher.getValue()); testRunner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); testRunner.enqueue("A cool plaintext!"); testRunner.clearTransferState(); testRunner.run(); 
testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1); MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0); testRunner.assertQueueEmpty(); testRunner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE); // Encryption cipher is inferred from ciphertext, this property deliberately set a fixed cipher to prove // the output will still be correct testRunner.setProperty(EncryptContent.PGP_SYMMETRIC_ENCRYPTION_CIPHER, "1"); testRunner.enqueue(flowFile); testRunner.clearTransferState(); testRunner.run(); testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1); flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0); flowFile.assertContentEquals("A cool plaintext!"); } } } @Test public void testPGPCiphers() throws Exception { final TestRunner testRunner = TestRunners.newTestRunner(new EncryptContent()); testRunner.setProperty(EncryptContent.PASSWORD, "passwordpassword"); // a >= 16 characters password testRunner.setProperty(EncryptContent.KEY_DERIVATION_FUNCTION, KeyDerivationFunction.NONE.name()); List<String> pgpAlgorithms = new ArrayList<>(); pgpAlgorithms.add("PGP"); pgpAlgorithms.add("PGP_ASCII_ARMOR"); for (String algorithm : pgpAlgorithms) { testRunner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, algorithm); for (AllowableValue cipher : Objects.requireNonNull(getPGPCipherList())) { testRunner.setProperty(EncryptContent.PGP_SYMMETRIC_ENCRYPTION_CIPHER, cipher.getValue()); testRunner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); testRunner.enqueue("A cool plaintext!"); testRunner.clearTransferState(); testRunner.run(); testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1); MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0); testRunner.assertQueueEmpty(); // Other than the round trip, checks that the provided cipher is actually used, inferring it from the ciphertext 
InputStream ciphertext = new ByteArrayInputStream(flowFile.toByteArray()); BCPGInputStream pgpin = new BCPGInputStream(getDecoderStream(ciphertext)); assert pgpin.nextPacketTag() == 3; assert ((SymmetricKeyEncSessionPacket) pgpin.readPacket()).getEncAlgorithm() == Integer.valueOf(cipher.getValue()); pgpin.close(); } } } @Test public void testShouldDetermineMaxKeySizeForAlgorithms() throws IOException { // Arrange final String AES_ALGORITHM = EncryptionMethod.MD5_256AES.getAlgorithm(); final String DES_ALGORITHM = EncryptionMethod.MD5_DES.getAlgorithm(); final int AES_MAX_LENGTH = PasswordBasedEncryptor.supportsUnlimitedStrength() ? Integer.MAX_VALUE : 128; final int DES_MAX_LENGTH = PasswordBasedEncryptor.supportsUnlimitedStrength() ? Integer.MAX_VALUE : 64; // Act int determinedAESMaxLength = PasswordBasedEncryptor.getMaxAllowedKeyLength(AES_ALGORITHM); int determinedTDESMaxLength = PasswordBasedEncryptor.getMaxAllowedKeyLength(DES_ALGORITHM); // Assert assert determinedAESMaxLength == AES_MAX_LENGTH; assert determinedTDESMaxLength == DES_MAX_LENGTH; } @Test public void testShouldDecryptOpenSSLRawSalted() throws IOException { // Arrange Assume.assumeTrue("Test is being skipped due to this JVM lacking JCE Unlimited Strength Jurisdiction Policy file.", PasswordBasedEncryptor.supportsUnlimitedStrength()); final TestRunner testRunner = TestRunners.newTestRunner(new EncryptContent()); final String password = "thisIsABadPassword"; final EncryptionMethod method = EncryptionMethod.MD5_256AES; final KeyDerivationFunction kdf = KeyDerivationFunction.OPENSSL_EVP_BYTES_TO_KEY; testRunner.setProperty(EncryptContent.PASSWORD, password); testRunner.setProperty(EncryptContent.KEY_DERIVATION_FUNCTION, kdf.name()); testRunner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, method.name()); testRunner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE); // Act testRunner.enqueue(Paths.get("src/test/resources/TestEncryptContent/salted_raw.enc")); 
testRunner.clearTransferState(); testRunner.run(); // Assert testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1); testRunner.assertQueueEmpty(); MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0); logger.info("Decrypted contents (hex): {}", Hex.encodeHexString(flowFile.toByteArray())); logger.info("Decrypted contents: {}", new String(flowFile.toByteArray(), "UTF-8")); // Assert flowFile.assertContentEquals(new File("src/test/resources/TestEncryptContent/plain.txt")); } @Test public void testShouldDecryptOpenSSLRawUnsalted() throws IOException { // Arrange Assume.assumeTrue("Test is being skipped due to this JVM lacking JCE Unlimited Strength Jurisdiction Policy file.", PasswordBasedEncryptor.supportsUnlimitedStrength()); final TestRunner testRunner = TestRunners.newTestRunner(new EncryptContent()); final String password = "thisIsABadPassword"; final EncryptionMethod method = EncryptionMethod.MD5_256AES; final KeyDerivationFunction kdf = KeyDerivationFunction.OPENSSL_EVP_BYTES_TO_KEY; testRunner.setProperty(EncryptContent.PASSWORD, password); testRunner.setProperty(EncryptContent.KEY_DERIVATION_FUNCTION, kdf.name()); testRunner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, method.name()); testRunner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE); // Act testRunner.enqueue(Paths.get("src/test/resources/TestEncryptContent/unsalted_raw.enc")); testRunner.clearTransferState(); testRunner.run(); // Assert testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1); testRunner.assertQueueEmpty(); MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0); logger.info("Decrypted contents (hex): {}", Hex.encodeHexString(flowFile.toByteArray())); logger.info("Decrypted contents: {}", new String(flowFile.toByteArray(), "UTF-8")); // Assert flowFile.assertContentEquals(new File("src/test/resources/TestEncryptContent/plain.txt")); } @Test 
public void testDecryptShouldDefaultToBcrypt() throws IOException { // Arrange final TestRunner testRunner = TestRunners.newTestRunner(new EncryptContent()); // Assert Assert.assertEquals("Decrypt should default to Legacy KDF", testRunner.getProcessor().getPropertyDescriptor(EncryptContent.KEY_DERIVATION_FUNCTION .getName()).getDefaultValue(), KeyDerivationFunction.BCRYPT.name()); } @Test public void testDecryptSmallerThanSaltSize() { final TestRunner runner = TestRunners.newTestRunner(EncryptContent.class); runner.setProperty(EncryptContent.PASSWORD, "Hello, World!"); runner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE); runner.setProperty(EncryptContent.KEY_DERIVATION_FUNCTION, KeyDerivationFunction.NIFI_LEGACY.name()); runner.enqueue(new byte[4]); runner.run(); runner.assertAllFlowFilesTransferred(EncryptContent.REL_FAILURE, 1); } @Test public void testPGPDecrypt() throws IOException { final TestRunner runner = TestRunners.newTestRunner(EncryptContent.class); runner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, EncryptionMethod.PGP_ASCII_ARMOR.name()); runner.setProperty(EncryptContent.PASSWORD, "Hello, World!"); runner.enqueue(Paths.get("src/test/resources/TestEncryptContent/text.txt.asc")); runner.run(); runner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1); final MockFlowFile flowFile = runner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0); flowFile.assertContentEquals(Paths.get("src/test/resources/TestEncryptContent/text.txt")); } @Test public void testShouldValidatePGPPublicKeyringRequiresUserId() { // Arrange final TestRunner runner = TestRunners.newTestRunner(EncryptContent.class); Collection<ValidationResult> results; MockProcessContext pc; runner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, EncryptionMethod.PGP.name()); 
runner.setProperty(EncryptContent.PUBLIC_KEYRING, "src/test/resources/TestEncryptContent/pubring.gpg"); runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); // Act results = pc.validate(); // Assert Assert.assertEquals(1, results.size()); ValidationResult vr = (ValidationResult) results.toArray()[0]; String expectedResult = " encryption without a " + EncryptContent.PASSWORD.getDisplayName() + " requires both " + EncryptContent.PUBLIC_KEYRING.getDisplayName() + " and " + EncryptContent.PUBLIC_KEY_USERID.getDisplayName(); String message = "'" + vr.toString() + "' contains '" + expectedResult + "'"; Assert.assertTrue(message, vr.toString().contains(expectedResult)); } @Test public void testShouldValidatePGPPublicKeyringExists() { // Arrange final TestRunner runner = TestRunners.newTestRunner(EncryptContent.class); Collection<ValidationResult> results; MockProcessContext pc; runner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, EncryptionMethod.PGP.name()); runner.setProperty(EncryptContent.PUBLIC_KEYRING, "src/test/resources/TestEncryptContent/pubring.gpg.missing"); runner.setProperty(EncryptContent.PUBLIC_KEY_USERID, "USERID"); runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); // Act results = pc.validate(); // Assert Assert.assertEquals(1, results.size()); ValidationResult vr = (ValidationResult) results.toArray()[0]; String expectedResult = "java.io.FileNotFoundException"; String message = "'" + vr.toString() + "' contains '" + expectedResult + "'"; Assert.assertTrue(message, vr.toString().contains(expectedResult)); } @Test public void testShouldValidatePGPPublicKeyringIsProperFormat() { // Arrange final TestRunner runner = TestRunners.newTestRunner(EncryptContent.class); Collection<ValidationResult> results; MockProcessContext pc; runner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); 
runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, EncryptionMethod.PGP.name()); runner.setProperty(EncryptContent.PUBLIC_KEYRING, "src/test/resources/TestEncryptContent/text.txt"); runner.setProperty(EncryptContent.PUBLIC_KEY_USERID, "USERID"); runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); // Act results = pc.validate(); // Assert Assert.assertEquals(1, results.size()); ValidationResult vr = (ValidationResult) results.toArray()[0]; String expectedResult = " java.io.IOException: invalid header encountered"; String message = "'" + vr.toString() + "' contains '" + expectedResult + "'"; Assert.assertTrue(message, vr.toString().contains(expectedResult)); } @Test public void testShouldValidatePGPPublicKeyringContainsUserId() { // Arrange final TestRunner runner = TestRunners.newTestRunner(EncryptContent.class); Collection<ValidationResult> results; MockProcessContext pc; runner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, EncryptionMethod.PGP.name()); runner.setProperty(EncryptContent.PUBLIC_KEYRING, "src/test/resources/TestEncryptContent/pubring.gpg"); runner.setProperty(EncryptContent.PUBLIC_KEY_USERID, "USERID"); runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); // Act results = pc.validate(); // Assert Assert.assertEquals(1, results.size()); ValidationResult vr = (ValidationResult) results.toArray()[0]; String expectedResult = "PGPException: Could not find a public key with the given userId"; String message = "'" + vr.toString() + "' contains '" + expectedResult + "'"; Assert.assertTrue(message, vr.toString().contains(expectedResult)); } @Test public void testShouldExtractPGPPublicKeyFromKeyring() { // Arrange final TestRunner runner = TestRunners.newTestRunner(EncryptContent.class); Collection<ValidationResult> results; MockProcessContext pc; runner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); 
runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, EncryptionMethod.PGP.name()); runner.setProperty(EncryptContent.PUBLIC_KEYRING, "src/test/resources/TestEncryptContent/pubring.gpg"); runner.setProperty(EncryptContent.PUBLIC_KEY_USERID, "NiFi PGP Test Key (Short test key for NiFi PGP unit tests) <alopresto.apache+test@gmail.com>"); runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); // Act results = pc.validate(); // Assert Assert.assertEquals(0, results.size()); } @Test public void testValidation() { final TestRunner runner = TestRunners.newTestRunner(EncryptContent.class); Collection<ValidationResult> results; MockProcessContext pc; runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); results = pc.validate(); Assert.assertEquals(results.toString(), 1, results.size()); for (final ValidationResult vr : results) { Assert.assertTrue(vr.toString() .contains(EncryptContent.PASSWORD.getDisplayName() + " is required when using algorithm")); } runner.enqueue(new byte[0]); final EncryptionMethod encryptionMethod = EncryptionMethod.MD5_128AES; runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, encryptionMethod.name()); runner.setProperty(EncryptContent.KEY_DERIVATION_FUNCTION, KeyDerivationFunction.NIFI_LEGACY.name()); runner.setProperty(EncryptContent.PASSWORD, "ThisIsAPasswordThatIsLongerThanSixteenCharacters"); pc = (MockProcessContext) runner.getProcessContext(); results = pc.validate(); if (!PasswordBasedEncryptor.supportsUnlimitedStrength()) { logger.info(results.toString()); Assert.assertEquals(1, results.size()); for (final ValidationResult vr : results) { Assert.assertTrue( "Did not successfully catch validation error of a long password in a non-JCE Unlimited Strength environment", vr.toString().contains("Password length greater than " + CipherUtility.getMaximumPasswordLengthForAlgorithmOnLimitedStrengthCrypto(encryptionMethod) + " characters is not supported by this JVM due to lacking JCE 
Unlimited Strength Jurisdiction Policy files.")); } } else { Assert.assertEquals(results.toString(), 0, results.size()); } runner.removeProperty(EncryptContent.PASSWORD); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, EncryptionMethod.PGP.name()); runner.setProperty(EncryptContent.PUBLIC_KEYRING, "src/test/resources/TestEncryptContent/text.txt"); runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); results = pc.validate(); Assert.assertEquals(1, results.size()); for (final ValidationResult vr : results) { Assert.assertTrue(vr.toString().contains( " encryption without a " + EncryptContent.PASSWORD.getDisplayName() + " requires both " + EncryptContent.PUBLIC_KEYRING.getDisplayName() + " and " + EncryptContent.PUBLIC_KEY_USERID.getDisplayName())); } // Legacy tests moved to individual tests to comply with new library // TODO: Move secring tests out to individual as well runner.removeProperty(EncryptContent.PUBLIC_KEYRING); runner.removeProperty(EncryptContent.PUBLIC_KEY_USERID); runner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE); runner.setProperty(EncryptContent.PRIVATE_KEYRING, "src/test/resources/TestEncryptContent/secring.gpg"); runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); results = pc.validate(); Assert.assertEquals(1, results.size()); for (final ValidationResult vr : results) { Assert.assertTrue(vr.toString().contains( " decryption without a " + EncryptContent.PASSWORD.getDisplayName() + " requires both " + EncryptContent.PRIVATE_KEYRING.getDisplayName() + " and " + EncryptContent.PRIVATE_KEYRING_PASSPHRASE.getDisplayName())); } runner.setProperty(EncryptContent.PRIVATE_KEYRING_PASSPHRASE, "PASSWORD"); runner.enqueue(new byte[0]); pc = (MockProcessContext) runner.getProcessContext(); results = pc.validate(); Assert.assertEquals(1, results.size()); for (final ValidationResult vr : results) { Assert.assertTrue(vr.toString().contains( " could not be opened with the 
provided " + EncryptContent.PRIVATE_KEYRING_PASSPHRASE.getDisplayName())); } runner.removeProperty(EncryptContent.PRIVATE_KEYRING_PASSPHRASE); // This configuration is invalid because PGP_SYMMETRIC_ENCRYPTION_CIPHER is outside the allowed [1-13] interval runner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, "PGP"); runner.setProperty(EncryptContent.PASSWORD, "PASSWORD"); runner.setProperty(EncryptContent.PGP_SYMMETRIC_ENCRYPTION_CIPHER, "256"); runner.assertNotValid(); // This configuration is invalid because PGP_SYMMETRIC_ENCRYPTION_CIPHER points to SAFER cipher which is unsupported runner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, "PGP"); runner.setProperty(EncryptContent.PASSWORD, "PASSWORD"); runner.setProperty(EncryptContent.PGP_SYMMETRIC_ENCRYPTION_CIPHER, "5"); runner.assertNotValid(); // This configuration is valid runner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, "PGP"); runner.setProperty(EncryptContent.PASSWORD, "PASSWORD"); runner.removeProperty(EncryptContent.PGP_SYMMETRIC_ENCRYPTION_CIPHER); runner.assertValid(); // This configuration is valid because the default value will be used for PGP_SYMMETRIC_ENCRYPTION_CIPHER runner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE); runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, "PGP"); runner.setProperty(EncryptContent.PASSWORD, "PASSWORD"); runner.removeProperty(EncryptContent.PGP_SYMMETRIC_ENCRYPTION_CIPHER); runner.assertValid(); } }
apache-2.0
monetate/druid
extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/variance/VarianceAggregatorFactory.java
12491
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.query.aggregation.variance; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.query.aggregation.AggregateCombiner; import org.apache.druid.query.aggregation.Aggregator; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.AggregatorFactoryNotMergeableException; import org.apache.druid.query.aggregation.AggregatorUtil; import org.apache.druid.query.aggregation.BufferAggregator; import org.apache.druid.query.aggregation.NoopAggregator; import org.apache.druid.query.aggregation.NoopBufferAggregator; import org.apache.druid.query.aggregation.ObjectAggregateCombiner; import org.apache.druid.query.aggregation.VectorAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; import org.apache.druid.segment.ColumnInspector; import 
org.apache.druid.segment.ColumnSelectorFactory; import org.apache.druid.segment.ColumnValueSelector; import org.apache.druid.segment.NilColumnValueSelector; import org.apache.druid.segment.column.ColumnCapabilities; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.vector.VectorColumnSelectorFactory; import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Objects; /** * */ @JsonTypeName("variance") public class VarianceAggregatorFactory extends AggregatorFactory { private static final String VARIANCE_TYPE_NAME = "variance"; public static final ColumnType TYPE = ColumnType.ofComplex(VARIANCE_TYPE_NAME); protected final String fieldName; protected final String name; @Nullable protected final String estimator; @Nullable private final String inputType; protected final boolean isVariancePop; @JsonCreator public VarianceAggregatorFactory( @JsonProperty("name") String name, @JsonProperty("fieldName") String fieldName, @JsonProperty("estimator") @Nullable String estimator, @JsonProperty("inputType") @Nullable String inputType ) { Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); this.name = name; this.fieldName = fieldName; this.estimator = estimator; this.isVariancePop = VarianceAggregatorCollector.isVariancePop(estimator); this.inputType = inputType; } @VisibleForTesting VarianceAggregatorFactory(String name, String fieldName) { this(name, fieldName, null, null); } /** * actual type is {@link VarianceAggregatorCollector} */ @Override public ColumnType getIntermediateType() { return TYPE; } @Override public ColumnType getResultType() { return ColumnType.DOUBLE; } @Override public int getMaxIntermediateSize() { return VarianceAggregatorCollector.getMaxIntermediateSize(); } 
@Override public Aggregator factorize(ColumnSelectorFactory metricFactory) { ColumnValueSelector<?> selector = metricFactory.makeColumnValueSelector(fieldName); if (selector instanceof NilColumnValueSelector) { return NoopAggregator.instance(); } final String type = getTypeString(metricFactory); if (ValueType.FLOAT.name().equalsIgnoreCase(type)) { return new VarianceAggregator.FloatVarianceAggregator(selector); } else if (ValueType.DOUBLE.name().equalsIgnoreCase(type)) { return new VarianceAggregator.DoubleVarianceAggregator(selector); } else if (ValueType.LONG.name().equalsIgnoreCase(type)) { return new VarianceAggregator.LongVarianceAggregator(selector); } else if (VARIANCE_TYPE_NAME.equalsIgnoreCase(type) || ValueType.COMPLEX.name().equalsIgnoreCase(type)) { return new VarianceAggregator.ObjectVarianceAggregator(selector); } throw new IAE( "Incompatible type for metric[%s], expected a float, double, long, or variance, but got a %s", fieldName, inputType ); } @Override public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) { ColumnValueSelector<?> selector = metricFactory.makeColumnValueSelector(fieldName); if (selector instanceof NilColumnValueSelector) { return NoopBufferAggregator.instance(); } final String type = getTypeString(metricFactory); if (ValueType.FLOAT.name().equalsIgnoreCase(type)) { return new VarianceBufferAggregator.FloatVarianceAggregator(selector); } else if (ValueType.DOUBLE.name().equalsIgnoreCase(type)) { return new VarianceBufferAggregator.DoubleVarianceAggregator(selector); } else if (ValueType.LONG.name().equalsIgnoreCase(type)) { return new VarianceBufferAggregator.LongVarianceAggregator(selector); } else if (VARIANCE_TYPE_NAME.equalsIgnoreCase(type) || ValueType.COMPLEX.name().equalsIgnoreCase(type)) { return new VarianceBufferAggregator.ObjectVarianceAggregator(selector); } throw new IAE( "Incompatible type for metric[%s], expected a float, double, long, or variance, but got a %s", fieldName, type ); } 
@Override public VectorAggregator factorizeVector(VectorColumnSelectorFactory selectorFactory) { final String type = getTypeString(selectorFactory); if (ValueType.FLOAT.name().equalsIgnoreCase(type)) { return new VarianceFloatVectorAggregator(selectorFactory.makeValueSelector(fieldName)); } else if (ValueType.DOUBLE.name().equalsIgnoreCase(type)) { return new VarianceDoubleVectorAggregator(selectorFactory.makeValueSelector(fieldName)); } else if (ValueType.LONG.name().equalsIgnoreCase(type)) { return new VarianceLongVectorAggregator(selectorFactory.makeValueSelector(fieldName)); } else if (VARIANCE_TYPE_NAME.equalsIgnoreCase(type) || ValueType.COMPLEX.name().equalsIgnoreCase(type)) { return new VarianceObjectVectorAggregator(selectorFactory.makeObjectSelector(fieldName)); } throw new IAE( "Incompatible type for metric[%s], expected a float, double, long, or variance, but got a %s", fieldName, type ); } @Override public boolean canVectorize(ColumnInspector columnInspector) { return true; } @Override public Object combine(Object lhs, Object rhs) { return VarianceAggregatorCollector.combineValues(lhs, rhs); } @Override public AggregateCombiner makeAggregateCombiner() { // VarianceAggregatorFactory.combine() delegates to VarianceAggregatorCollector.combineValues() and it doesn't check // for nulls, so this AggregateCombiner neither. 
return new ObjectAggregateCombiner<VarianceAggregatorCollector>() { private final VarianceAggregatorCollector combined = new VarianceAggregatorCollector(); @Override public void reset(ColumnValueSelector selector) { VarianceAggregatorCollector first = (VarianceAggregatorCollector) selector.getObject(); combined.copyFrom(first); } @Override public void fold(ColumnValueSelector selector) { VarianceAggregatorCollector other = (VarianceAggregatorCollector) selector.getObject(); combined.fold(other); } @Override public Class<VarianceAggregatorCollector> classOfObject() { return VarianceAggregatorCollector.class; } @Override public VarianceAggregatorCollector getObject() { return combined; } }; } @Override public AggregatorFactory getCombiningFactory() { return new VarianceFoldingAggregatorFactory(name, name, estimator); } @Override public List<AggregatorFactory> getRequiredColumns() { return Collections.singletonList(new VarianceAggregatorFactory(fieldName, fieldName, estimator, inputType)); } @Override public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException { if (Objects.equals(getName(), other.getName()) && other instanceof VarianceAggregatorFactory) { return getCombiningFactory(); } else { throw new AggregatorFactoryNotMergeableException(this, other); } } @Override public Comparator getComparator() { return VarianceAggregatorCollector.COMPARATOR; } @Nullable @Override public Object finalizeComputation(@Nullable Object object) { return object == null ? 
NullHandling.defaultDoubleValue() : ((VarianceAggregatorCollector) object).getVariance(isVariancePop); } @Override public Object deserialize(Object object) { if (object instanceof byte[]) { return VarianceAggregatorCollector.from(ByteBuffer.wrap((byte[]) object)); } else if (object instanceof ByteBuffer) { return VarianceAggregatorCollector.from((ByteBuffer) object); } else if (object instanceof String) { return VarianceAggregatorCollector.from( ByteBuffer.wrap(StringUtils.decodeBase64(StringUtils.toUtf8((String) object))) ); } return object; } @JsonProperty public String getFieldName() { return fieldName; } @Override @JsonProperty public String getName() { return name; } @Nullable @JsonProperty public String getEstimator() { return estimator; } @JsonProperty public String getInputType() { return inputType == null ? StringUtils.toLowerCase(ValueType.FLOAT.name()) : inputType; } @Override public List<String> requiredFields() { return Collections.singletonList(fieldName); } @Override public byte[] getCacheKey() { return new CacheKeyBuilder(AggregatorUtil.VARIANCE_CACHE_TYPE_ID) .appendString(fieldName) .appendString(inputType) .appendBoolean(isVariancePop) .appendString(estimator) .build(); } @Override public String toString() { return "VarianceAggregatorFactory{" + "fieldName='" + fieldName + '\'' + ", name='" + name + '\'' + ", estimator='" + estimator + '\'' + ", inputType='" + inputType + '\'' + ", isVariancePop=" + isVariancePop + '}'; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } VarianceAggregatorFactory that = (VarianceAggregatorFactory) o; return isVariancePop == that.isVariancePop && Objects.equals(fieldName, that.fieldName) && Objects.equals(name, that.name) && Objects.equals(estimator, that.estimator) && Objects.equals(inputType, that.inputType); } @Override public int hashCode() { return Objects.hash(fieldName, name, estimator, inputType, isVariancePop); } 
private String getTypeString(ColumnInspector columnInspector) { // todo: make this better... why strings? String type = inputType; if (type == null) { ColumnCapabilities capabilities = columnInspector.getColumnCapabilities(fieldName); if (capabilities != null) { type = StringUtils.toLowerCase(capabilities.getType().name()); } else { type = StringUtils.toLowerCase(ValueType.FLOAT.name()); } } return type; } }
apache-2.0
jerrinot/hazelcast
hazelcast/src/test/java/com/hazelcast/query/impl/extractor/predicates/SingleValueAllPredicatesReflectionTest.java
5195
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.query.impl.extractor.predicates; import com.hazelcast.config.InMemoryFormat; import com.hazelcast.query.Predicates; import com.hazelcast.query.impl.extractor.AbstractExtractionTest; import com.hazelcast.test.HazelcastParallelParametersRunnerFactory; import com.hazelcast.test.HazelcastParametrizedRunner; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized.Parameters; import org.junit.runners.Parameterized.UseParametersRunnerFactory; import java.util.Collection; import static com.hazelcast.config.InMemoryFormat.BINARY; import static com.hazelcast.config.InMemoryFormat.OBJECT; import static com.hazelcast.query.impl.extractor.AbstractExtractionSpecification.Index.NO_INDEX; import static com.hazelcast.query.impl.extractor.AbstractExtractionSpecification.Index.ORDERED; import static com.hazelcast.query.impl.extractor.AbstractExtractionSpecification.Index.UNORDERED; import static com.hazelcast.query.impl.extractor.AbstractExtractionSpecification.Multivalue.SINGLE; import static com.hazelcast.query.impl.extractor.predicates.SingleValueDataStructure.Person; import static com.hazelcast.query.impl.extractor.predicates.SingleValueDataStructure.person; import 
static java.util.Arrays.asList; import static java.util.Collections.singletonList; /** * Tests whether all predicates work with the extraction in attributes that are not collections. * <p> * Extraction mechanism: IN-BUILT REFLECTION EXTRACTION * <p> * This test is parametrised: * - each test is executed separately for BINARY and OBJECT in memory format * - each test is executed separately having each query using NO_INDEX, UNORDERED_INDEX and ORDERED_INDEX. * In this way we are spec-testing most of the reasonable combinations of the configuration of map & extraction. */ @RunWith(HazelcastParametrizedRunner.class) @UseParametersRunnerFactory(HazelcastParallelParametersRunnerFactory.class) @Category({QuickTest.class, ParallelJVMTest.class}) public class SingleValueAllPredicatesReflectionTest extends AbstractExtractionTest { private static final Person BOND = person(130); private static final Person HUNT = person(120); @Parameters(name = "{index}: {0}, {1}, {2}") public static Collection<Object[]> data() { return axes( asList(BINARY, OBJECT), asList(NO_INDEX, UNORDERED, ORDERED), singletonList(SINGLE) ); } public SingleValueAllPredicatesReflectionTest(InMemoryFormat inMemoryFormat, Index index, Multivalue multivalue) { super(inMemoryFormat, index, multivalue); } @Test public void equals_predicate() { execute(Input.of(BOND, HUNT), Query.of(Predicates.equal("brain.iq", 130), mv), Expected.of(BOND)); } @Test public void between_predicate() { execute(Input.of(BOND, HUNT), Query.of(Predicates.between("brain.iq", 115, 135), mv), Expected.of(BOND, HUNT)); } @Test public void greater_less_predicate() { execute(Input.of(BOND, HUNT), Query.of(Predicates.lessEqual("brain.iq", 120), mv), Expected.of(HUNT)); } @Test public void in_predicate() { execute(Input.of(BOND, HUNT), Query.of(Predicates.in("brain.iq", 120, 121, 122), mv), Expected.of(HUNT)); } @Test public void notEqual_predicate() { execute(Input.of(BOND, HUNT), Query.of(Predicates.notEqual("brain.iq", 130), mv), 
                Expected.of(HUNT));
    }

    // SQL LIKE matching on an extracted attribute ("_" matches exactly one character).
    @Test
    public void like_predicate() {
        execute(Input.of(BOND, HUNT),
                Query.of(Predicates.like("brain.name", "brain12_"), mv),
                Expected.of(HUNT));
    }

    // Case-insensitive LIKE ("%" matches any run of characters).
    @Test
    public void ilike_predicate() {
        execute(Input.of(BOND, HUNT),
                Query.of(Predicates.ilike("brain.name", "BR%130"), mv),
                Expected.of(BOND));
    }

    // Regular-expression matching on an extracted attribute.
    @Test
    public void regex_predicate() {
        execute(Input.of(BOND, HUNT),
                Query.of(Predicates.regex("brain.name", "brain13.*"), mv),
                Expected.of(BOND));
    }

    // Querying by entry key via the "__key" attribute; expects BOND to be stored
    // under key 0 (keys are assigned by the AbstractExtractionTest harness — confirm there).
    @Test
    public void key_equal_predicate() {
        execute(Input.of(BOND, HUNT),
                Query.of(Predicates.equal("__key", 0), mv),
                Expected.of(BOND));
    }
}
apache-2.0
emacarron/mybatis-3-no-local-cache
src/main/java/org/apache/ibatis/executor/keygen/package-info.java
717
/* * Copyright 2009-2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * TODO fillme. */ package org.apache.ibatis.executor.keygen;
apache-2.0
aaime/jai-ext
jt-warp/src/main/java/it/geosolutions/jaiext/warp/WarpGeneralOpImage.java
90435
/* JAI-Ext - OpenSource Java Advanced Image Extensions Library * http://www.geo-solutions.it/ * Copyright 2014 GeoSolutions * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package it.geosolutions.jaiext.warp; import it.geosolutions.jaiext.range.Range; import java.awt.image.ColorModel; import java.awt.image.DataBuffer; import java.awt.image.IndexColorModel; import java.awt.image.RenderedImage; import java.awt.image.SampleModel; import java.util.Map; import javax.media.jai.BorderExtender; import javax.media.jai.ImageLayout; import javax.media.jai.Interpolation; import javax.media.jai.PlanarImage; import javax.media.jai.ROI; import javax.media.jai.RasterAccessor; import javax.media.jai.Warp; import javax.media.jai.iterator.RandomIter; import com.sun.media.jai.util.ImageUtil; /** * An <code>OpImage</code> implementing the general "Warp" operation as described in <code>javax.media.jai.operator.WarpDescriptor</code>. It supports * all interpolation cases. * * <p> * The layout for the destination image may be specified via the <code>ImageLayout</code> parameter. However, only those settings suitable for this * operation will be used. The unsuitable settings will be replaced by default suitable values. An optional ROI object and a NoData Range can be used. * If a backward mapped pixel lies outside ROI or it is a NoData, then the destination pixel value is a background value. * * If the input image contains an IndexColorModel, then pixel values are taken directly from the input color table. 
* * @since EA2 * @see javax.media.jai.Warp * @see javax.media.jai.WarpOpImage * @see javax.media.jai.operator.WarpDescriptor * @see WarpRIF * */ @SuppressWarnings("unchecked") final class WarpGeneralOpImage extends WarpOpImage { private static final int NODATA_VALUE = 0; /** Color table representing source's IndexColorModel. */ private byte[][] ctable = null; /** LookupTable used for a faster NoData check */ private byte[][] byteLookupTable; /** * Constructs a WarpGeneralOpImage. * * @param source The source image. * @param extender A BorderExtender, or null. * @param extender A BorderExtender, or null. * @param layout The destination image layout. * @param warp An object defining the warp algorithm. * @param interp An object describing the interpolation method. * @param background background values to set. * @param roi input ROI object used. * @param noData NoData Range object used for checking if NoData are present. */ public WarpGeneralOpImage(RenderedImage source, BorderExtender extender, Map<?, ?> config, ImageLayout layout, Warp warp, Interpolation interp, double[] background, ROI sourceROI, Range noData) { super(source, layout, config, false, extender, interp, warp, background, sourceROI, noData); /* * If the source has IndexColorModel, get the RGB color table. Note, in this case, the source should have an integral data type. And dest * always has data type byte. 
*/ ColorModel srcColorModel = source.getColorModel(); if (srcColorModel instanceof IndexColorModel) { IndexColorModel icm = (IndexColorModel) srcColorModel; ctable = new byte[3][icm.getMapSize()]; icm.getReds(ctable[0]); icm.getGreens(ctable[1]); icm.getBlues(ctable[2]); } /* * Selection of a destinationNoData value for each datatype */ //backgroundValues[b] = backgroundValues[0]; SampleModel sm = source.getSampleModel(); // Source image data Type int srcDataType = sm.getDataType(); // Creation of a lookuptable containing the values to use for no data if (hasNoData && srcDataType == DataBuffer.TYPE_BYTE) { int numBands = getNumBands(); byteLookupTable = new byte[numBands][256]; for (int b = 0; b < numBands; b++) { for (int i = 0; i < byteLookupTable[0].length; i++) { byte value = (byte) i; if (noDataRange.contains(value)) { byteLookupTable[b][i] = (byte) backgroundValues[b]; } else { byteLookupTable[b][i] = value; } } } } // Definition of the padding if (interp != null) { leftPad = interp.getLeftPadding(); rightPad = interp.getRightPadding(); topPad = interp.getTopPadding(); bottomPad = interp.getBottomPadding(); } else { leftPad = rightPad = topPad = bottomPad = 0; } } protected void computeRectByte(PlanarImage src, RasterAccessor dst, final RandomIter roiIter, boolean roiContainsTile) { // Setting of the Random iterator keeping into account the presence of the Borderextender int minX, maxX, minY, maxY; RandomIter iter; if (extended) { // Creation of an iterator on the image extended by the padding factors iter = getRandomIterator(src, leftPad, rightPad, topPad, bottomPad, extender); // Definition of the image bounds minX = src.getMinX(); maxX = src.getMaxX(); minY = src.getMinY(); maxY = src.getMaxY(); } else { // Creation of an iterator on the image iter = getRandomIterator(src, null); // Definition of the image bounds minX = src.getMinX() + leftPad; // Left padding maxX = src.getMaxX() - rightPad; // Right padding minY = src.getMinY() + topPad; // Top padding 
// NOTE(review): remainder of computeRectByte. Structure: out-of-bounds destination pixels are
// filled with backgroundValues (when setBackground is set); in-bounds pixels gather a
// kwidth x kheight kernel of source samples and interpolate. The ctable branch handles
// IndexColorModel sources by expanding palette indices through the color table.
// NOTE(review): in the "ONLY NODATA" branch below (caseC, ctable == null), roiWeight is still
// computed from roiBounds/roiIter but is never read afterwards — dead work on the hot path,
// and it touches roiIter in a path that should not need a ROI iterator; confirm upstream
// that roiIter is always non-null here. The IndexColorModel NoData-only branch does not do this.
// NOTE(review): per-sample comments below say NoData "is substituted with 0", but
// byteLookupTable maps NoData to backgroundValues[b] (see constructor) — 0 only when the
// background value is 0.
maxY = src.getMaxY() - bottomPad; // Bottom padding } final int dstWidth = dst.getWidth(); final int dstHeight = dst.getHeight(); final int dstBands = dst.getNumBands(); final int lineStride = dst.getScanlineStride(); final int pixelStride = dst.getPixelStride(); final int[] bandOffsets = dst.getBandOffsets(); final byte[][] data = dst.getByteDataArrays(); final float[] warpData = new float[2 * dstWidth]; int lineOffset = 0; int kwidth = interp.getWidth(); int kheight = interp.getHeight(); int precH = 1 << interp.getSubsampleBitsH(); int precV = 1 << interp.getSubsampleBitsV(); int[][] samples = new int[kheight][kwidth]; boolean roiWeight; if (ctable == null) { // source does not have IndexColorModel // ONLY VALID DATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { // Interpolation xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSample(xint + i, yint + j, b) & 0xFF; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampByte(interp .interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // ONLY ROI } else if (caseB) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSample(xint + i, yint + j, b) & 0xFF; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil .clampByte(interp.interpolate(samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } pixelOffset += pixelStride; } } // ONLY NODATA } else if (caseC || (hasROI && hasNoData && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // NODATA check // // for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { // If the value is a NODATA, is substituted with 0 samples[j][i] = byteLookupTable[b][iter.getSample(xint + i, yint + j, b) & 0xFF]; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampByte(interp .interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // BOTH ROI AND NODATA } else { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { // If the value is a NODATA, is substituted with 0 samples[j][i] = byteLookupTable[b][iter.getSample( xint + i, yint + j, b) & 0xFF]; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil .clampByte(interp.interpolate(samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } pixelOffset += pixelStride; } } } } else { // source has IndexColorModel // ONLY VALID DATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { byte[] t = ctable[b]; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = t[iter.getSample(xint + i, yint + j, 0) & 0xFF] & 0xFF; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampByte(interp .interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // ONLY ROI } else if (caseB) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { byte[] t = ctable[b]; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = t[iter.getSample(xint + i, yint + j, 0) & 0xFF] & 0xFF; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil .clampByte(interp.interpolate(samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } pixelOffset += pixelStride; } } // ONLY NODATA } else if (caseC || (hasROI && hasNoData && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; // // NODATA check // // for (int b = 0; b < dstBands; b++) { byte[] t = ctable[b]; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { // If the value is a NODATA, is substituted with 0 samples[j][i] = byteLookupTable[b][t[iter.getSample(xint + i, yint + j, 0) & 0xFF] & 0xFF]; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampByte(interp .interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // BOTH ROI AND NODATA } else { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { byte[] t = ctable[b]; // // NODATA check // // for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { // If the value is a NODATA, is substituted with 0 samples[j][i] = byteLookupTable[b][t[iter.getSample(xint + i, yint + j, 0) & 0xFF] & 0xFF]; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil .clampByte(interp.interpolate(samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } pixelOffset += pixelStride; } } } } iter.done(); } protected void computeRectUShort(PlanarImage src, RasterAccessor dst, final RandomIter roiIter, boolean roiContainsTile) { // Setting of the Random iterator keeping into account the presence of the Borderextender int minX, maxX, minY, maxY; RandomIter iter; if (extended) { // Creation of an iterator on the image extended by the padding factors iter = getRandomIterator(src, leftPad, rightPad, topPad, bottomPad, extender); // Definition of the image bounds minX = src.getMinX(); maxX = src.getMaxX(); minY = src.getMinY(); maxY = src.getMaxY(); } else { // Creation of an iterator on the image iter = getRandomIterator(src, null); // Definition of the image bounds minX = src.getMinX() + leftPad; // Left padding maxX = src.getMaxX() - rightPad; // Right padding minY = src.getMinY() + topPad; // Top padding maxY = src.getMaxY() - bottomPad; // Bottom padding } int kwidth = interp.getWidth(); int kheight = interp.getHeight(); int dstWidth = dst.getWidth(); int dstHeight = dst.getHeight(); 
// NOTE(review): remainder of computeRectUShort. Same four-path structure as computeRectByte
// (valid only / ROI only / NoData only / ROI+NoData). Samples are read unsigned via "& 0xFFFF"
// and results clamped with ImageUtil.clampUShort. NoData samples are replaced with
// NODATA_VALUE (0) inside the kernel before interpolation — there is no per-band lookup
// table in this path, unlike the byte case.
int dstBands = dst.getNumBands(); int lineStride = dst.getScanlineStride(); int pixelStride = dst.getPixelStride(); int[] bandOffsets = dst.getBandOffsets(); short[][] data = dst.getShortDataArrays(); int precH = 1 << interp.getSubsampleBitsH(); int precV = 1 << interp.getSubsampleBitsV(); float[] warpData = new float[2 * dstWidth]; int[][] samples = new int[kheight][kwidth]; boolean roiWeight; int lineOffset = 0; // ONLY VALID DATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSample(xint + i, yint + j, b) & 0xFFFF; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampUShort(interp .interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // ONLY ROI } else if (caseB) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background 
color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSample(xint + i, yint + j, b) & 0xFFFF; } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil .clampUShort(interp.interpolate(samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } pixelOffset += pixelStride; } } // ONLY NODATA } else if (caseC || (hasROI && hasNoData && roiContainsTile)) { int value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSample(xint + i, yint + j, b) & 0xFFFF; // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains((short) value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampUShort(interp .interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // BOTH ROI AND NODATA } else { int value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSample(xint + i, yint + j, b) & 0xFFFF; // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains((short) value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil .clampUShort(interp.interpolate(samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } pixelOffset += pixelStride; } } } iter.done(); } protected void computeRectShort(PlanarImage src, RasterAccessor dst, final RandomIter roiIter, boolean roiContainsTile) { // Setting of the Random iterator keeping into account the presence of the Borderextender int minX, maxX, minY, maxY; RandomIter iter; if (extended) { // Creation of an iterator on the image extended by the padding factors iter = getRandomIterator(src, leftPad, rightPad, topPad, bottomPad, extender); // Definition of the image bounds minX = src.getMinX(); maxX = src.getMaxX(); minY = src.getMinY(); maxY = src.getMaxY(); } else { // Creation of an iterator on the image iter = getRandomIterator(src, null); // Definition of the image bounds minX = src.getMinX() + leftPad; // Left padding maxX = src.getMaxX() - rightPad; // Right padding minY = src.getMinY() + topPad; // Top padding maxY = src.getMaxY() - bottomPad; // Bottom padding } int kwidth = interp.getWidth(); int kheight = 
// NOTE(review): remainder of computeRectShort. Same four-path structure; samples are read
// as signed shorts (no "& 0xFFFF" mask, unlike the ushort path) and output is clamped with
// ImageUtil.clampShort. NoData samples collapse to NODATA_VALUE (0) inside the kernel.
interp.getHeight(); int dstWidth = dst.getWidth(); int dstHeight = dst.getHeight(); int dstBands = dst.getNumBands(); int lineStride = dst.getScanlineStride(); int pixelStride = dst.getPixelStride(); int[] bandOffsets = dst.getBandOffsets(); short[][] data = dst.getShortDataArrays(); int precH = 1 << interp.getSubsampleBitsH(); int precV = 1 << interp.getSubsampleBitsV(); float[] warpData = new float[2 * dstWidth]; int[][] samples = new int[kheight][kwidth]; boolean roiWeight; int lineOffset = 0; // ONLY VALID DATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSample(xint + i, yint + j, b); } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampShort(interp .interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // ONLY ROI } else if (caseB) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSample(xint + i, yint + j, b); } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampShort(interp .interpolate(samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } pixelOffset += pixelStride; } } // ONLY NODATA } else if (caseC || (hasROI && hasNoData && roiContainsTile)) { int value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSample(xint + i, yint + j, b); // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains((short) value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampShort(interp .interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // BOTH ROI AND NODATA } else { int value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSample(xint + i, yint + j, b); // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains((short) value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = ImageUtil.clampShort(interp .interpolate(samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (short)backgroundValues[b]; } } } pixelOffset += pixelStride; } } } iter.done(); } protected void computeRectInt(PlanarImage src, RasterAccessor dst, final RandomIter roiIter, boolean roiContainsTile) { // Setting of the Random iterator keeping into account the presence of the Borderextender int minX, maxX, minY, maxY; RandomIter iter; if (extended) { // Creation of an iterator on the image extended by the padding factors iter = getRandomIterator(src, leftPad, rightPad, topPad, bottomPad, extender); // Definition of the image bounds minX = src.getMinX(); maxX = src.getMaxX(); minY = src.getMinY(); maxY = src.getMaxY(); } else { // Creation of an iterator on the image iter = getRandomIterator(src, null); // Definition of the image bounds minX = src.getMinX() + leftPad; // Left padding maxX = src.getMaxX() - rightPad; // Right padding minY = src.getMinY() + topPad; // Top padding maxY = src.getMaxY() - bottomPad; // Bottom padding } int kwidth = interp.getWidth(); int kheight = interp.getHeight(); int 
// NOTE(review): remainder of computeRectInt (plus the head of computeRectFloat at the end).
// Same four-path structure; unlike the byte/ushort/short paths, the interpolation result is
// stored directly into the int destination with NO clamping call. NoData samples collapse to
// NODATA_VALUE (0) inside the kernel before interpolation.
dstWidth = dst.getWidth(); int dstHeight = dst.getHeight(); int dstBands = dst.getNumBands(); int lineStride = dst.getScanlineStride(); int pixelStride = dst.getPixelStride(); int[] bandOffsets = dst.getBandOffsets(); int[][] data = dst.getIntDataArrays(); int precH = 1 << interp.getSubsampleBitsH(); int precV = 1 << interp.getSubsampleBitsV(); float[] warpData = new float[2 * dstWidth]; int[][] samples = new int[kheight][kwidth]; boolean roiWeight; int lineOffset = 0; // ONLY VALID DATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (int)backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSample(xint + i, yint + j, b); } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // ONLY ROI } else if (caseB) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (int)backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSample(xint + i, yint + j, b); } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate( samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (int)backgroundValues[b]; } } } pixelOffset += pixelStride; } } // ONLY NODATA } else if (caseC || (hasROI && hasNoData && roiContainsTile)) { int value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += 
lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (int)backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSample(xint + i, yint + j, b); // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains(value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // BOTH ROI AND NODATA } else { int value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); int xfrac = (int) ((sx - xint) * precH); int yfrac = (int) ((sy - yint) * precV); if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (int)backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSample(xint + i, yint + j, b); // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains(value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate( samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (int)backgroundValues[b]; } } } pixelOffset += pixelStride; } } } iter.done(); } protected void computeRectFloat(PlanarImage src, RasterAccessor dst, final RandomIter roiIter, boolean roiContainsTile) { // Setting of the Random iterator keeping into account the presence of the Borderextender int minX, maxX, minY, maxY; RandomIter iter; if (extended) { // Creation of an iterator on the image extended by the padding factors iter = getRandomIterator(src, leftPad, rightPad, topPad, bottomPad, extender); // Definition of the image bounds minX = src.getMinX(); maxX = src.getMaxX(); minY = src.getMinY(); maxY = src.getMaxY(); } else { // Creation of an iterator on the image iter = getRandomIterator(src, null); // Definition of the image bounds minX = src.getMinX() + leftPad; // Left padding maxX = src.getMaxX() - rightPad; // Right padding minY = src.getMinY() + topPad; // Top padding maxY = src.getMaxY() - bottomPad; // Bottom padding } int kwidth = interp.getWidth(); int kheight = interp.getHeight(); int dstWidth = dst.getWidth(); int 
dstHeight = dst.getHeight(); int dstBands = dst.getNumBands(); int lineStride = dst.getScanlineStride(); int pixelStride = dst.getPixelStride(); int[] bandOffsets = dst.getBandOffsets(); float[][] data = dst.getFloatDataArrays(); float[] warpData = new float[2 * dstWidth]; float[][] samples = new float[kheight][kwidth]; boolean roiWeight; int lineOffset = 0; // ONLY VALID DATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); float xfrac = sx - xint; float yfrac = sy - yint; if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (float)backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSampleFloat(xint + i, yint + j, b); } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // ONLY ROI } else if (caseB) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); float xfrac = sx - xint; float yfrac = sy - yint; if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (float)backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSampleFloat(xint + i, yint + j, b); } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate( samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (float)backgroundValues[b]; } } } pixelOffset += pixelStride; } } // ONLY NODATA } else if (caseC || (hasROI && hasNoData && roiContainsTile)) { float value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); float xfrac = sx - xint; float yfrac = sy - yint; if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (float)backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSampleFloat(xint + i, yint + j, b); // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains(value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // BOTH ROI AND NODATA } else { float value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); float xfrac = sx - xint; float yfrac = sy - yint; if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (float)backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSampleFloat(xint + i, yint + j, b); // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains(value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate( samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (float)backgroundValues[b]; } } } pixelOffset += pixelStride; } } } iter.done(); } protected void computeRectDouble(PlanarImage src, RasterAccessor dst, final RandomIter roiIter, boolean roiContainsTile) { // Setting of the Random iterator keeping into account the presence of the Borderextender int minX, maxX, minY, maxY; RandomIter iter; if (extended) { // Creation of an iterator on the image extended by the padding factors iter = getRandomIterator(src, leftPad, rightPad, topPad, bottomPad, extender); // Definition of the image bounds minX = src.getMinX(); maxX = src.getMaxX(); minY = src.getMinY(); maxY = src.getMaxY(); } else { // Creation of an iterator on the image iter = getRandomIterator(src, null); // Definition of the image bounds minX = src.getMinX() + leftPad; // Left padding maxX = src.getMaxX() - rightPad; // Right padding minY = src.getMinY() + topPad; // Top padding maxY = src.getMaxY() - bottomPad; // Bottom padding } int kwidth = interp.getWidth(); int kheight = interp.getHeight(); int dstWidth = dst.getWidth(); 
int dstHeight = dst.getHeight(); int dstBands = dst.getNumBands(); int lineStride = dst.getScanlineStride(); int pixelStride = dst.getPixelStride(); int[] bandOffsets = dst.getBandOffsets(); double[][] data = dst.getDoubleDataArrays(); float[] warpData = new float[2 * dstWidth]; double[][] samples = new double[kheight][kwidth]; boolean roiWeight; int lineOffset = 0; // ONLY VALID DATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); float xfrac = sx - xint; float yfrac = sy - yint; if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSampleDouble(xint + i, yint + j, b); } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // ONLY ROI } else if (caseB) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); float xfrac = sx - xint; float yfrac = sy - yint; if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { samples[j][i] = iter.getSampleDouble(xint + i, yint + j, b); } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate( samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } } } pixelOffset += pixelStride; } } // ONLY NODATA } else if (caseC || (hasROI && hasNoData && roiContainsTile)) { double value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); float xfrac = sx - xint; float yfrac = sy - yint; if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSampleDouble(xint + i, yint + j, b); // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains(value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate(samples, xfrac, yfrac)); } } pixelOffset += pixelStride; } } // BOTH ROI AND NODATA } else { double value = 0; for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { float sx = warpData[count++]; float sy = warpData[count++]; int xint = floor(sx); int yint = floor(sy); float xfrac = sx - xint; float yfrac = sy - yint; if (xint < minX || xint >= maxX || yint < minY || yint >= maxY) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } } } else { xint -= leftPad; yint -= topPad; roiWeight = false; for (int j = 0; j < kheight; j++) { for (int i = 0; i < kwidth; i++) { int x = xint + i; int y = yint + j; if (roiBounds.contains(x, y)) { roiWeight |= roiIter.getSample(x, y, 0) > 0; } } } // // ROI check // // if (roiWeight) { for (int b = 0; b < dstBands; b++) { for (int j = 0; j < kheight; j++) { // // NODATA check // // for (int i = 0; i < kwidth; i++) { value = iter.getSampleDouble(xint + i, yint + j, b); // If the value is a NODATA, is substituted with 0 inside the kernel if (noDataRange.contains(value)) { samples[j][i] = NODATA_VALUE; } else { samples[j][i] = value; } } } data[b][pixelOffset + bandOffsets[b]] = (interp.interpolate( samples, xfrac, yfrac)); } } else { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } } } pixelOffset += pixelStride; } } } iter.done(); } }
apache-2.0
prasi-in/geode
geode-core/src/test/java/org/apache/geode/cache30/ClientMembershipDUnitTest.java
61868
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache30; import static org.apache.geode.distributed.ConfigurationProperties.*; import static org.apache.geode.test.dunit.Assert.*; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Socket; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; import org.awaitility.Awaitility; import org.apache.geode.distributed.internal.ServerLocation; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.test.junit.categories.ClientServerTest; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.InternalGemFireException; import org.apache.geode.Statistics; import org.apache.geode.StatisticsType; import org.apache.geode.cache.AttributesFactory; import org.apache.geode.cache.CacheException; import org.apache.geode.cache.Region; import org.apache.geode.cache.Scope; import org.apache.geode.cache.client.ClientCache; import 
org.apache.geode.cache.client.Pool; import org.apache.geode.cache.client.PoolManager; import org.apache.geode.distributed.*; import org.apache.geode.distributed.internal.InternalDistributedSystem; import org.apache.geode.internal.cache.tier.InternalClientMembership; import org.apache.geode.internal.cache.tier.sockets.AcceptorImpl; import org.apache.geode.internal.cache.tier.sockets.ServerConnection; import org.apache.geode.management.membership.ClientMembership; import org.apache.geode.management.membership.ClientMembershipEvent; import org.apache.geode.management.membership.ClientMembershipListener; import org.apache.geode.test.dunit.Assert; import org.apache.geode.test.dunit.Host; import org.apache.geode.test.dunit.IgnoredException; import org.apache.geode.test.dunit.Invoke; import org.apache.geode.test.dunit.NetworkUtils; import org.apache.geode.test.dunit.SerializableCallable; import org.apache.geode.test.dunit.SerializableRunnable; import org.apache.geode.test.dunit.VM; import org.apache.geode.test.dunit.Wait; import org.apache.geode.test.junit.categories.DistributedTest; /** * Tests the ClientMembership API including ClientMembershipListener. 
* * @since GemFire 4.2.1 */ @Category({DistributedTest.class, ClientServerTest.class}) public class ClientMembershipDUnitTest extends ClientServerTestCase { protected static final boolean CLIENT = true; protected static final boolean SERVER = false; protected static final int JOINED = 0; protected static final int LEFT = 1; protected static final int CRASHED = 2; private static Properties properties; ServerLocation serverLocation = new ServerLocation("127.0.0.1", 0); @Override public final void postTearDownCacheTestCase() throws Exception { Invoke.invokeInEveryVM((() -> cleanup())); } public static void cleanup() { properties = null; InternalClientMembership.unregisterAllListeners(); } private void waitForAcceptsInProgressToBe(final int target) throws Exception { Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { int actual = getAcceptsInProgress(); if (actual == getAcceptsInProgress()) { return true; } return false; }); } protected int getAcceptsInProgress() { StatisticsType st = InternalDistributedSystem.getAnyInstance().findType("CacheServerStats"); Statistics[] s = InternalDistributedSystem.getAnyInstance().findStatisticsByType(st); return s[0].getInt("acceptsInProgress"); } protected static Socket meanSocket; /** * test that a server times out waiting for a handshake that never arrives. 
   */
  @Test
  public void testConnectionTimeout() throws Exception {
    IgnoredException.addIgnoredException("failed accepting client connection");
    final Host host = Host.getHost(0);
    final String hostName = NetworkUtils.getServerHostName(host);
    final VM vm0 = host.getVM(0);
    // Shorten the server's handshake-accept timeout so the test completes quickly.
    System.setProperty(AcceptorImpl.ACCEPT_TIMEOUT_PROPERTY_NAME, "1000");
    try {
      final int port = startBridgeServer(0);
      // AsyncInvocation ai = null;
      try {
        assertTrue(port != 0);
        SerializableRunnable createMeanSocket =
            new CacheSerializableRunnable("Connect to server with socket") {
              public void run2() throws CacheException {
                getCache(); // create a cache so we have stats
                System.out.println("connecting to cache server with socket");
                try {
                  InetAddress addr = InetAddress.getByName(hostName);
                  meanSocket = new Socket(addr, port);
                } catch (Exception e) {
                  throw new RuntimeException("Test failed to connect or was interrupted", e);
                }
              }
            };
        SerializableRunnable closeMeanSocket = new CacheSerializableRunnable("close mean socket") {
          public void run2() throws CacheException {
            System.out.println("closing mean socket");
            try {
              meanSocket.close();
            } catch (IOException ignore) {
            }
          }
        };
        assertEquals(0, getAcceptsInProgress());
        System.out.println("creating mean socket");
        // NOTE(review): the lambda RETURNS the runnable rather than executing it; verify that
        // this overload of vm0.invoke actually runs createMeanSocket in vm0 — TODO confirm.
        vm0.invoke("Connect to server with socket", () -> createMeanSocket);
        try {
          System.out.println("waiting to see it connect on server");
          waitForAcceptsInProgressToBe(1);
        } finally {
          System.out.println("closing mean socket");
          vm0.invoke("close mean socket", () -> closeMeanSocket);
        }
        System.out.println("waiting to see accept to go away on server");
        waitForAcceptsInProgressToBe(0);

        // now try it without a close. Server should timeout the mean connect
        System.out.println("creating mean socket 2");
        vm0.invoke("Connect to server with socket", () -> createMeanSocket);
        try {
          System.out.println("waiting to see it connect on server 2");
          waitForAcceptsInProgressToBe(1);
          System.out.println("waiting to see accept to go away on server without us closing");
          waitForAcceptsInProgressToBe(0);
        } finally {
          System.out.println("closing mean socket 2");
          vm0.invoke("close mean socket", () -> closeMeanSocket);
        }
        // NOTE: a large commented-out "denial of service" variant (opening sockets in a tight
        // loop to stress the server's file descriptors) was removed here for readability; see
        // version control history if it is ever needed again.
      } finally {
        stopBridgeServers(getCache());
      }
    } finally {
      System.getProperties().remove(AcceptorImpl.ACCEPT_TIMEOUT_PROPERTY_NAME);
    }
  }

  /** Runs the basic-events scenario with listener notification forced to be synchronous. */
  @Test
  public void testSynchronousEvents() throws Exception {
    getSystem();
    InternalClientMembership.setForceSynchronous(true);
    try {
      doTestBasicEvents();
    } finally {
      InternalClientMembership.setForceSynchronous(false);
    }
  }

  /**
   * Tests event notification methods on ClientMembership.
   */
  @Test
  public void testBasicEvents() throws Exception {
    getSystem();
    doTestBasicEvents();
  }

  /**
   * Fires every notification type (join/left/crashed, for both a server and a client) at a
   * registered ClientMembershipListener and asserts that exactly the matching callback ran,
   * with the expected member, member id, and isClient flag.
   */
  public void doTestBasicEvents() throws Exception {
    final boolean[] fired = new boolean[3];
    final DistributedMember[] member = new DistributedMember[3];
    final String[] memberId = new String[3];
    final boolean[] isClient = new boolean[3];
    ClientMembershipListener listener = new ClientMembershipListener() {
      public void memberJoined(ClientMembershipEvent event) {
        fired[JOINED] = true;
        member[JOINED] = event.getMember();
        memberId[JOINED] = event.getMemberId();
        isClient[JOINED] = event.isClient();
      }

      public void memberLeft(ClientMembershipEvent event) {
        fired[LEFT] = true;
        member[LEFT] = event.getMember();
        memberId[LEFT] = event.getMemberId();
        isClient[LEFT] = event.isClient();
      }

      public void memberCrashed(ClientMembershipEvent event) {
        fired[CRASHED] = true;
        member[CRASHED] = event.getMember();
        memberId[CRASHED] = event.getMemberId();
        isClient[CRASHED] = event.isClient();
      }
    };
    ClientMembership.registerClientMembershipListener(listener);

    // test JOIN for server
    InternalClientMembership.notifyServerJoined(serverLocation);
    Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS)
        .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> {
          return fired[JOINED];
        });
    assertTrue(fired[JOINED]);
    assertNotNull(member[JOINED]);
    assertFalse(isClient[JOINED]);
    assertFalse(fired[LEFT]);
    assertNull(memberId[LEFT]);
    assertFalse(isClient[LEFT]);
    assertFalse(fired[CRASHED]);
    assertNull(memberId[CRASHED]);
    assertFalse(isClient[CRASHED]);
    resetArraysForTesting(fired, member, memberId, isClient);

    // test JOIN for client
    DistributedMember clientJoined = new TestDistributedMember("clientJoined");
    InternalClientMembership.notifyClientJoined(clientJoined);
    Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS)
        .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> {
          return fired[JOINED];
        });
    assertTrue(fired[JOINED]);
    assertEquals(clientJoined, member[JOINED]);
    assertEquals(clientJoined.getId(), memberId[JOINED]);
    assertTrue(isClient[JOINED]);
    assertFalse(fired[LEFT]);
    assertNull(memberId[LEFT]);
    assertFalse(isClient[LEFT]);
    assertFalse(fired[CRASHED]);
    assertNull(memberId[CRASHED]);
    assertFalse(isClient[CRASHED]);
    resetArraysForTesting(fired, member, memberId, isClient);

    // test LEFT for server
    InternalClientMembership.notifyServerLeft(serverLocation);
    Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS)
        .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> {
          return fired[LEFT];
        });
    assertFalse(fired[JOINED]);
    assertNull(memberId[JOINED]);
    assertFalse(isClient[JOINED]);
    assertTrue(fired[LEFT]);
    assertNotNull(member[LEFT]);
    assertFalse(isClient[LEFT]);
    assertFalse(fired[CRASHED]);
    assertNull(memberId[CRASHED]);
    assertFalse(isClient[CRASHED]);
    resetArraysForTesting(fired, member, memberId, isClient);

    // test LEFT for client
    DistributedMember clientLeft = new TestDistributedMember("clientLeft");
    InternalClientMembership.notifyClientLeft(clientLeft);
    Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS)
        .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> {
          return fired[LEFT];
        });
    assertFalse(fired[JOINED]);
    assertNull(memberId[JOINED]);
    assertFalse(isClient[JOINED]);
    assertTrue(fired[LEFT]);
    assertEquals(clientLeft, member[LEFT]);
    assertEquals(clientLeft.getId(), memberId[LEFT]);
    assertTrue(isClient[LEFT]);
    assertFalse(fired[CRASHED]);
    assertNull(memberId[CRASHED]);
    assertFalse(isClient[CRASHED]);
    resetArraysForTesting(fired, member, memberId, isClient);

    // test CRASHED for server
    InternalClientMembership.notifyServerCrashed(serverLocation);
    Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS)
        .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> {
          return fired[CRASHED];
        });
    assertFalse(fired[JOINED]);
    assertNull(memberId[JOINED]);
    assertFalse(isClient[JOINED]);
    assertFalse(fired[LEFT]);
    assertNull(memberId[LEFT]);
    assertFalse(isClient[LEFT]);
    assertTrue(fired[CRASHED]);
    assertNotNull(member[CRASHED]);
    assertFalse(isClient[CRASHED]);
    resetArraysForTesting(fired, member, memberId, isClient);

    // test CRASHED for client
    DistributedMember clientCrashed = new TestDistributedMember("clientCrashed");
    InternalClientMembership.notifyClientCrashed(clientCrashed);
    Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS)
        .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> {
          return fired[CRASHED];
        });
    assertFalse(fired[JOINED]);
    assertNull(memberId[JOINED]);
    assertFalse(isClient[JOINED]);
    assertFalse(fired[LEFT]);
    assertNull(memberId[LEFT]);
    assertFalse(isClient[LEFT]);
    assertTrue(fired[CRASHED]);
    assertEquals(clientCrashed, member[CRASHED]);
    assertEquals(clientCrashed.getId(), memberId[CRASHED]);
    assertTrue(isClient[CRASHED]);
    resetArraysForTesting(fired, member, memberId, isClient);
  }

  /**
   * Resets all elements of arrays used for listener testing. Boolean values are reset to false.
   * String values are reset to null.
   */
  private void resetArraysForTesting(boolean[] fired, DistributedMember[] member,
      String[] memberId, boolean[] isClient) {
    for (int i = 0; i < fired.length; i++) {
      fired[i] = false;
      member[i] = null;
      memberId[i] = null;
      isClient[i] = false;
    }
  }

  /**
   * Tests unregisterClientMembershipListener to ensure that no further events are delivered to
   * unregistered listeners.
   */
  @Test
  public void testUnregisterClientMembershipListener() throws Exception {
    final boolean[] fired = new boolean[1];
    final DistributedMember[] member = new DistributedMember[1];
    final String[] memberId = new String[1];
    final boolean[] isClient = new boolean[1];
    getSystem();
    ClientMembershipListener listener = new ClientMembershipListener() {
      public void memberJoined(ClientMembershipEvent event) {
        fired[0] = true;
        member[0] = event.getMember();
        memberId[0] = event.getMemberId();
        isClient[0] = event.isClient();
      }

      public void memberLeft(ClientMembershipEvent event) {}

      public void memberCrashed(ClientMembershipEvent event) {}
    };
    ClientMembership.registerClientMembershipListener(listener);

    // fire event to make sure listener is registered
    DistributedMember clientJoined = new TestDistributedMember("clientJoined");
    InternalClientMembership.notifyClientJoined(clientJoined);
    Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS)
        .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> {
          return fired[JOINED];
        });
    assertTrue(fired[0]);
    assertEquals(clientJoined, member[0]);
    assertEquals(clientJoined.getId(), memberId[0]);
    assertTrue(isClient[0]);
    resetArraysForTesting(fired, member, memberId, isClient);
    assertFalse(fired[0]);
    assertNull(memberId[0]);
    assertFalse(isClient[0]);

    // unregister and verify listener is not notified
    ClientMembership.unregisterClientMembershipListener(listener);
    InternalClientMembership.notifyClientJoined(clientJoined);
    // NOTE(review): this await just burns the poll delay (the condition is constant true),
    // giving any stray notification time to arrive before the negative assertions below.
    Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS)
        .pollDelay(100, TimeUnit.MILLISECONDS).until(() -> {
          return true;
        });
    assertFalse(fired[0]);
    assertNull(member[0]);
    assertNull(memberId[0]);
    assertFalse(isClient[0]);
  }

  /**
   * Registers/unregisters four listeners in varying orders and verifies both the reported
   * listener list and that only the currently-registered listeners receive join events
   * (duplicate registration must be ignored).
   */
  @Test
  public void testMultipleListeners() throws Exception {
    final int NUM_LISTENERS = 4;
    final boolean[] fired = new boolean[NUM_LISTENERS];
    final DistributedMember[] member = new DistributedMember[NUM_LISTENERS];
    final String[] memberId = new String[NUM_LISTENERS];
    final boolean[] isClient = new boolean[NUM_LISTENERS];
    getSystem();
    final ClientMembershipListener[] listeners = new ClientMembershipListener[NUM_LISTENERS];
    for (int i = 0; i < NUM_LISTENERS; i++) {
      final int whichListener = i;
      listeners[i] = new ClientMembershipListener() {
        public void memberJoined(ClientMembershipEvent event) {
          // A double notification would trip these assertions (duplicate-registration guard).
          assertFalse(fired[whichListener]);
          assertNull(member[whichListener]);
          assertNull(memberId[whichListener]);
          assertFalse(isClient[whichListener]);
          fired[whichListener] = true;
          member[whichListener] = event.getMember();
          memberId[whichListener] = event.getMemberId();
          isClient[whichListener] = event.isClient();
        }

        public void memberLeft(ClientMembershipEvent event) {}

        public void memberCrashed(ClientMembershipEvent event) {}
      };
    }

    final DistributedMember clientJoined = new TestDistributedMember("clientJoined");
    // nothing registered yet: no listener may fire
    InternalClientMembership.notifyClientJoined(clientJoined);
    for (int i = 0; i < NUM_LISTENERS; i++) {
      synchronized (listeners[i]) {
        listeners[i].wait(20);
      }
      assertFalse(fired[i]);
      assertNull(member[i]);
      assertNull(memberId[i]);
      assertFalse(isClient[i]);
    }

    // attempt to register same listener twice... 2nd reg should be ignored
    // failure would cause an assertion failure in memberJoined impl
    ClientMembership.registerClientMembershipListener(listeners[0]);
    ClientMembership.registerClientMembershipListener(listeners[0]);
    ClientMembershipListener[] registeredListeners =
        ClientMembership.getClientMembershipListeners();
    assertEquals(1, registeredListeners.length);
    assertEquals(listeners[0], registeredListeners[0]);

    ClientMembership.registerClientMembershipListener(listeners[1]);
    registeredListeners = ClientMembership.getClientMembershipListeners();
    assertEquals(2, registeredListeners.length);
    assertEquals(listeners[0], registeredListeners[0]);
    assertEquals(listeners[1], registeredListeners[1]);

    InternalClientMembership.notifyClientJoined(clientJoined);
    synchronized (listeners[1]) {
      if (!fired[1]) {
        listeners[1].wait(2000);
      }
    }
    for (int i = 0; i < NUM_LISTENERS; i++) {
      if (i < 2) {
        assertTrue(fired[i]);
        assertEquals(clientJoined, member[i]);
        assertEquals(clientJoined.getId(), memberId[i]);
        assertTrue(isClient[i]);
      } else {
        assertFalse(fired[i]);
        assertNull(member[i]);
        assertNull(memberId[i]);
        assertFalse(isClient[i]);
      }
    }
    resetArraysForTesting(fired, member, memberId, isClient);

    ClientMembership.unregisterClientMembershipListener(listeners[0]);
    registeredListeners = ClientMembership.getClientMembershipListeners();
    assertEquals(1, registeredListeners.length);
    assertEquals(listeners[1], registeredListeners[0]);

    InternalClientMembership.notifyClientJoined(clientJoined);
    synchronized (listeners[1]) {
      if (!fired[1]) {
        listeners[1].wait(2000);
      }
    }
    for (int i = 0; i < NUM_LISTENERS; i++) {
      if (i == 1) {
        assertTrue(fired[i]);
        assertEquals(clientJoined, member[i]);
        assertEquals(clientJoined.getId(), memberId[i]);
        assertTrue(isClient[i]);
      } else {
        assertFalse(fired[i]);
        assertNull(member[i]);
        assertNull(memberId[i]);
        assertFalse(isClient[i]);
      }
    }
    resetArraysForTesting(fired, member, memberId, isClient);

    ClientMembership.registerClientMembershipListener(listeners[2]);
    ClientMembership.registerClientMembershipListener(listeners[3]);
    registeredListeners = ClientMembership.getClientMembershipListeners();
    assertEquals(3, registeredListeners.length);
    assertEquals(listeners[1], registeredListeners[0]);
    assertEquals(listeners[2], registeredListeners[1]);
    assertEquals(listeners[3], registeredListeners[2]);

    InternalClientMembership.notifyClientJoined(clientJoined);
    synchronized (listeners[3]) {
      if (!fired[3]) {
        listeners[3].wait(2000);
      }
    }
    for (int i = 0; i < NUM_LISTENERS; i++) {
      if (i != 0) {
        assertTrue(fired[i]);
        assertEquals(clientJoined, member[i]);
        assertEquals(clientJoined.getId(), memberId[i]);
        assertTrue(isClient[i]);
      } else {
        assertFalse(fired[i]);
        assertNull(member[i]);
        assertNull(memberId[i]);
        assertFalse(isClient[i]);
      }
    }
    resetArraysForTesting(fired, member, memberId, isClient);

    ClientMembership.registerClientMembershipListener(listeners[0]);
    registeredListeners = ClientMembership.getClientMembershipListeners();
    assertEquals(4, registeredListeners.length);
    assertEquals(listeners[1], registeredListeners[0]);
    assertEquals(listeners[2], registeredListeners[1]);
    assertEquals(listeners[3], registeredListeners[2]);
    assertEquals(listeners[0], registeredListeners[3]);

    InternalClientMembership.notifyClientJoined(clientJoined);
    synchronized (listeners[0]) {
      if (!fired[0]) {
        listeners[0].wait(2000);
      }
    }
    for (int i = 0; i < NUM_LISTENERS; i++) {
      assertTrue(fired[i]);
      assertEquals(clientJoined, member[i]);
      assertEquals(clientJoined.getId(), memberId[i]);
      assertTrue(isClient[i]);
    }
    resetArraysForTesting(fired, member, memberId, isClient);

    ClientMembership.unregisterClientMembershipListener(listeners[3]);
    registeredListeners = ClientMembership.getClientMembershipListeners();
    assertEquals(3, registeredListeners.length);
    assertEquals(listeners[1], registeredListeners[0]);
    assertEquals(listeners[2], registeredListeners[1]);
    assertEquals(listeners[0], registeredListeners[2]);

    InternalClientMembership.notifyClientJoined(clientJoined);
    synchronized (listeners[0]) {
      if (!fired[0]) {
        listeners[0].wait(2000);
      }
    }
    for (int i = 0; i < NUM_LISTENERS; i++) {
      if (i < 3) {
        assertTrue(fired[i]);
        assertEquals(clientJoined, member[i]);
        assertEquals(clientJoined.getId(), memberId[i]);
        assertTrue(isClient[i]);
      } else {
        assertFalse(fired[i]);
        assertNull(member[i]);
        assertNull(memberId[i]);
        assertFalse(isClient[i]);
      }
    }
    resetArraysForTesting(fired, member, memberId, isClient);

    ClientMembership.unregisterClientMembershipListener(listeners[2]);
    registeredListeners = ClientMembership.getClientMembershipListeners();
    assertEquals(2, registeredListeners.length);
    assertEquals(listeners[1], registeredListeners[0]);
    assertEquals(listeners[0], registeredListeners[1]);

    InternalClientMembership.notifyClientJoined(clientJoined);
    synchronized (listeners[0]) {
      if (!fired[0]) {
        listeners[0].wait(2000);
      }
    }
    for (int i = 0; i < NUM_LISTENERS; i++) {
      if (i < 2) {
        assertTrue(fired[i]);
        assertEquals(clientJoined, member[i]);
        assertEquals(clientJoined.getId(), memberId[i]);
        assertTrue(isClient[i]);
      } else {
        assertFalse(fired[i]);
        assertNull(member[i]);
        assertNull(memberId[i]);
        assertFalse(isClient[i]);
      }
    }
    resetArraysForTesting(fired, member, memberId, isClient);

    ClientMembership.unregisterClientMembershipListener(listeners[1]);
    ClientMembership.unregisterClientMembershipListener(listeners[0]);
    registeredListeners = ClientMembership.getClientMembershipListeners();
    assertEquals(0, registeredListeners.length);

    InternalClientMembership.notifyClientJoined(clientJoined);
    for (int i = 0; i < NUM_LISTENERS; i++) {
      synchronized (listeners[i]) {
        listeners[i].wait(20);
      }
      assertFalse(fired[i]);
      assertNull(member[i]);
      assertNull(memberId[i]);
      assertFalse(isClient[i]);
    }
    resetArraysForTesting(fired, member, memberId, isClient);

    ClientMembership.registerClientMembershipListener(listeners[1]);
    registeredListeners = ClientMembership.getClientMembershipListeners();
    assertEquals(1, registeredListeners.length);
    assertEquals(listeners[1], registeredListeners[0]);

    InternalClientMembership.notifyClientJoined(clientJoined);
    synchronized (listeners[1]) {
      if (!fired[1]) {
        listeners[1].wait(2000);
      }
    }
    for (int i = 0; i < NUM_LISTENERS; i++) {
      if (i == 1) {
        assertTrue(fired[i]);
        assertEquals(clientJoined, member[i]);
        assertEquals(clientJoined.getId(), memberId[i]);
        assertTrue(isClient[i]);
      } else {
        assertFalse(fired[i]);
        assertNull(member[i]);
        assertNull(memberId[i]);
        assertFalse(isClient[i]);
      }
    }
  }

  protected static int testClientMembershipEventsInClient_port;

  private static int getTestClientMembershipEventsInClient_port() {
    return testClientMembershipEventsInClient_port;
  }

  /**
   * Tests notification of events in client process. Bridge clients detect server joins when the
   * client connects to the server. If the server crashes or departs gracefully, the client will
   * detect this as a crash.
   */
  @Test
  public void testClientMembershipEventsInClient() throws Exception {
    properties = null;
    getSystem();
    IgnoredException.addIgnoredException("IOException");
    final boolean[] fired = new boolean[3];
    final DistributedMember[] member = new DistributedMember[3];
    final String[] memberId = new String[3];
    final boolean[] isClient = new boolean[3];
    // create and register ClientMembershipListener in controller vm...
ClientMembershipListener listener = new ClientMembershipListener() { public void memberJoined(ClientMembershipEvent event) { System.out.println("[testClientMembershipEventsInClient] memberJoined: " + event); fired[JOINED] = true; member[JOINED] = event.getMember(); memberId[JOINED] = event.getMemberId(); isClient[JOINED] = event.isClient(); } public void memberLeft(ClientMembershipEvent event) { System.out.println("[testClientMembershipEventsInClient] memberLeft: " + event); } public void memberCrashed(ClientMembershipEvent event) { System.out.println("[testClientMembershipEventsInClient] memberCrashed: " + event); fired[CRASHED] = true; member[CRASHED] = event.getMember(); memberId[CRASHED] = event.getMemberId(); isClient[CRASHED] = event.isClient(); } }; ClientMembership.registerClientMembershipListener(listener); final VM vm0 = Host.getHost(0).getVM(0); final String name = this.getUniqueName(); final int[] ports = new int[1]; // create BridgeServer in vm0... vm0.invoke("create Bridge Server", () -> { try { System.out.println("[testClientMembershipEventsInClient] Create BridgeServer"); getSystem(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); Region region = createRegion(name, factory.create()); assertNotNull(region); assertNotNull(getRootRegion().getSubregion(name)); testClientMembershipEventsInClient_port = startBridgeServer(0); } catch (IOException e) { getSystem().getLogWriter().fine(new Exception(e)); fail("Failed to start CacheServer: " + e.getMessage()); } }); // gather details for later creation of ConnectionPool... 
ports[0] = vm0.invoke("getTestClientMembershipEventsInClient_port", () -> ClientMembershipDUnitTest.getTestClientMembershipEventsInClient_port()); assertTrue(ports[0] != 0); DistributedMember serverMember = (DistributedMember) vm0.invoke("get distributed member", () -> ClientMembershipDUnitTest.getDistributedMember()); String serverMemberId = serverMember.toString(); System.out.println("[testClientMembershipEventsInClient] ports[0]=" + ports[0]); System.out.println("[testClientMembershipEventsInClient] serverMember=" + serverMember); System.out.println("[testClientMembershipEventsInClient] serverMemberId=" + serverMemberId); assertFalse(fired[JOINED]); assertNull(member[JOINED]); assertNull(memberId[JOINED]); assertFalse(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); assertFalse(isClient[CRASHED]); // sanity check... System.out.println("[testClientMembershipEventsInClient] sanity check"); InternalClientMembership.notifyServerJoined(serverLocation); Awaitility.await().pollInterval(50, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS) .pollDelay(50, TimeUnit.MILLISECONDS).until(() -> fired[JOINED] || fired[CRASHED]); assertTrue(fired[JOINED]); assertNotNull(member[JOINED]); assertFalse(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); assertFalse(isClient[CRASHED]); resetArraysForTesting(fired, member, memberId, isClient); // create bridge client in controller vm... 
System.out.println("[testClientMembershipEventsInClient] create bridge client"); Properties config = new Properties(); config.setProperty(MCAST_PORT, "0"); config.setProperty(LOCATORS, ""); config.setProperty(ENABLE_NETWORK_PARTITION_DETECTION, "false"); getSystem(config); try { getCache(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(Host.getHost(0)), ports, true, -1, -1, null); createRegion(name, factory.create()); assertNotNull(getRootRegion().getSubregion(name)); } catch (CacheException ex) { Assert.fail("While creating Region on Edge", ex); } Awaitility.await().pollInterval(50, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS) .pollDelay(50, TimeUnit.MILLISECONDS).until(() -> fired[JOINED] || fired[CRASHED]); System.out.println("[testClientMembershipEventsInClient] assert client detected server join"); // first check the getCurrentServers() result ClientCache clientCache = (ClientCache) getCache(); Set<InetSocketAddress> servers = clientCache.getCurrentServers(); assertTrue(!servers.isEmpty()); InetSocketAddress serverAddr = servers.iterator().next(); InetSocketAddress expectedAddr = new InetSocketAddress(serverMember.getHost(), ports[0]); assertEquals(expectedAddr, serverAddr); // now check listener results assertTrue(fired[JOINED]); assertNotNull(member[JOINED]); assertNotNull(memberId[JOINED]); assertFalse(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); assertFalse(isClient[CRASHED]); resetArraysForTesting(fired, member, memberId, isClient); vm0.invoke("Stop BridgeServer", () -> stopBridgeServers(getCache())); Awaitility.await().pollInterval(50, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS) .pollDelay(50, TimeUnit.MILLISECONDS).until(() -> fired[JOINED] || 
fired[CRASHED]); System.out .println("[testClientMembershipEventsInClient] assert client detected server departure"); assertFalse(fired[JOINED]); assertNull(member[JOINED]); assertNull(memberId[JOINED]); assertFalse(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertTrue(fired[CRASHED]); assertNotNull(member[CRASHED]); assertNotNull(memberId[CRASHED]); assertFalse(isClient[CRASHED]); resetArraysForTesting(fired, member, memberId, isClient); // now test that we redisover the bridge server vm0.invoke("Recreate BridgeServer", () -> { try { System.out.println("[testClientMembershipEventsInClient] restarting BridgeServer"); startBridgeServer(ports[0]); } catch (IOException e) { getSystem().getLogWriter().fine(new Exception(e)); fail("Failed to start CacheServer on VM1: " + e.getMessage()); } }); Awaitility.await().pollInterval(50, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS) .pollDelay(50, TimeUnit.MILLISECONDS).until(() -> fired[JOINED] || fired[CRASHED]); System.out .println("[testClientMembershipEventsInClient] assert client detected server recovery"); assertTrue(fired[JOINED]); assertNotNull(member[JOINED]); assertNotNull(memberId[JOINED]); assertFalse(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); } /** * Tests notification of events in server process. Bridge servers detect client joins when the * client connects to the server. */ @Test public void testClientMembershipEventsInServer() throws Exception { final boolean[] fired = new boolean[3]; final DistributedMember[] member = new DistributedMember[3]; final String[] memberId = new String[3]; final boolean[] isClient = new boolean[3]; // create and register ClientMembershipListener in controller vm... 
ClientMembershipListener listener = new ClientMembershipListener() { public void memberJoined(ClientMembershipEvent event) { System.out.println("[testClientMembershipEventsInServer] memberJoined: " + event); fired[JOINED] = true; member[JOINED] = event.getMember(); memberId[JOINED] = event.getMemberId(); isClient[JOINED] = event.isClient(); assertFalse(fired[LEFT] || fired[CRASHED]); } public void memberLeft(ClientMembershipEvent event) { System.out.println("[testClientMembershipEventsInServer] memberLeft: " + event); fired[LEFT] = true; member[LEFT] = event.getMember(); memberId[LEFT] = event.getMemberId(); isClient[LEFT] = event.isClient(); assertFalse(fired[JOINED] || fired[CRASHED]); } public void memberCrashed(ClientMembershipEvent event) { System.out.println("[testClientMembershipEventsInServer] memberCrashed: " + event); fired[CRASHED] = true; member[CRASHED] = event.getMember(); memberId[CRASHED] = event.getMemberId(); isClient[CRASHED] = event.isClient(); assertFalse(fired[JOINED] || fired[LEFT]); } }; ClientMembership.registerClientMembershipListener(listener); final VM vm0 = Host.getHost(0).getVM(0); final String name = this.getUniqueName(); final int[] ports = new int[1]; // create BridgeServer in controller vm... 
System.out.println("[testClientMembershipEventsInServer] Create BridgeServer"); getSystem(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); Region region = createRegion(name, factory.create()); assertNotNull(region); assertNotNull(getRootRegion().getSubregion(name)); ports[0] = startBridgeServer(0); assertTrue(ports[0] != 0); DistributedMember serverMember = getMemberId(); String serverMemberId = serverMember.toString(); System.out.println("[testClientMembershipEventsInServer] ports[0]=" + ports[0]); System.out.println("[testClientMembershipEventsInServer] serverMemberId=" + serverMemberId); System.out.println("[testClientMembershipEventsInServer] serverMember=" + serverMember); assertFalse(fired[JOINED]); assertNull(member[JOINED]); assertNull(memberId[JOINED]); assertFalse(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); assertFalse(isClient[CRASHED]); // sanity check... 
System.out.println("[testClientMembershipEventsInServer] sanity check"); DistributedMember test = new TestDistributedMember("test"); InternalClientMembership.notifyClientJoined(test); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { return fired[JOINED] || fired[LEFT] || fired[CRASHED]; }); assertTrue(fired[JOINED]); assertEquals(test, member[JOINED]); assertEquals(test.getId(), memberId[JOINED]); assertTrue(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); assertFalse(isClient[CRASHED]); resetArraysForTesting(fired, member, memberId, isClient); final Host host = Host.getHost(0); SerializableCallable createConnectionPool = new SerializableCallable("Create connectionPool") { public Object call() { System.out.println("[testClientMembershipEventsInServer] create bridge client"); Properties config = new Properties(); config.setProperty(MCAST_PORT, "0"); config.setProperty(LOCATORS, ""); config.setProperty(ENABLE_NETWORK_PARTITION_DETECTION, "false"); properties = config; DistributedSystem s = getSystem(config); AttributesFactory factory = new AttributesFactory(); Pool pool = ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, true, -1, 2, null); createRegion(name, factory.create()); assertNotNull(getRootRegion().getSubregion(name)); assertTrue(s == basicGetSystem()); // see geode-1078 return getMemberId(); } }; // create bridge client in vm0... 
DistributedMember clientMember = (DistributedMember) vm0.invoke(createConnectionPool); String clientMemberId = clientMember.toString(); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { return fired[JOINED] || fired[LEFT] || fired[CRASHED]; }); System.out.println("[testClientMembershipEventsInServer] assert server detected client join"); assertTrue(fired[JOINED]); assertEquals(member[JOINED] + " should equal " + clientMember, clientMember, member[JOINED]); assertEquals(memberId[JOINED] + " should equal " + clientMemberId, clientMemberId, memberId[JOINED]); assertTrue(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); assertFalse(isClient[CRASHED]); resetArraysForTesting(fired, member, memberId, isClient); pauseForClientToJoin(); vm0.invoke(new SerializableRunnable("Stop bridge client") { public void run() { System.out.println("[testClientMembershipEventsInServer] Stop bridge client"); getRootRegion().getSubregion(name).close(); Map m = PoolManager.getAll(); Iterator mit = m.values().iterator(); while (mit.hasNext()) { Pool p = (Pool) mit.next(); p.destroy(); } } }); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { return fired[JOINED] || fired[LEFT] || fired[CRASHED]; }); System.out.println("[testClientMembershipEventsInServer] assert server detected client left"); assertFalse(fired[JOINED]); assertNull(member[JOINED]); assertNull(memberId[JOINED]); assertFalse(isClient[JOINED]); assertTrue(fired[LEFT]); assertEquals(clientMember, member[LEFT]); assertEquals(clientMemberId, memberId[LEFT]); assertTrue(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); 
assertFalse(isClient[CRASHED]); resetArraysForTesting(fired, member, memberId, isClient); // reconnect bridge client to test for crashed event clientMemberId = vm0.invoke(createConnectionPool).toString(); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { return fired[JOINED] || fired[LEFT] || fired[CRASHED]; }); System.out .println("[testClientMembershipEventsInServer] assert server detected client re-join"); assertTrue(fired[JOINED]); assertEquals(clientMember, member[JOINED]); assertEquals(clientMemberId, memberId[JOINED]); assertTrue(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertFalse(fired[CRASHED]); assertNull(member[CRASHED]); assertNull(memberId[CRASHED]); assertFalse(isClient[CRASHED]); resetArraysForTesting(fired, member, memberId, isClient); pauseForClientToJoin(); ServerConnection.setForceClientCrashEvent(true); try { vm0.invoke(new SerializableRunnable("Stop bridge client") { public void run() { System.out.println("[testClientMembershipEventsInServer] Stop bridge client"); getRootRegion().getSubregion(name).close(); Map m = PoolManager.getAll(); Iterator mit = m.values().iterator(); while (mit.hasNext()) { Pool p = (Pool) mit.next(); p.destroy(); } } }); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { return fired[JOINED] || fired[LEFT] || fired[CRASHED]; }); System.out .println("[testClientMembershipEventsInServer] assert server detected client crashed"); assertFalse(fired[JOINED]); assertNull(member[JOINED]); assertNull(memberId[JOINED]); assertFalse(isClient[JOINED]); assertFalse(fired[LEFT]); assertNull(member[LEFT]); assertNull(memberId[LEFT]); assertFalse(isClient[LEFT]); assertTrue(fired[CRASHED]); assertEquals(clientMember, member[CRASHED]); assertEquals(clientMemberId, 
memberId[CRASHED]); assertTrue(isClient[CRASHED]); } finally { ServerConnection.setForceClientCrashEvent(false); } } /** * The joined event fires when the first client handshake is processed. This pauses long enough to * allow the rest of the client sockets to complete handshaking before making the client leave. * Without doing this subsequent socket handshakes that are processed could fire join events after * departure events and then a departure event again. If you see failures in * testClientMembershipEventsInServer, try increasing this timeout. */ private void pauseForClientToJoin() { Wait.pause(2000); } /** * Tests registration and event notification in conjunction with disconnecting and reconnecting to * DistributedSystem. */ @Test public void testLifecycle() throws Exception { final boolean[] fired = new boolean[3]; final DistributedMember[] member = new DistributedMember[3]; final String[] memberId = new String[3]; final boolean[] isClient = new boolean[3]; // create and register ClientMembershipListener in controller vm... ClientMembershipListener listener = new ClientMembershipListener() { public void memberJoined(ClientMembershipEvent event) { assertFalse(fired[JOINED]); assertNull(member[JOINED]); assertNull(memberId[JOINED]); assertFalse(isClient[JOINED]); fired[JOINED] = true; member[JOINED] = event.getMember(); memberId[JOINED] = event.getMemberId(); isClient[JOINED] = event.isClient(); } public void memberLeft(ClientMembershipEvent event) {} public void memberCrashed(ClientMembershipEvent event) {} }; ClientMembership.registerClientMembershipListener(listener); // create loner in controller vm... 
Properties config = new Properties(); config.setProperty(MCAST_PORT, "0"); config.setProperty(LOCATORS, ""); properties = config; getSystem(config); // assert that event is fired while connected InternalClientMembership.notifyServerJoined(serverLocation); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { return fired[JOINED]; }); assertTrue(fired[JOINED]); assertNotNull(member[JOINED]); assertFalse(isClient[JOINED]); resetArraysForTesting(fired, member, memberId, isClient); // assert that event is NOT fired while disconnected disconnectFromDS(); InternalClientMembership.notifyServerJoined(serverLocation); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).until(() -> { return true; }); assertFalse(fired[JOINED]); assertNull(member[JOINED]); assertNull(memberId[JOINED]); assertFalse(isClient[JOINED]); resetArraysForTesting(fired, member, memberId, isClient); // assert that event is fired again after reconnecting properties = config; InternalDistributedSystem sys = getSystem(config); assertTrue(sys.isConnected()); InternalClientMembership.notifyServerJoined(serverLocation); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { return fired[JOINED]; }); assertTrue(fired[JOINED]); assertNotNull(member[JOINED]); assertFalse(isClient[JOINED]); } /** * Starts up server in controller vm and 4 clients, then calls and tests * ClientMembership.getConnectedClients(). */ @Test public void testGetConnectedClients() throws Exception { final String name = this.getUniqueName(); final int[] ports = new int[1]; IgnoredException.addIgnoredException("ConnectException"); // create BridgeServer in controller vm... 
System.out.println("[testGetConnectedClients] Create BridgeServer"); getSystem(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); Region region = createRegion(name, factory.create()); assertNotNull(region); assertNotNull(getRootRegion().getSubregion(name)); ports[0] = startBridgeServer(0); assertTrue(ports[0] != 0); String serverMemberId = getSystem().getDistributedMember().toString(); System.out.println("[testGetConnectedClients] ports[0]=" + ports[0]); System.out.println("[testGetConnectedClients] serverMemberId=" + serverMemberId); final Host host = Host.getHost(0); SerializableCallable createPool = new SerializableCallable("Create connection pool") { public Object call() { System.out.println("[testGetConnectedClients] create bridge client"); properties = new Properties(); properties.setProperty(MCAST_PORT, "0"); properties.setProperty(LOCATORS, ""); getSystem(properties); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); Pool p = ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, true, -1, -1, null); createRegion(name, factory.create()); assertNotNull(getRootRegion().getSubregion(name)); assertTrue(p.getServers().size() > 0); return getMemberId(); } }; // create bridge client in vm0... 
final String[] clientMemberIdArray = new String[host.getVMCount()]; for (int i = 0; i < host.getVMCount(); i++) { final VM vm = Host.getHost(0).getVM(i); System.out.println("creating pool in vm_" + i); clientMemberIdArray[i] = vm.invoke(createPool).toString(); } Collection clientMemberIds = Arrays.asList(clientMemberIdArray); { final int expectedClientCount = clientMemberIds.size(); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { Map connectedClients = InternalClientMembership.getConnectedClients(false); if (connectedClients == null) { return false; } if (connectedClients.size() != expectedClientCount) { return false; } return true; }); } Map connectedClients = InternalClientMembership.getConnectedClients(false); assertNotNull(connectedClients); assertEquals(clientMemberIds.size(), connectedClients.size()); System.out .println("connectedClients: " + connectedClients + "; clientMemberIds: " + clientMemberIds); for (Iterator iter = connectedClients.keySet().iterator(); iter.hasNext();) { String connectedClient = (String) iter.next(); System.out.println("[testGetConnectedClients] checking for client " + connectedClient); assertTrue(clientMemberIds.contains(connectedClient)); Object[] result = (Object[]) connectedClients.get(connectedClient); System.out.println("[testGetConnectedClients] result: " + (result == null ? "none" : String.valueOf(result[0]) + "; connections=" + result[1])); } } /** * Starts up 4 server and the controller vm as a client, then calls and tests * ClientMembership.getConnectedServers(). 
*/ @Test public void testGetConnectedServers() throws Exception { final Host host = Host.getHost(0); final String name = this.getUniqueName(); final int[] ports = new int[host.getVMCount()]; for (int i = 0; i < host.getVMCount(); i++) { final int whichVM = i; final VM vm = Host.getHost(0).getVM(i); vm.invoke("Create bridge server", () -> { // create BridgeServer in controller vm... System.out.println("[testGetConnectedServers] Create BridgeServer"); getSystem(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); Region region = createRegion(name + "_" + whichVM, factory.create()); assertNotNull(region); assertNotNull(getRootRegion().getSubregion(name + "_" + whichVM)); region.put("KEY-1", "VAL-1"); try { testGetConnectedServers_port = startBridgeServer(0); } catch (IOException e) { org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .error("startBridgeServer threw IOException", e); fail("startBridgeServer threw IOException " + e.getMessage()); } assertTrue(testGetConnectedServers_port != 0); System.out.println("[testGetConnectedServers] port=" + ports[whichVM]); System.out.println("[testGetConnectedServers] serverMemberId=" + getDistributedMember()); }); ports[whichVM] = vm.invoke("getTestGetConnectedServers_port", () -> ClientMembershipDUnitTest.getTestGetConnectedServers_port()); assertTrue(ports[whichVM] != 0); } System.out.println("[testGetConnectedServers] create bridge client"); Properties config = new Properties(); config.setProperty(MCAST_PORT, "0"); config.setProperty(LOCATORS, ""); properties = config; getSystem(config); getCache(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); for (int i = 0; i < ports.length; i++) { System.out.println("[testGetConnectedServers] creating connectionpool for " + NetworkUtils.getServerHostName(host) + " " + ports[i]); int[] thisServerPorts = new int[] {ports[i]}; ClientServerTestCase.configureConnectionPoolWithName(factory, 
NetworkUtils.getServerHostName(host), thisServerPorts, false, -1, -1, null, "pooly" + i); Region region = createRegion(name + "_" + i, factory.create()); assertNotNull(getRootRegion().getSubregion(name + "_" + i)); region.get("KEY-1"); } final int expectedVMCount = host.getVMCount(); Awaitility.await().pollInterval(100, TimeUnit.MILLISECONDS) .pollDelay(100, TimeUnit.MILLISECONDS).timeout(300, TimeUnit.SECONDS).until(() -> { if (PoolManager.getAll().size() != expectedVMCount) { return false; } Map connectedServers = InternalClientMembership.getConnectedServers(); if (connectedServers == null) { return false; } if (connectedServers.size() != expectedVMCount) { return false; } return true; }); assertEquals(host.getVMCount(), PoolManager.getAll().size()); Map connectedServers = InternalClientMembership.getConnectedServers(); assertNotNull(connectedServers); assertEquals(host.getVMCount(), connectedServers.size()); for (Iterator iter = connectedServers.keySet().iterator(); iter.hasNext();) { String connectedServer = (String) iter.next(); System.out.println("[testGetConnectedServers] value for connectedServer: " + connectedServers.get(connectedServer)); } } protected static int testGetConnectedServers_port; private static int getTestGetConnectedServers_port() { return testGetConnectedServers_port; } public Properties getDistributedSystemProperties() { if (properties == null) { properties = new Properties(); properties.put(ConfigurationProperties.ENABLE_NETWORK_PARTITION_DETECTION, "false"); } return properties; } /** * Tests getConnectedClients(boolean onlyClientsNotifiedByThisServer) where * onlyClientsNotifiedByThisServer is true. 
 */
@Test
public void testGetNotifiedClients() throws Exception {
    final Host host = Host.getHost(0);
    final String name = this.getUniqueName();
    final int[] ports = new int[host.getVMCount()];
    // Phase 1: start a bridge (cache) server in every DUnit VM and record its port.
    for (int i = 0; i < host.getVMCount(); i++) {
        final int whichVM = i;
        final VM vm = Host.getHost(0).getVM(i);
        vm.invoke(new CacheSerializableRunnable("Create bridge server") {
            public void run2() throws CacheException {
                // create BridgeServer in controller vm...
                System.out.println("[testGetNotifiedClients] Create BridgeServer");
                getSystem();
                AttributesFactory factory = new AttributesFactory();
                Region region = createRegion(name, factory.create());
                assertNotNull(region);
                assertNotNull(getRootRegion().getSubregion(name));
                // seed an entry so the client can fetch/register interest on it later
                region.put("KEY-1", "VAL-1");
                try {
                    // port 0 = let the server pick an ephemeral port; published via the static below
                    testGetNotifiedClients_port = startBridgeServer(0);
                } catch (IOException e) {
                    org.apache.geode.test.dunit.LogWriterUtils.getLogWriter()
                        .error("startBridgeServer threw IOException", e);
                    fail("startBridgeServer threw IOException " + e.getMessage());
                }
                assertTrue(testGetNotifiedClients_port != 0);
                // NOTE(review): ports[whichVM] is still 0 at this point — the controller VM only
                // fills it in after this runnable returns (via the static accessor below), so this
                // log line always prints port=0. Log testGetNotifiedClients_port instead — TODO confirm.
                System.out.println("[testGetNotifiedClients] port=" + ports[whichVM]);
                System.out.println("[testGetNotifiedClients] serverMemberId=" + getMemberId());
            }
        });
        // pull the ephemeral port back to the controller VM through the static holder
        ports[whichVM] = vm.invoke("getTestGetNotifiedClients_port",
            () -> ClientMembershipDUnitTest.getTestGetNotifiedClients_port());
        assertTrue(ports[whichVM] != 0);
    }
    // Phase 2: create a client in the controller VM connected to all server ports.
    System.out.println("[testGetNotifiedClients] create bridge client");
    Properties config = new Properties();
    config.setProperty(MCAST_PORT, "0");
    config.setProperty(LOCATORS, "");
    properties = config;
    getSystem();
    getCache();
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    System.out.println("[testGetNotifiedClients] creating connection pool");
    // subscription-enabled pool (true) so the client registers as a "notified" client
    ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host),
        ports, true, -1, -1, null);
    Region region = createRegion(name, factory.create());
    assertNotNull(getRootRegion().getSubregion(name));
    region.registerInterest("KEY-1");
    region.get("KEY-1");
    // clientMemberId is currently unused because the string-comparison assertion below is
    // commented out (see the [bruce] note) — kept for when that check is reinstated
    final String clientMemberId = getMemberId().toString();
    pauseForClientToJoin();
    // assertions go here
    // Phase 3: ask every server VM how many notified clients it sees.
    int[] clientCounts = new int[host.getVMCount()];
    // only one server vm will have that client for updating
    for (int i = 0; i < host.getVMCount(); i++) {
        final int whichVM = i;
        final VM vm = Host.getHost(0).getVM(i);
        vm.invoke("Create bridge server", () -> {
            Map clients = InternalClientMembership.getConnectedClients(true);
            assertNotNull(clients);
            testGetNotifiedClients_clientCount = clients.size();
            // [bruce] this is not a valid assertion - the server may not use
            // fully qualified host names while clients always use them in
            // forming their member ID. The test needs to check InetAddresses,
            // not strings
            // if (testGetNotifiedClients_clientCount > 0) {
            // // assert that the clientMemberId matches
            // assertEquals(clientMemberId, clients.keySet().iterator().next());
            // }
        });
        clientCounts[whichVM] = vm.invoke("getTestGetNotifiedClients_clientCount",
            () -> ClientMembershipDUnitTest.getTestGetNotifiedClients_clientCount());
    }
    // only one server should have a notifier for this client...
    int totalClientCounts = 0;
    for (int i = 0; i < clientCounts.length; i++) {
        totalClientCounts += clientCounts[i];
    }
    // this assertion fails because the count is 4
    // assertIndexDetailsEquals(1, totalClientCounts);
}

// Holder used to pass the server's ephemeral bridge-server port from a remote VM
// back to the controller VM (DUnit statics are per-VM).
protected static int testGetNotifiedClients_port;

private static int getTestGetNotifiedClients_port() {
    return testGetNotifiedClients_port;
}

// Holder for the notified-client count observed in each server VM.
protected static int testGetNotifiedClients_clientCount;

private static int getTestGetNotifiedClients_clientCount() {
    return testGetNotifiedClients_clientCount;
}

// Simple DistributedMember implementation
// Minimal stub keyed solely on the host string; only the identity-related methods
// (getHost/getId/compareTo/equals/hashCode) carry real behavior.
static class TestDistributedMember implements DistributedMember {

    private String host;

    public TestDistributedMember(String host) {
        this.host = host;
    }

    public String getName() {
        return "";
    }

    public String getHost() {
        return this.host;
    }

    public Set getRoles() {
        return new HashSet();
    }

    public int getProcessId() {
        return 0;
    }

    public String getId() {
        return this.host;
    }

    // NOTE(review): throws InternalGemFireException rather than the conventional
    // ClassCastException for foreign types; equals() below relies on this guard
    // never being hit because it pre-checks the type.
    public int compareTo(DistributedMember o) {
        if ((o == null) || !(o instanceof TestDistributedMember)) {
            throw new InternalGemFireException("Invalidly comparing TestDistributedMember to " + o);
        }
        TestDistributedMember tds = (TestDistributedMember) o;
        return getHost().compareTo(tds.getHost());
    }

    public boolean equals(Object obj) {
        if ((obj == null) || !(obj instanceof TestDistributedMember)) {
            return false;
        }
        return compareTo((TestDistributedMember) obj) == 0;
    }

    public int hashCode() {
        return getHost().hashCode();
    }

    public DurableClientAttributes getDurableClientAttributes() {
        return null;
    }

    public List<String> getGroups() {
        return Collections.emptyList();
    }
}
}
apache-2.0
pax95/camel
core/camel-core-processor/src/main/java/org/apache/camel/processor/errorhandler/RedeliveryPolicy.java
27598
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.processor.errorhandler;

import java.io.Serializable;
import java.util.Random;

import org.apache.camel.Exchange;
import org.apache.camel.LoggingLevel;
import org.apache.camel.Predicate;
import org.apache.camel.spi.Configurer;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The policy used to decide how many times to redeliver and the time between the redeliveries before being sent to a
 * <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter Channel</a>
 * <p>
 * The default values are:
 * <ul>
 * <li>maximumRedeliveries = 0</li>
 * <li>redeliveryDelay = 1000L (the initial delay)</li>
 * <li>maximumRedeliveryDelay = 60 * 1000L</li>
 * <li>asyncDelayedRedelivery = false</li>
 * <li>backOffMultiplier = 2</li>
 * <li>useExponentialBackOff = false</li>
 * <li>collisionAvoidanceFactor = 0.15d</li>
 * <li>useCollisionAvoidance = false</li>
 * <li>retriesExhaustedLogLevel = LoggingLevel.ERROR</li>
 * <li>retryAttemptedLogLevel = LoggingLevel.DEBUG</li>
 * <li>retryAttemptedLogInterval = 1</li>
 * <li>logRetryAttempted = true</li>
 * <li>logRetryStackTrace = false</li>
 * <li>logStackTrace = true</li>
 * <li>logHandled = false</li>
 * <li>logExhausted = true</li>
 * <li>logExhaustedMessageHistory = true</li>
 * <li>logExhaustedMessageBody = false</li>
 * <li>logNewException = true</li>
 * <li>allowRedeliveryWhileStopping = true</li>
 * </ul>
 * <p/>
 * Setting the maximumRedeliveries to a negative value such as -1 will then always redeliver (unlimited). Setting the
 * maximumRedeliveries to 0 will disable redelivery.
 * <p/>
 * This policy can be configured either by one of the following two settings:
 * <ul>
 * <li>using conventional options, using all the options defined above</li>
 * <li>using delay pattern to declare intervals for delays</li>
 * </ul>
 * <p/>
 * <b>Note:</b> If using delay patterns then the following options is not used (delay, backOffMultiplier,
 * useExponentialBackOff, useCollisionAvoidance)
 * <p/>
 * <b>Using delay pattern</b>: <br/>
 * The delay pattern syntax is: <tt>limit:delay;limit 2:delay 2;limit 3:delay 3;...;limit N:delay N</tt>.
 * <p/>
 * How it works is best illustrate with an example with this pattern: <tt>delayPattern=5:1000;10:5000;20:20000</tt>
 * <br/>
 * The delays will be for attempt in range 0..4 = 0 millis, 5..9 = 1000 millis, 10..19 = 5000 millis, >= 20 = 20000
 * millis.
 * <p/>
 * If you want to set a starting delay, then use 0 as the first limit, eg: <tt>0:1000;5:5000</tt> will use 1 sec delay
 * until attempt number 5 where it will use 5 seconds going forward.
 */
@Configurer
public class RedeliveryPolicy implements Cloneable, Serializable {
    // default policy using out of the box settings which can be shared
    public static final RedeliveryPolicy DEFAULT_POLICY = new RedeliveryPolicy();

    // lazily created, shared RNG for collision avoidance; guarded by getRandomNumberGenerator()
    protected static Random randomNumberGenerator;

    private static final long serialVersionUID = -338222777701473252L;
    private static final Logger LOG = LoggerFactory.getLogger(RedeliveryPolicy.class);

    protected long redeliveryDelay = 1000L;
    protected int maximumRedeliveries;
    protected long maximumRedeliveryDelay = 60 * 1000L;
    protected double backOffMultiplier = 2;
    protected boolean useExponentialBackOff;
    // +/-15% for a 30% spread -cgs
    protected double collisionAvoidanceFactor = 0.15d;
    protected boolean useCollisionAvoidance;
    protected LoggingLevel retriesExhaustedLogLevel = LoggingLevel.ERROR;
    protected LoggingLevel retryAttemptedLogLevel = LoggingLevel.DEBUG;
    protected int retryAttemptedLogInterval = 1;
    protected boolean logStackTrace = true;
    protected boolean logRetryStackTrace;
    protected boolean logHandled;
    protected boolean logContinued;
    protected boolean logExhausted = true;
    protected boolean logNewException = true;
    // Boolean (not boolean) so "not configured" can be distinguished from an explicit value
    protected Boolean logExhaustedMessageHistory;
    protected Boolean logExhaustedMessageBody;
    protected boolean logRetryAttempted = true;
    protected String delayPattern;
    protected boolean asyncDelayedRedelivery;
    protected boolean allowRedeliveryWhileStopping = true;
    protected String exchangeFormatterRef;

    /**
     * Creates a policy with the default settings documented on the class javadoc.
     */
    public RedeliveryPolicy() {
    }

    @Override
    public String toString() {
        return "RedeliveryPolicy[maximumRedeliveries=" + maximumRedeliveries
               + ", redeliveryDelay=" + redeliveryDelay
               + ", maximumRedeliveryDelay=" + maximumRedeliveryDelay
               + ", asyncDelayedRedelivery=" + asyncDelayedRedelivery
               + ", allowRedeliveryWhileStopping=" + allowRedeliveryWhileStopping
               + ", retriesExhaustedLogLevel=" + retriesExhaustedLogLevel
               + ", retryAttemptedLogLevel=" + retryAttemptedLogLevel
               + ", retryAttemptedLogInterval=" + retryAttemptedLogInterval
               + ", logRetryAttempted=" + logRetryAttempted
               + ", logStackTrace=" + logStackTrace
               + ", logRetryStackTrace=" + logRetryStackTrace
               + ", logHandled=" + logHandled
               + ", logContinued=" + logContinued
               + ", logExhausted=" + logExhausted
               + ", logNewException=" + logNewException
               + ", logExhaustedMessageHistory=" + logExhaustedMessageHistory
               + ", logExhaustedMessageBody=" + logExhaustedMessageBody
               + ", useExponentialBackOff=" + useExponentialBackOff
               + ", backOffMultiplier=" + backOffMultiplier
               + ", useCollisionAvoidance=" + useCollisionAvoidance
               + ", collisionAvoidanceFactor=" + collisionAvoidanceFactor
               + ", delayPattern=" + delayPattern
               + ", exchangeFormatterRef=" + exchangeFormatterRef + "]";
    }

    /**
     * Returns a shallow copy of this policy (used so the shared {@link #DEFAULT_POLICY} can be
     * customized without mutating it).
     */
    public RedeliveryPolicy copy() {
        try {
            return (RedeliveryPolicy) clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeException("Could not clone: " + e, e);
        }
    }

    /**
     * Returns true if the policy decides that the message exchange should be redelivered.
     *
     * @param  exchange          the current exchange
     * @param  redeliveryCounter the current retry counter
     * @param  retryWhile        an optional predicate to determine if we should redeliver or not
     * @return                   true to redeliver, false to stop
     */
    public boolean shouldRedeliver(Exchange exchange, int redeliveryCounter, Predicate retryWhile) {
        // predicate is always used if provided
        if (retryWhile != null) {
            return retryWhile.matches(exchange);
        }
        if (getMaximumRedeliveries() < 0) {
            // retry forever if negative value
            return true;
        }
        // redeliver until we hit the max
        return redeliveryCounter <= getMaximumRedeliveries();
    }

    /**
     * Calculates the new redelivery delay based on the last one and then <b>sleeps</b> for the necessary amount of
     * time.
     * <p/>
     * This implementation will block while sleeping.
     *
     * @param  redeliveryDelay       previous redelivery delay
     * @param  redeliveryCounter     number of previous redelivery attempts
     * @return                       the calculate delay
     * @throws InterruptedException  is thrown if the sleep is interrupted likely because of shutdown
     */
    public long sleep(long redeliveryDelay, int redeliveryCounter) throws InterruptedException {
        redeliveryDelay = calculateRedeliveryDelay(redeliveryDelay, redeliveryCounter);
        if (redeliveryDelay > 0) {
            sleep(redeliveryDelay);
        }
        return redeliveryDelay;
    }

    /**
     * Sleeps for the given delay
     *
     * @param  redeliveryDelay       the delay
     * @throws InterruptedException  is thrown if the sleep is interrupted likely because of shutdown
     */
    public void sleep(long redeliveryDelay) throws InterruptedException {
        LOG.debug("Sleeping for: {} millis until attempting redelivery", redeliveryDelay);
        Thread.sleep(redeliveryDelay);
    }

    /**
     * Calculates the new redelivery delay based on the last one
     *
     * @param  previousDelay     previous redelivery delay
     * @param  redeliveryCounter number of previous redelivery attempts
     * @return                   the calculate delay
     */
    public long calculateRedeliveryDelay(long previousDelay, int redeliveryCounter) {
        if (ObjectHelper.isNotEmpty(delayPattern)) {
            // calculate delay using the pattern
            return calculateRedeliverDelayUsingPattern(delayPattern, redeliveryCounter);
        }

        // calculate the delay using the conventional parameters
        long redeliveryDelayResult;
        if (previousDelay == 0) {
            // first redelivery: start from the configured initial delay
            redeliveryDelayResult = redeliveryDelay;
        } else if (useExponentialBackOff && backOffMultiplier > 1) {
            redeliveryDelayResult = Math.round(backOffMultiplier * previousDelay);
        } else {
            redeliveryDelayResult = previousDelay;
        }

        if (useCollisionAvoidance) {
            /*
             * First random determines +/-, second random determines how far to
             * go in that direction. -cgs
             */
            Random random = getRandomNumberGenerator(); // NOSONAR
            double variance = (random.nextBoolean() ? collisionAvoidanceFactor : -collisionAvoidanceFactor)
                              * random.nextDouble();
            redeliveryDelayResult += redeliveryDelayResult * variance;
        }

        // ensure the calculated result is not bigger than the max delay (if configured)
        if (maximumRedeliveryDelay > 0 && redeliveryDelayResult > maximumRedeliveryDelay) {
            redeliveryDelayResult = maximumRedeliveryDelay;
        }

        return redeliveryDelayResult;
    }

    /**
     * Calculates the delay using the delay pattern
     */
    protected static long calculateRedeliverDelayUsingPattern(String delayPattern, int redeliveryCounter) {
        // pattern is "limit:delay;limit:delay;..." — pick the delay of the last group whose
        // limit is <= redeliveryCounter (groups are expected in ascending limit order)
        String[] groups = delayPattern.split(";");
        // find the group where the redelivery counter matches
        long answer = 0;
        for (String group : groups) {
            long delay = Long.parseLong(StringHelper.after(group, ":"));
            int count = Integer.parseInt(StringHelper.before(group, ":"));
            if (count > redeliveryCounter) {
                break;
            } else {
                answer = delay;
            }
        }
        return answer;
    }

    // Builder methods
    // -------------------------------------------------------------------------

    /**
     * Sets the initial redelivery delay in milliseconds
     */
    public RedeliveryPolicy redeliveryDelay(long delay) {
        setRedeliveryDelay(delay);
        return this;
    }

    /**
     * Sets the maximum number of times a message exchange will be redelivered
     */
    public RedeliveryPolicy maximumRedeliveries(int maximumRedeliveries) {
        setMaximumRedeliveries(maximumRedeliveries);
        return this;
    }

    /**
     * Enables collision avoidance which adds some randomization to the backoff timings to reduce contention
     * probability
     */
    public RedeliveryPolicy useCollisionAvoidance() {
        setUseCollisionAvoidance(true);
        return this;
    }

    /**
     * Enables exponential backoff using the {@link #getBackOffMultiplier()} to increase the time between retries
     */
    public RedeliveryPolicy useExponentialBackOff() {
        setUseExponentialBackOff(true);
        return this;
    }

    /**
     * Enables exponential backoff and sets the multiplier used to increase the delay between redeliveries
     */
    public RedeliveryPolicy backOffMultiplier(double multiplier) {
        useExponentialBackOff();
        setBackOffMultiplier(multiplier);
        return this;
    }

    /**
     * Enables collision avoidance and sets the percentage used
     */
    public RedeliveryPolicy collisionAvoidancePercent(double collisionAvoidancePercent) {
        useCollisionAvoidance();
        setCollisionAvoidancePercent(collisionAvoidancePercent);
        return this;
    }

    /**
     * Sets the maximum redelivery delay if using exponential back off. Use -1 if you wish to have no maximum
     */
    public RedeliveryPolicy maximumRedeliveryDelay(long maximumRedeliveryDelay) {
        setMaximumRedeliveryDelay(maximumRedeliveryDelay);
        return this;
    }

    /**
     * Sets the logging level to use for log messages when retries have been exhausted.
     */
    public RedeliveryPolicy retriesExhaustedLogLevel(LoggingLevel retriesExhaustedLogLevel) {
        setRetriesExhaustedLogLevel(retriesExhaustedLogLevel);
        return this;
    }

    /**
     * Sets the logging level to use for log messages when retries are attempted.
     */
    public RedeliveryPolicy retryAttemptedLogLevel(LoggingLevel retryAttemptedLogLevel) {
        setRetryAttemptedLogLevel(retryAttemptedLogLevel);
        return this;
    }

    /**
     * Sets the interval to log retry attempts
     */
    public RedeliveryPolicy retryAttemptedLogInterval(int logRetryAttemptedInterval) {
        setRetryAttemptedLogInterval(logRetryAttemptedInterval);
        return this;
    }

    /**
     * Sets whether to log retry attempts
     */
    public RedeliveryPolicy logRetryAttempted(boolean logRetryAttempted) {
        setLogRetryAttempted(logRetryAttempted);
        return this;
    }

    /**
     * Sets whether to log stacktrace for failed messages.
     */
    public RedeliveryPolicy logStackTrace(boolean logStackTrace) {
        setLogStackTrace(logStackTrace);
        return this;
    }

    /**
     * Sets whether to log stacktrace for failed redelivery attempts
     */
    public RedeliveryPolicy logRetryStackTrace(boolean logRetryStackTrace) {
        setLogRetryStackTrace(logRetryStackTrace);
        return this;
    }

    /**
     * Sets whether to log errors even if its handled
     */
    public RedeliveryPolicy logHandled(boolean logHandled) {
        setLogHandled(logHandled);
        return this;
    }

    /**
     * Sets whether errors should be logged when a new exception occurred during handling a previous exception
     */
    public RedeliveryPolicy logNewException(boolean logNewException) {
        setLogNewException(logNewException);
        return this;
    }

    /**
     * Sets whether to log exhausted errors
     */
    public RedeliveryPolicy logExhausted(boolean logExhausted) {
        setLogExhausted(logExhausted);
        return this;
    }

    /**
     * Sets whether to log exhausted errors including message history
     */
    public RedeliveryPolicy logExhaustedMessageHistory(boolean logExhaustedMessageHistory) {
        setLogExhaustedMessageHistory(logExhaustedMessageHistory);
        return this;
    }

    /**
     * Sets whether to log exhausted errors including message body (requires message history to be enabled)
     */
    public RedeliveryPolicy logExhaustedMessageBody(boolean logExhaustedMessageBody) {
        setLogExhaustedMessageBody(logExhaustedMessageBody);
        return this;
    }

    /**
     * Sets the delay pattern with delay intervals.
     */
    public RedeliveryPolicy delayPattern(String delayPattern) {
        setDelayPattern(delayPattern);
        return this;
    }

    /**
     * Disables redelivery by setting maximum redeliveries to 0.
     */
    public RedeliveryPolicy disableRedelivery() {
        setMaximumRedeliveries(0);
        return this;
    }

    /**
     * Allow asynchronous delayed redelivery.
     *
     * @see #setAsyncDelayedRedelivery(boolean)
     */
    public RedeliveryPolicy asyncDelayedRedelivery() {
        setAsyncDelayedRedelivery(true);
        return this;
    }

    /**
     * Controls whether to allow redelivery while stopping/shutting down a route that uses error handling.
     *
     * @param redeliverWhileStopping <tt>true</tt> to allow redelivery, <tt>false</tt> to reject redeliveries
     */
    public RedeliveryPolicy allowRedeliveryWhileStopping(boolean redeliverWhileStopping) {
        setAllowRedeliveryWhileStopping(redeliverWhileStopping);
        return this;
    }

    /**
     * Sets the reference of the instance of {@link org.apache.camel.spi.ExchangeFormatter} to generate the log message
     * from exchange.
     *
     * @param  exchangeFormatterRef name of the instance of {@link org.apache.camel.spi.ExchangeFormatter}
     * @return                      the builder
     */
    public RedeliveryPolicy exchangeFormatterRef(String exchangeFormatterRef) {
        setExchangeFormatterRef(exchangeFormatterRef);
        return this;
    }

    // Properties
    // -------------------------------------------------------------------------

    public long getRedeliveryDelay() {
        return redeliveryDelay;
    }

    /**
     * Sets the initial redelivery delay in milliseconds
     */
    public void setRedeliveryDelay(long redeliverDelay) {
        this.redeliveryDelay = redeliverDelay;
        // if max enabled then also set max to this value in case max was too low
        if (maximumRedeliveryDelay > 0 && redeliverDelay > maximumRedeliveryDelay) {
            this.maximumRedeliveryDelay = redeliverDelay;
        }
    }

    public double getBackOffMultiplier() {
        return backOffMultiplier;
    }

    /**
     * Sets the multiplier used to increase the delay between redeliveries if {@link #setUseExponentialBackOff(boolean)}
     * is enabled
     */
    public void setBackOffMultiplier(double backOffMultiplier) {
        this.backOffMultiplier = backOffMultiplier;
    }

    public long getCollisionAvoidancePercent() {
        return Math.round(collisionAvoidanceFactor * 100);
    }

    /**
     * Sets the percentage used for collision avoidance if enabled via {@link #setUseCollisionAvoidance(boolean)}
     */
    public void setCollisionAvoidancePercent(double collisionAvoidancePercent) {
        this.collisionAvoidanceFactor = collisionAvoidancePercent * 0.01d;
    }

    public double getCollisionAvoidanceFactor() {
        return collisionAvoidanceFactor;
    }

    /**
     * Sets the factor used for collision avoidance if enabled via {@link #setUseCollisionAvoidance(boolean)}
     */
    public void setCollisionAvoidanceFactor(double collisionAvoidanceFactor) {
        this.collisionAvoidanceFactor = collisionAvoidanceFactor;
    }

    public int getMaximumRedeliveries() {
        return maximumRedeliveries;
    }

    /**
     * Sets the maximum number of times a message exchange will be redelivered. Setting a negative value will retry
     * forever.
     */
    public void setMaximumRedeliveries(int maximumRedeliveries) {
        this.maximumRedeliveries = maximumRedeliveries;
    }

    public long getMaximumRedeliveryDelay() {
        return maximumRedeliveryDelay;
    }

    /**
     * Sets the maximum redelivery delay. Use -1 if you wish to have no maximum
     */
    public void setMaximumRedeliveryDelay(long maximumRedeliveryDelay) {
        this.maximumRedeliveryDelay = maximumRedeliveryDelay;
    }

    public boolean isUseCollisionAvoidance() {
        return useCollisionAvoidance;
    }

    /**
     * Enables/disables collision avoidance which adds some randomization to the backoff timings to reduce contention
     * probability
     */
    public void setUseCollisionAvoidance(boolean useCollisionAvoidance) {
        this.useCollisionAvoidance = useCollisionAvoidance;
    }

    public boolean isUseExponentialBackOff() {
        return useExponentialBackOff;
    }

    /**
     * Enables/disables exponential backoff using the {@link #getBackOffMultiplier()} to increase the time between
     * retries
     */
    public void setUseExponentialBackOff(boolean useExponentialBackOff) {
        this.useExponentialBackOff = useExponentialBackOff;
    }

    // synchronized lazy init so all policies share one RNG instance
    protected static synchronized Random getRandomNumberGenerator() {
        if (randomNumberGenerator == null) {
            randomNumberGenerator = new Random(); // NOSONAR
        }
        return randomNumberGenerator;
    }

    /**
     * Sets the logging level to use for log messages when retries have been exhausted.
     */
    public void setRetriesExhaustedLogLevel(LoggingLevel retriesExhaustedLogLevel) {
        this.retriesExhaustedLogLevel = retriesExhaustedLogLevel;
    }

    public LoggingLevel getRetriesExhaustedLogLevel() {
        return retriesExhaustedLogLevel;
    }

    /**
     * Sets the logging level to use for log messages when retries are attempted.
     */
    public void setRetryAttemptedLogLevel(LoggingLevel retryAttemptedLogLevel) {
        this.retryAttemptedLogLevel = retryAttemptedLogLevel;
    }

    public LoggingLevel getRetryAttemptedLogLevel() {
        return retryAttemptedLogLevel;
    }

    public int getRetryAttemptedLogInterval() {
        return retryAttemptedLogInterval;
    }

    /**
     * Sets the interval to log retry attempts
     */
    public void setRetryAttemptedLogInterval(int retryAttemptedLogInterval) {
        this.retryAttemptedLogInterval = retryAttemptedLogInterval;
    }

    public String getDelayPattern() {
        return delayPattern;
    }

    /**
     * Sets an optional delay pattern to use instead of fixed delay.
     */
    public void setDelayPattern(String delayPattern) {
        this.delayPattern = delayPattern;
    }

    public boolean isLogStackTrace() {
        return logStackTrace;
    }

    /**
     * Sets whether stack traces should be logged or not
     */
    public void setLogStackTrace(boolean logStackTrace) {
        this.logStackTrace = logStackTrace;
    }

    public boolean isLogRetryStackTrace() {
        return logRetryStackTrace;
    }

    /**
     * Sets whether stack traces should be logged or not
     */
    public void setLogRetryStackTrace(boolean logRetryStackTrace) {
        this.logRetryStackTrace = logRetryStackTrace;
    }

    public boolean isLogHandled() {
        return logHandled;
    }

    /**
     * Sets whether errors should be logged even if its handled
     */
    public void setLogHandled(boolean logHandled) {
        this.logHandled = logHandled;
    }

    public boolean isLogNewException() {
        return logNewException;
    }

    /**
     * Sets whether errors should be logged when a new exception occurred during handling a previous exception
     */
    public void setLogNewException(boolean logNewException) {
        this.logNewException = logNewException;
    }

    public boolean isLogContinued() {
        return logContinued;
    }

    /**
     * Sets whether errors should be logged even if its continued
     */
    public void setLogContinued(boolean logContinued) {
        this.logContinued = logContinued;
    }

    public boolean isLogRetryAttempted() {
        return logRetryAttempted;
    }

    /**
     * Sets whether retry attempts should be logged or not
     */
    public void setLogRetryAttempted(boolean logRetryAttempted) {
        this.logRetryAttempted = logRetryAttempted;
    }

    public boolean isLogExhausted() {
        return logExhausted;
    }

    /**
     * Sets whether exhausted exceptions should be logged or not
     */
    public void setLogExhausted(boolean logExhausted) {
        this.logExhausted = logExhausted;
    }

    public boolean isLogExhaustedMessageHistory() {
        // should default be enabled
        return logExhaustedMessageHistory == null || logExhaustedMessageHistory;
    }

    /**
     * Whether the option logExhaustedMessageHistory has been configured or not
     *
     * @return <tt>null</tt> if not configured, or the configured value as true or false
     * @see    #isLogExhaustedMessageHistory()
     */
    public Boolean getLogExhaustedMessageHistory() {
        return logExhaustedMessageHistory;
    }

    /**
     * Sets whether exhausted exceptions should be logged with message history included.
     */
    public void setLogExhaustedMessageHistory(boolean logExhaustedMessageHistory) {
        this.logExhaustedMessageHistory = logExhaustedMessageHistory;
    }

    public boolean isLogExhaustedMessageBody() {
        // should default be disabled
        return logExhaustedMessageBody != null && logExhaustedMessageBody;
    }

    /**
     * Whether the option logExhaustedMessageBody has been configured or not
     *
     * @return <tt>null</tt> if not configured, or the configured value as true or false
     * @see    #isLogExhaustedMessageBody()
     */
    public Boolean getLogExhaustedMessageBody() {
        return logExhaustedMessageBody;
    }

    /**
     * Sets whether exhausted message body/headers should be logged with message history included (requires
     * logExhaustedMessageHistory to be enabled).
     */
    public void setLogExhaustedMessageBody(Boolean logExhaustedMessageBody) {
        this.logExhaustedMessageBody = logExhaustedMessageBody;
    }

    public boolean isAsyncDelayedRedelivery() {
        return asyncDelayedRedelivery;
    }

    /**
     * Sets whether asynchronous delayed redelivery is allowed.
     * <p/>
     * This is disabled by default.
     * <p/>
     * When enabled it allows Camel to schedule a future task for delayed redelivery which prevents current thread from
     * blocking while waiting.
     * <p/>
     * Exchange which is transacted will however always use synchronous delayed redelivery because the transaction must
     * execute in the same thread context.
     *
     * @param asyncDelayedRedelivery whether asynchronous delayed redelivery is allowed
     */
    public void setAsyncDelayedRedelivery(boolean asyncDelayedRedelivery) {
        this.asyncDelayedRedelivery = asyncDelayedRedelivery;
    }

    public boolean isAllowRedeliveryWhileStopping() {
        return allowRedeliveryWhileStopping;
    }

    /**
     * Controls whether to allow redelivery while stopping/shutting down a route that uses error handling.
     *
     * @param allowRedeliveryWhileStopping <tt>true</tt> to allow redelivery, <tt>false</tt> to reject redeliveries
     */
    public void setAllowRedeliveryWhileStopping(boolean allowRedeliveryWhileStopping) {
        this.allowRedeliveryWhileStopping = allowRedeliveryWhileStopping;
    }

    public String getExchangeFormatterRef() {
        return exchangeFormatterRef;
    }

    /**
     * Sets the reference of the instance of {@link org.apache.camel.spi.ExchangeFormatter} to generate the log message
     * from exchange.
     */
    public void setExchangeFormatterRef(String exchangeFormatterRef) {
        this.exchangeFormatterRef = exchangeFormatterRef;
    }
}
apache-2.0
ajordens/orca
orca-core/src/main/java/com/netflix/spinnaker/orca/pipeline/StageDefinitionBuilderFactory.java
973
/*
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License")
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.netflix.spinnaker.orca.pipeline;

import com.netflix.spinnaker.orca.api.pipeline.graph.StageDefinitionBuilder;
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution;
import javax.annotation.Nonnull;

/**
 * Strategy for resolving the {@link StageDefinitionBuilder} that handles a given
 * {@link StageExecution}.
 */
@FunctionalInterface
public interface StageDefinitionBuilderFactory {

  /**
   * Returns the builder responsible for the given stage; never {@code null}.
   *
   * @param stage the stage execution to resolve a builder for
   */
  @Nonnull
  StageDefinitionBuilder builderFor(@Nonnull StageExecution stage);
}
apache-2.0
daidong/DominoHBase
hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
5867
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase; import java.nio.ByteBuffer; import java.util.Collection; import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.IterableUtils; import org.apache.hadoop.hbase.util.Strings; import com.google.common.collect.Lists; @InterfaceAudience.Private public class KeyValueTestUtil { public static KeyValue create( String row, String family, String qualifier, long timestamp, String value) { return create(row, family, qualifier, timestamp, KeyValue.Type.Put, value); } public static KeyValue create( String row, String family, String qualifier, long timestamp, KeyValue.Type type, String value) { return new KeyValue( Bytes.toBytes(row), Bytes.toBytes(family), Bytes.toBytes(qualifier), timestamp, type, Bytes.toBytes(value) ); } public static ByteBuffer toByteBufferAndRewind(final Iterable<? 
extends KeyValue> kvs, boolean includeMemstoreTS) { int totalBytes = KeyValueTool.totalLengthWithMvccVersion(kvs, includeMemstoreTS); ByteBuffer bb = ByteBuffer.allocate(totalBytes); for (KeyValue kv : IterableUtils.nullSafe(kvs)) { KeyValueTool.appendToByteBuffer(bb, kv, includeMemstoreTS); } bb.rewind(); return bb; } public static List<KeyValue> rewindThenToList(final ByteBuffer bb, final boolean includesMemstoreTS) { bb.rewind(); List<KeyValue> kvs = Lists.newArrayList(); KeyValue kv = null; while (true) { kv = KeyValueTool.nextShallowCopy(bb, includesMemstoreTS); if (kv == null) { break; } kvs.add(kv); } return kvs; } /********************* toString ************************************/ public static String toStringWithPadding(final Collection<? extends KeyValue> kvs, final boolean includeMeta) { int maxRowStringLength = 0; int maxFamilyStringLength = 0; int maxQualifierStringLength = 0; int maxTimestampLength = 0; for (KeyValue kv : kvs) { maxRowStringLength = Math.max(maxRowStringLength, getRowString(kv).length()); maxFamilyStringLength = Math.max(maxFamilyStringLength, getFamilyString(kv).length()); maxQualifierStringLength = Math.max(maxQualifierStringLength, getQualifierString(kv) .length()); maxTimestampLength = Math.max(maxTimestampLength, Long.valueOf(kv.getTimestamp()).toString() .length()); } StringBuilder sb = new StringBuilder(); for (KeyValue kv : kvs) { if (sb.length() > 0) { sb.append("\n"); } String row = toStringWithPadding(kv, maxRowStringLength, maxFamilyStringLength, maxQualifierStringLength, maxTimestampLength, includeMeta); sb.append(row); } return sb.toString(); } protected static String toStringWithPadding(final KeyValue kv, final int maxRowLength, int maxFamilyLength, int maxQualifierLength, int maxTimestampLength, boolean includeMeta) { String leadingLengths = ""; String familyLength = kv.getFamilyLength() + " "; if (includeMeta) { leadingLengths += Strings.padFront(kv.getKeyLength() + "", '0', 4); leadingLengths += " "; leadingLengths 
+= Strings.padFront(kv.getValueLength() + "", '0', 4); leadingLengths += " "; leadingLengths += Strings.padFront(kv.getRowLength() + "", '0', 2); leadingLengths += " "; } int spacesAfterRow = maxRowLength - getRowString(kv).length() + 2; int spacesAfterFamily = maxFamilyLength - getFamilyString(kv).length() + 2; int spacesAfterQualifier = maxQualifierLength - getQualifierString(kv).length() + 1; int spacesAfterTimestamp = maxTimestampLength - Long.valueOf(kv.getTimestamp()).toString().length() + 1; return leadingLengths + getRowString(kv) + Strings.repeat(' ', spacesAfterRow) + familyLength + getFamilyString(kv) + Strings.repeat(' ', spacesAfterFamily) + getQualifierString(kv) + Strings.repeat(' ', spacesAfterQualifier) + getTimestampString(kv) + Strings.repeat(' ', spacesAfterTimestamp) + getTypeString(kv) + " " + getValueString(kv); } protected static String getRowString(final KeyValue kv) { return Bytes.toStringBinary(kv.getRow()); } protected static String getFamilyString(final KeyValue kv) { return Bytes.toStringBinary(kv.getFamily()); } protected static String getQualifierString(final KeyValue kv) { return Bytes.toStringBinary(kv.getQualifier()); } protected static String getTimestampString(final KeyValue kv) { return kv.getTimestamp() + ""; } protected static String getTypeString(final KeyValue kv) { return KeyValue.Type.codeToType(kv.getType()).toString(); } protected static String getValueString(final KeyValue kv) { return Bytes.toStringBinary(kv.getValue()); } }
apache-2.0
rvs/incubator-geode
gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/wan/parallel/ConcurrentParallelGatewaySenderQueue.java
7058
/*========================================================================= * Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved. * This product is protected by U.S. and international copyright * and intellectual property laws. Pivotal products are covered by * one or more patents listed at http://www.pivotal.io/patents. *========================================================================= */ /** * */ package com.gemstone.gemfire.internal.cache.wan.parallel; import com.gemstone.gemfire.cache.CacheException; import com.gemstone.gemfire.cache.CacheListener; import com.gemstone.gemfire.cache.Region; import com.gemstone.gemfire.internal.cache.Conflatable; import com.gemstone.gemfire.internal.cache.DistributedRegion; import com.gemstone.gemfire.internal.cache.PartitionedRegion; import com.gemstone.gemfire.internal.cache.RegionQueue; import com.gemstone.gemfire.internal.cache.wan.GatewaySenderEventImpl; import com.gemstone.gemfire.internal.cache.wan.parallel.ParallelGatewaySenderEventProcessor; import com.gemstone.gemfire.internal.cache.wan.parallel.ParallelGatewaySenderQueue; import java.util.List; import java.util.Set; import java.util.concurrent.BlockingQueue; import com.gemstone.gemfire.internal.size.SingleObjectSizer; /** * Queue built on top of {@link ParallelGatewaySenderQueue} which allows * multiple dispatcher to register and do peek/remove from the * underlying {@link ParallelGatewaySenderQueue} * * There is only one queue, but this class co-ordinates access * by multiple threads such that we get zero contention while peeking * or removing. * * It implements RegionQueue so that AbstractGatewaySenderEventProcessor * can work on it. 
* * @author Suranjan Kumar * */ public class ConcurrentParallelGatewaySenderQueue implements RegionQueue { private final ParallelGatewaySenderEventProcessor processors[]; public ConcurrentParallelGatewaySenderQueue( ParallelGatewaySenderEventProcessor pro[]) { this.processors = pro; } @Override public void put(Object object) throws InterruptedException, CacheException { throw new UnsupportedOperationException("CPGAQ method(put) is not supported"); } /*@Override public void close() { // no need to free peekedEvents since they all had makeOffHeap called on them. throw new UnsupportedOperationException("CPGAQ method(close) is not supported"); }*/ @Override public Region getRegion() { return this.processors[0].getQueue().getRegion(); } public PartitionedRegion getRegion(String fullpath) { return processors[0].getRegion(fullpath); } public Set<PartitionedRegion> getRegions() { return ((ParallelGatewaySenderQueue)(processors[0].getQueue())).getRegions(); } @Override public Object take() throws CacheException, InterruptedException { throw new UnsupportedOperationException("This method(take) is not suported"); } @Override public List take(int batchSize) throws CacheException, InterruptedException { throw new UnsupportedOperationException("This method(take) is not suported"); } @Override public void remove() throws CacheException { throw new UnsupportedOperationException("This method(remove) is not suported"); } @Override public Object peek() throws InterruptedException, CacheException { throw new UnsupportedOperationException("This method(peek) is not suported"); } @Override public List peek(int batchSize) throws InterruptedException, CacheException { throw new UnsupportedOperationException("This method(peek) is not suported"); } @Override public List peek(int batchSize, int timeToWait) throws InterruptedException, CacheException { throw new UnsupportedOperationException("This method(peek) is not suported"); } @Override public int size() { //is that fine?? 
return this.processors[0].getQueue().size(); } public int localSize() { return ((ParallelGatewaySenderQueue)(processors[0].getQueue())).localSize(); } @Override public void addCacheListener(CacheListener listener) { this.processors[0].getQueue().addCacheListener(listener); } @Override public void removeCacheListener() { this.processors[0].removeCacheListener(); } @Override public void remove(int top) throws CacheException { throw new UnsupportedOperationException("This method(remove) is not suported"); } /* public void resetLastPeeked(){ this.resetLastPeeked = true; }*/ public long estimateMemoryFootprint(SingleObjectSizer sizer) { long size = 0; for(int i=0; i< processors.length; i++) size += ((ParallelGatewaySenderQueue)this.processors[i].getQueue()).estimateMemoryFootprint(sizer); return size; } /*@Override public void release() { for(int i =0; i< processors.length; i++){ processors[i].getQueue().release(); } }*/ public void removeShadowPR(String prRegionName) { for(int i =0; i< processors.length; i++){ processors[i].removeShadowPR(prRegionName); } } public void addShadowPartitionedRegionForUserPR(PartitionedRegion pr) { for(int i =0; i< processors.length; i++){ processors[i].addShadowPartitionedRegionForUserPR(pr); } } private ParallelGatewaySenderEventProcessor getPGSProcessor(int bucketId) { int index = bucketId % this.processors.length; return processors[index]; } public BlockingQueue<GatewaySenderEventImpl> getBucketTmpQueue(int bucketId) { return getPGSProcessor(bucketId).getBucketTmpQueue(bucketId); } public void notifyEventProcessorIfRequired(int bucketId) { getPGSProcessor( bucketId).notifyEventProcessorIfRequired(bucketId); } /* public HDFSBucketRegionQueue getBucketRegionQueue(PartitionedRegion region, int bucketId) throws ForceReattemptException { return getPGSProcessor(bucketId).getBucketRegionQueue(region, bucketId); }*/ public void clear(PartitionedRegion pr, int bucketId) { getPGSProcessor(bucketId).clear(pr, bucketId); } public void cleanUp() { 
for(int i=0; i< processors.length; i++) ((ParallelGatewaySenderQueue)this.processors[i].getQueue()).cleanUp(); } public void conflateEvent(Conflatable conflatableObject, int bucketId, Long tailKey) { getPGSProcessor(bucketId).conflateEvent(conflatableObject, bucketId, tailKey); } /* public HDFSGatewayEventImpl get(PartitionedRegion region, byte[] regionKey, int bucketId) throws ForceReattemptException { return getPGSProcessor(bucketId).get(region, regionKey, bucketId); }*/ public void addShadowPartitionedRegionForUserRR(DistributedRegion userRegion) { for(int i =0; i< processors.length; i++){ processors[i].addShadowPartitionedRegionForUserRR(userRegion);; } } public long getNumEntriesInVMTestOnly() { return ((ParallelGatewaySenderQueue)(processors[0].getQueue())).getNumEntriesInVMTestOnly(); } public long getNumEntriesOverflowOnDiskTestOnly() { return ((ParallelGatewaySenderQueue)(processors[0].getQueue())).getNumEntriesOverflowOnDiskTestOnly(); } }
apache-2.0
nishantmonu51/druid
core/src/main/java/org/apache/druid/math/expr/vector/ExprEvalStringVector.java
2375
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.math.expr.vector; import org.apache.druid.common.config.NullHandling; import org.apache.druid.math.expr.ExprEval; import org.apache.druid.math.expr.ExpressionType; import javax.annotation.Nullable; public final class ExprEvalStringVector extends ExprEvalVector<String[]> { @Nullable private long[] longs; @Nullable private double[] doubles; @Nullable private boolean[] numericNulls; public ExprEvalStringVector(String[] values) { super(values, null); } private void computeNumbers() { if (longs == null) { longs = new long[values.length]; doubles = new double[values.length]; numericNulls = new boolean[values.length]; for (int i = 0; i < values.length; i++) { Number n = ExprEval.computeNumber(values[i]); if (n != null) { longs[i] = n.longValue(); doubles[i] = n.doubleValue(); numericNulls[i] = false; } else { longs[i] = 0L; doubles[i] = 0.0; numericNulls[i] = NullHandling.sqlCompatible(); } } } } @Nullable @Override public boolean[] getNullVector() { computeNumbers(); return numericNulls; } @Override public ExpressionType getType() { return ExpressionType.STRING; } @Override public long[] getLongVector() { computeNumbers(); return longs; } @Override public double[] 
getDoubleVector() { computeNumbers(); return doubles; } @Override public Object[] getObjectVector() { return values; } }
apache-2.0
wmedvede/uberfire-extensions
uberfire-widgets/uberfire-widgets-commons/src/main/java/org/uberfire/ext/widgets/common/client/ace/AceEditor.java
20092
// Copyright (c) 2011-2014, David H. Hovemeyer <david.hovemeyer@gmail.com>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package org.uberfire.ext.widgets.common.client.ace;

import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.dom.client.Element;
import com.google.gwt.user.client.TakesValue;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HasText;
import com.google.gwt.user.client.ui.RequiresResize;

/**
 * A GWT widget for the Ajax.org Code Editor (ACE).
 * <p>
 * The widget wraps a browser-side ACE instance via JSNI: {@link #startEditor()}
 * must be called after the widget is attached to the DOM, and all other
 * editor-manipulating methods assume it has been called first.
 * @see <a href="http://ace.ajax.org/">Ajax.org Code Editor</a>
 */
public class AceEditor extends Composite implements RequiresResize, HasText, TakesValue<String> {

    // Used to generate unique element ids for Ace widgets.
    // NOTE(review): plain static counter — fine for GWT client code, which is
    // single-threaded in the browser.
    private static int nextId = 0;

    private final String elementId;

    // The underlying JavaScript ACE editor object; assigned by startEditor().
    private JavaScriptObject editor;

    // Annotations staged via addAnnotation() until setAnnotations() pushes them
    // to the editor session.
    private JsArray<AceAnnotation> annotations = JavaScriptObject.createArray().cast();

    // The div element hosting the editor; handed to ace.edit() in startEditor().
    private Element divElement;

    /**
     * Preferred constructor.
     */
    public AceEditor() {
        elementId = "_aceGWT" + nextId;
        nextId++;
        FlowPanel div = new FlowPanel();
        div.getElement().setId( elementId );
        initWidget( div );
        divElement = div.getElement();
        getElement().setAttribute( "data-uf-lock-on-click", "false" );
    }

    /**
     * Do not use this constructor: just use the default constructor.
     */
    @Deprecated
    public AceEditor( boolean unused ) {
        this();
    }

    /**
     * Call this method to start the editor.
     * Make sure that the widget has been attached to the DOM tree
     * before calling this method.
     */
    public native void startEditor() /*-{
        var editor = $wnd.ace.edit(this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::divElement);
        editor.getSession().setUseWorker(false);
        this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor = editor;

        // Store a reference to the (Java) AceEditor object in the
        // JavaScript editor object.
        editor._aceGWTAceEditor = this;

        // I have been noticing sporadic failures of the editor
        // to display properly and receive key/mouse events.
        // Try to force the editor to resize and display itself fully.  See:
        // https://groups.google.com/group/ace-discuss/browse_thread/thread/237262b521dcea33
        editor.resize();
        this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::redisplay();
    }-*/;

    /**
     * Call this to force the editor contents to be redisplayed.
     * There seems to be a problem when an AceEditor is embedded in a LayoutPanel:
     * the editor contents don't appear, and it refuses to accept focus
     * and mouse events, until the browser window is resized.
     * Calling this method works around the problem by forcing
     * the underlying editor to redisplay itself fully. (?)
     */
    public native void redisplay() /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.renderer.onResize(true);
        editor.renderer.updateFull();
        editor.resize();
        editor.focus();
    }-*/;

    /**
     * Cleans up the entire editor.
     */
    public native void destroy() /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.destroy();
    }-*/;

    /**
     * Set the theme.
     * @param theme the theme (one of the values in the {@link AceEditorTheme}
     * enumeration)
     */
    public void setTheme( final AceEditorTheme theme ) {
        setThemeByName( theme.getName() );
    }

    /**
     * Set the theme by name.
     * @param themeName the theme name (e.g., "twilight")
     */
    public native void setThemeByName( String themeName ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.setTheme("ace/theme/" + themeName);
    }-*/;

    /**
     * Set the mode.
     * @param mode the mode (one of the values in the
     * {@link AceEditorMode} enumeration)
     */
    public void setMode( final AceEditorMode mode ) {
        setModeByName( mode.getName() );
    }

    /**
     * Set the mode by name.
     * @param shortModeName name of mode (e.g., "eclipse")
     */
    public native void setModeByName( String shortModeName ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        var modeName = "ace/mode/" + shortModeName;
        var TheMode = $wnd.require(modeName).Mode;
        editor.getSession().setMode(new TheMode());
    }-*/;

    /**
     * Register a handler for change events generated by the editor.
     * @param callback the change event handler
     */
    public native void addOnChangeHandler( AceEditorCallback callback ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.getSession().on("change", function (e) {
            callback.@org.uberfire.ext.widgets.common.client.ace.AceEditorCallback::invokeAceCallback(Lcom/google/gwt/core/client/JavaScriptObject;)(e);
        });
    }-*/;

    /**
     * Register a handler for cursor position change events generated by the editor.
     * @param callback the cursor position change event handler
     */
    public native void addOnCursorPositionChangeHandler( AceEditorCallback callback ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.getSession().selection.on("changeCursor", function (e) {
            callback.@org.uberfire.ext.widgets.common.client.ace.AceEditorCallback::invokeAceCallback(Lcom/google/gwt/core/client/JavaScriptObject;)(e);
        });
    }-*/;

    /**
     * Set font size.
     */
    // NOTE(review): this looks up the host div by the id assigned in the
    // constructor, rather than going through the editor object like the
    // other setters — relies on the id staying unchanged in the DOM.
    public native void setFontSize( String fontSize ) /*-{
        var elementId = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::elementId;
        var elt = $doc.getElementById(elementId);
        elt.style.fontSize = fontSize;
    }-*/;

    /**
     * Get the complete text in the editor as a String.
     * @return the text in the editor
     */
    public native String getText() /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        return editor.getSession().getValue();
    }-*/;

    /**
     * Set the complete text in the editor from a String.
     * @param text the text to set in the editor
     */
    public native void setText( String text ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.getSession().setValue(text);
    }-*/;

    /**
     * Get the line of text at the given row number.
     * @param row the row number
     * @return the line of text at that row number
     */
    public native String getLine( int row ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        return editor.getSession().getDocument().getLine(row);
    }-*/;

    /**
     * Insert given text at the cursor.
     * @param text text to insert at the cursor
     */
    public native void insertAtCursor( String text ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.insert(text);
    }-*/;

    /**
     * Get the current cursor position.
     * @return the current cursor position
     */
    public native AceEditorCursorPosition getCursorPosition() /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        var pos = editor.getCursorPosition();
        return this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::getCursorPositionImpl(DD)(pos.row, pos.column);
    }-*/;

    // JSNI bridge: receives row/column as doubles (JS numbers) and narrows to int.
    private AceEditorCursorPosition getCursorPositionImpl( final double row,
                                                           final double column ) {
        return new AceEditorCursorPosition( (int) row,
                                            (int) column );
    }

    /**
     * Gets the given document position as a zero-based index.
     * @param position the position to obtain the absolute index of (base zero)
     * @return An index to the current location in the document
     */
    public int getIndexFromPosition( AceEditorCursorPosition position ) {
        return getIndexFromPositionImpl( position.toJsObject() );
    }

    private native int getIndexFromPositionImpl( JavaScriptObject jsPosition ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        return editor.getSession().getDocument().positionToIndex(jsPosition);
    }-*/;

    /**
     * Gets a document position from a supplied zero-based index.
     * @param index (base zero)
     * @return A position object showing the row and column of the supplied index in the document
     */
    public native AceEditorCursorPosition getPositionFromIndex( int index ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        var jsPosition = editor.getSession().getDocument().indexToPosition(index);
        return @org.uberfire.ext.widgets.common.client.ace.AceEditorCursorPosition::create(II)( jsPosition.row, jsPosition.column );
    }-*/;

    /**
     * Set whether or not soft tabs should be used.
     * @param useSoftTabs true if soft tabs should be used, false otherwise
     */
    public native void setUseSoftTabs( boolean useSoftTabs ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.getSession().setUseSoftTabs(useSoftTabs);
    }-*/;

    /**
     * Set tab size. (Default is 4.)
     * @param tabSize the tab size to set
     */
    public native void setTabSize( int tabSize ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.getSession().setTabSize(tabSize);
    }-*/;

    /**
     * Go to given line.
     * @param line the line to go to
     */
    public native void gotoLine( int line ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.gotoLine(line);
    }-*/;

    /**
     * Go to given line.
     * @param line the line to go to
     */
    public native void scrollToLine( int line ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.resize(true);
        editor.scrollToLine(line, true, true, function () {});
    }-*/;

    /**
     * Set whether or not the horizontal scrollbar is always visible.
     * @param hScrollBarAlwaysVisible true if the horizontal scrollbar is always
     * visible, false if it is hidden when not needed
     */
    public native void setHScrollBarAlwaysVisible( boolean hScrollBarAlwaysVisible ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.renderer.setHScrollBarAlwaysVisible(hScrollBarAlwaysVisible);
    }-*/;

    /**
     * Set whether or not the gutter is shown.
     * @param showGutter true if the gutter should be shown, false if it should be hidden
     */
    public native void setShowGutter( boolean showGutter ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.renderer.setShowGutter(showGutter);
    }-*/;

    /**
     * Set or unset read-only mode.
     * @param readOnly true if editor should be set to readonly, false if the
     * editor should be set to read-write
     */
    public native void setReadOnly( boolean readOnly ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.setReadOnly(readOnly);
    }-*/;

    /**
     * Set or unset highlighting of currently selected word.
     * @param highlightSelectedWord true to highlight currently selected word, false otherwise
     */
    public native void setHighlightSelectedWord( boolean highlightSelectedWord ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.setHighlightSelectedWord(highlightSelectedWord);
    }-*/;

    /**
     * Set or unset the visibility of the print margin.
     * @param showPrintMargin true if the print margin should be shown, false otherwise
     */
    public native void setShowPrintMargin( boolean showPrintMargin ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.renderer.setShowPrintMargin(showPrintMargin);
    }-*/;

    /**
     * Add an annotation to a the local <code>annotations</code> JsArray<AceAnnotation>, but does not set it on the editor
     * @param row to which the annotation should be added
     * @param column to which the annotation applies
     * @param text to display as a tooltip with the annotation
     * @param type to be displayed (one of the values in the
     * {@link AceAnnotationType} enumeration)
     */
    public void addAnnotation( final int row,
                               final int column,
                               final String text,
                               final AceAnnotationType type ) {
        annotations.push( AceAnnotation.create( row,
                                                column,
                                                text,
                                                type.getName() ) );
    }

    /**
     * Set any annotations which have been added via <code>addAnnotation</code> on the editor
     */
    public native void setAnnotations() /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        var annotations = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::annotations;
        editor.getSession().setAnnotations(annotations);
    }-*/;

    /**
     * Clear any annotations from the editor and reset the local <code>annotations</code> JsArray<AceAnnotation>
     */
    public native void clearAnnotations() /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.getSession().clearAnnotations();
        this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::resetAnnotations()();
    }-*/;

    /**
     * Reset any annotations in the local <code>annotations</code> JsArray<AceAnnotation>
     */
    private void resetAnnotations() {
        annotations = JavaScriptObject.createArray().cast();
    }

    /**
     * Remove a command from the editor.
     * @param command the command (one of the values in the
     * {@link AceCommand} enumeration)
     */
    public void removeCommand( final AceCommand command ) {
        removeCommandByName( command.getName() );
    }

    /**
     * Remove commands, that may not be required, from the editor
     * @param command to be removed, one of
     * "gotoline", "findnext", "findprevious", "find", "replace", "replaceall"
     */
    public native void removeCommandByName( String command ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.commands.removeCommand(command);
    }-*/;

    /**
     * Set whether to use wrap mode or not
     * @param useWrapMode true if word wrap should be used, false otherwise
     */
    public native void setUseWrapMode( boolean useWrapMode ) /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.getSession().setUseWrapMode(useWrapMode);
    }-*/;

    /* (non-Javadoc)
     * @see com.google.gwt.user.client.ui.ResizeComposite#onResize()
     */
    @Override
    public void onResize() {
        redisplay();
    }

    @Override
    public void setValue( String value ) {
        this.setText( value );
    }

    @Override
    public String getValue() {
        return this.getText();
    }

    /**
     * Set whether or not autocomplete is enabled.
     * @param b true if autocomplete should be enabled, false if not
     */
    public native void setAutocompleteEnabled( boolean b ) /*-{
        // See: https://github.com/ajaxorg/ace/wiki/How-to-enable-Autocomplete-in-the-Ace-editor
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        if (b) {
            $wnd.ace.require("ace/ext/language_tools");
            editor.setOptions({
                enableBasicAutocompletion: true
            });
        } else {
            editor.setOptions({
                enableBasicAutocompletion: false
            });
        }
    }-*/;

    /**
     * Removes all existing completers from the langtools<br/><br/>
     * This can be used to disable all completers including local completers, which can be very useful
     * when completers are used on very large files (as the local completer tokenizes every word to put in the selected list).<br/><br/>
     * <strong>NOTE:</strong> This method may be removed, and replaced with another solution. It works at point of check-in, but treat this as unstable for now.
     */
    public native static void removeAllExistingCompleters() /*-{
        var langTools = $wnd.ace.require("ace/ext/language_tools");
        langTools.removeCompleters();
    }-*/;

    /**
     * Add an {@link AceCompletionProvider} to provide
     * custom code completions.
     * <p/>
     * <strong>Warning</strong>: this is an experimental feature of AceGWT.
     * It is possible that the API will change in an incompatible way
     * in future releases.
     * @param provider the {@link AceCompletionProvider}
     */
    public native static void addCompletionProvider( AceCompletionProvider provider ) /*-{
        var langTools = $wnd.ace.require("ace/ext/language_tools");
        var completer = {
            getCompletions: function (editor, session, pos, prefix, callback) {
                var callbackWrapper = @org.uberfire.ext.widgets.common.client.ace.AceEditor::wrapCompletionCallback(Lcom/google/gwt/core/client/JavaScriptObject;)(callback);
                var aceEditor = editor._aceGWTAceEditor;
                provider.@org.uberfire.ext.widgets.common.client.ace.AceCompletionProvider::getProposals(Lorg/uberfire/ext/widgets/common/client/ace/AceEditor;Lorg/uberfire/ext/widgets/common/client/ace/AceEditorCursorPosition;Ljava/lang/String;Lorg/uberfire/ext/widgets/common/client/ace/AceCompletionCallback;)(
                        aceEditor,
                        @org.uberfire.ext.widgets.common.client.ace.AceEditorCursorPosition::create(II)(pos.row, pos.column),
                        prefix,
                        callbackWrapper
                );
            }
        };
        langTools.addCompleter(completer);
    }-*/;

    // Wraps the raw JS completion callback in the Java-facing callback type.
    private static AceCompletionCallback wrapCompletionCallback( JavaScriptObject jsCallback ) {
        return new AceCompletionCallbackImpl( jsCallback );
    }

    public native void setFocus() /*-{
        var editor = this.@org.uberfire.ext.widgets.common.client.ace.AceEditor::editor;
        editor.focus();
    }-*/;
}
apache-2.0
GabrielBrascher/cloudstack
api/src/main/java/org/apache/cloudstack/api/command/user/snapshot/RevertSnapshotCmd.java
4367
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cloudstack.api.command.user.snapshot;

import org.apache.cloudstack.acl.SecurityChecker.AccessType;
import org.apache.cloudstack.api.ACL;
import org.apache.cloudstack.api.APICommand;
import org.apache.cloudstack.api.ApiCommandJobType;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.ApiErrorCode;
import org.apache.cloudstack.api.BaseAsyncCmd;
import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.ServerApiException;
import org.apache.cloudstack.api.response.SnapshotResponse;
import org.apache.cloudstack.context.CallContext;
import org.apache.log4j.Logger;

import com.cloud.event.EventTypes;
import com.cloud.storage.Snapshot;
import com.cloud.user.Account;

/**
 * Async API command that reverts a volume to one of its snapshots.
 * The command is declared via {@code @APICommand}; parameter binding,
 * dispatch and the async-job envelope are handled by the framework.
 */
@APICommand(name = "revertSnapshot", description = "This is supposed to revert a volume snapshot. This command is only supported with KVM so far", responseObject = SnapshotResponse.class, entityType = {Snapshot.class},
        requestHasSensitiveInfo = false, responseHasSensitiveInfo = false)
public class RevertSnapshotCmd extends BaseAsyncCmd {
    public static final Logger s_logger = Logger.getLogger(RevertSnapshotCmd.class.getName());
    // Response name used as the top-level key of the JSON/XML API response.
    private static final String s_name = "revertsnapshotresponse";

    /////////////////////////////////////////////////////
    //////////////// API parameters /////////////////////
    /////////////////////////////////////////////////////

    // The snapshot to revert to; ACL-checked for operate access on the entity.
    @ACL(accessType = AccessType.OperateEntry)
    @Parameter(name = ApiConstants.ID, type = BaseCmd.CommandType.UUID, entityType = SnapshotResponse.class, required = true, description = "The ID of the snapshot")
    private Long id;

    /////////////////////////////////////////////////////
    /////////////////// Accessors ///////////////////////
    /////////////////////////////////////////////////////

    public Long getId() {
        return id;
    }

    /////////////////////////////////////////////////////
    /////////////// API Implementation///////////////////
    /////////////////////////////////////////////////////

    @Override
    public String getCommandName() {
        return s_name;
    }

    /**
     * Attributes the command to the snapshot's owning account, so events and
     * resource accounting land on the right owner.
     */
    @Override
    public long getEntityOwnerId() {
        Snapshot snapshot = _entityMgr.findById(Snapshot.class, getId());
        if (snapshot != null) {
            return snapshot.getAccountId();
        }

        return Account.ACCOUNT_ID_SYSTEM; // no account info given, parent this command to SYSTEM so ERROR events are tracked
    }

    @Override
    public String getEventType() {
        return EventTypes.EVENT_SNAPSHOT_REVERT;
    }

    @Override
    public String getEventDescription() {
        return "revert snapshot: " + this._uuidMgr.getUuid(Snapshot.class, getId());
    }

    @Override
    public ApiCommandJobType getInstanceType() {
        return ApiCommandJobType.Snapshot;
    }

    @Override
    public Long getInstanceId() {
        return getId();
    }

    /**
     * Performs the revert via the snapshot service and builds the API
     * response; a null result from the service is surfaced as an
     * internal-error ServerApiException.
     */
    @Override
    public void execute() {
        CallContext.current().setEventDetails("Snapshot Id: " + this._uuidMgr.getUuid(Snapshot.class, getId()));
        Snapshot snapshot = _snapshotService.revertSnapshot(getId());
        if (snapshot != null) {
            SnapshotResponse response = _responseGenerator.createSnapshotResponse(snapshot);
            response.setResponseName(getCommandName());
            setResponseObject(response);
        } else {
            throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Failed to revert snapshot");
        }
    }
}
apache-2.0
mirego/j2objc
jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/text/StringCharacterIterator.java
8576
/* GENERATED SOURCE. DO NOT MODIFY. */
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/*
 *******************************************************************************
 * Copyright (C) 1996-2014, International Business Machines Corporation and    *
 * others. All Rights Reserved.                                                *
 *******************************************************************************
 */

// NOTE: This class is identical to java.text.StringCharacterIterator
// in JDK 1.2.  It's copied here because the JDK 1.1 version of
// StringCharacterIterator has a bug that prevents it from working
// right with RuleBasedBreakIterator.  This class is unnecessary
// when using RuleBasedBreakIterator with JDK 1.2.

package android.icu.text;

import java.text.CharacterIterator;

import android.icu.util.ICUCloneNotSupportedException;

/**
 * <code>StringCharacterIterator</code> implements the
 * <code>CharacterIterater</code> protocol for a <code>String</code>.
 * The <code>StringCharacterIterator</code> class iterates over the
 * entire <code>String</code>.
 *
 * @see CharacterIterator
 * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
 * @hide Only a subset of ICU is exposed in Android
 */
@Deprecated
///CLOVER:OFF
public final class StringCharacterIterator implements CharacterIterator {
    private String text;
    private int begin;
    private int end;
    // invariant: begin <= pos <= end
    private int pos;

    /**
     * Constructs an iterator with an initial index of 0.
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public StringCharacterIterator(String text) {
        this(text, 0);
    }

    /**
     * Constructs an iterator positioned at the given index.
     *
     * @param text  The String to be iterated over
     * @param pos   Initial iterator position
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public StringCharacterIterator(String text, int pos) {
        this(text, 0, text.length(), pos);
    }

    /**
     * Constructs an iterator over the given range of the given string, with the
     * index set at the specified position.
     *
     * @param text  The String to be iterated over
     * @param begin Index of the first character
     * @param end   Index of the character following the last character
     * @param pos   Initial iterator position
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public StringCharacterIterator(String text, int begin, int end, int pos) {
        if (text == null) {
            throw new NullPointerException();
        }
        this.text = text;

        if (begin < 0 || begin > end || end > text.length()) {
            throw new IllegalArgumentException("Invalid substring range");
        }
        if (pos < begin || pos > end) {
            throw new IllegalArgumentException("Invalid position");
        }

        this.begin = begin;
        this.end = end;
        this.pos = pos;
    }

    /**
     * Repoints this iterator at a new string, resetting the range to the
     * whole string and the position to 0.  Package-visible reuse hook for
     * other java.text classes that want to avoid allocating new
     * StringCharacterIterator objects on every setText call.
     *
     * @param text The String to be iterated over
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public void setText(String text) {
        if (text == null) {
            throw new NullPointerException();
        }
        this.text = text;
        this.begin = 0;
        this.end = text.length();
        this.pos = 0;
    }

    /**
     * Implements CharacterIterator.first() for String.
     * @see CharacterIterator#first
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public char first() {
        pos = begin;
        return current();
    }

    /**
     * Implements CharacterIterator.last() for String.
     * @see CharacterIterator#last
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public char last() {
        // For an empty range last() leaves pos at end (== begin).
        pos = (end != begin) ? end - 1 : end;
        return current();
    }

    /**
     * Implements CharacterIterator.setIndex() for String.
     * @see CharacterIterator#setIndex
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public char setIndex(int p) {
        if (p < begin || p > end) {
            throw new IllegalArgumentException("Invalid index");
        }
        pos = p;
        return current();
    }

    /**
     * Implements CharacterIterator.current() for String.
     * @see CharacterIterator#current
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public char current() {
        return (pos >= begin && pos < end) ? text.charAt(pos) : DONE;
    }

    /**
     * Implements CharacterIterator.next() for String.
     * @see CharacterIterator#next
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public char next() {
        if (pos >= end - 1) {
            // Already at (or moving onto) the end sentinel position.
            pos = end;
            return DONE;
        }
        return text.charAt(++pos);
    }

    /**
     * Implements CharacterIterator.previous() for String.
     * @see CharacterIterator#previous
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public char previous() {
        if (pos <= begin) {
            return DONE;
        }
        return text.charAt(--pos);
    }

    /**
     * Implements CharacterIterator.getBeginIndex() for String.
     * @see CharacterIterator#getBeginIndex
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public int getBeginIndex() {
        return begin;
    }

    /**
     * Implements CharacterIterator.getEndIndex() for String.
     * @see CharacterIterator#getEndIndex
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public int getEndIndex() {
        return end;
    }

    /**
     * Implements CharacterIterator.getIndex() for String.
     * @see CharacterIterator#getIndex
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public int getIndex() {
        return pos;
    }

    /**
     * Compares the equality of two StringCharacterIterator objects.
     * @param obj the StringCharacterIterator object to be compared with.
     * @return true if the given obj is the same as this
     * StringCharacterIterator object; false otherwise.
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof StringCharacterIterator)) {
            return false;
        }
        StringCharacterIterator that = (StringCharacterIterator) obj;
        return pos == that.pos
                && begin == that.begin
                && end == that.end
                && text.equals(that.text);
    }

    /**
     * Computes a hashcode for this iterator.
     * @return A hash code
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public int hashCode() {
        return text.hashCode() ^ pos ^ begin ^ end;
    }

    /**
     * Creates a copy of this iterator.
     * @return A copy of this
     * @deprecated ICU 2.4. Use java.text.StringCharacterIterator instead.
     */
    @Deprecated
    public Object clone() {
        try {
            return super.clone();
        } catch (CloneNotSupportedException e) {
            throw new ICUCloneNotSupportedException(e);
        }
    }
}
///CLOVER:ON
apache-2.0
mtseu/pentaho-hadoop-shims
api/src/org/pentaho/oozie/shim/api/OozieClientException.java
1263
/******************************************************************************* * * Pentaho Big Data * * Copyright (C) 2002-2014 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.oozie.shim.api; public class OozieClientException extends Exception { private static final long serialVersionUID = 2603554509709959992L; private final String errorCode; public OozieClientException( Throwable cause, String errorCode ) { super( cause ); this.errorCode = errorCode; } public String getErrorCode() { return errorCode; } }
apache-2.0
PHILO-HE/SSM
smart-rule/src/test/java/org/smartdata/rule/TestSmartRuleParser.java
5658
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.smartdata.rule;

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.tree.ParseTree;
import org.junit.Assert;
import org.junit.Test;
import org.smartdata.model.rule.TranslateResult;
import org.smartdata.rule.parser.SmartRuleLexer;
import org.smartdata.rule.parser.SmartRuleParser;
import org.smartdata.rule.parser.SmartRuleVisitTranslator;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * Tests for the SSM rule grammar: each rule string is run through the ANTLR
 * lexer/parser and the SQL-generating visitor. Valid rules must translate
 * without error; invalid rules must raise an exception.
 */
public class TestSmartRuleParser {

  // Syntax errors reported by SSMRuleErrorListener for the rule currently being
  // parsed. Reset at the start of every parseAndExecuteRule() call so that one
  // rule's errors cannot leak into the verdict for the next rule.
  List<RecognitionException> parseErrors = new ArrayList<>();

  /**
   * Error listener that logs every syntax error to stderr and records it in
   * {@link #parseErrors} so the caller can tell whether the parse failed.
   */
  public class SSMRuleErrorListener extends BaseErrorListener {
    @Override
    public void syntaxError(Recognizer<?, ?> recognizer,
                            Object offendingSymbol,
                            int line,
                            int charPositionInLine,
                            String msg,
                            RecognitionException e) {
      List<String> stack = ((Parser) recognizer).getRuleInvocationStack();
      Collections.reverse(stack);
      System.err.println("rule stack: " + stack);
      System.err.println("line " + line + ":" + charPositionInLine
          + " at " + offendingSymbol + ": " + msg);
      // e may be null when ANTLR recovers in place; adding it still marks
      // this parse as failed because it grows the error list.
      parseErrors.add(e);
    }
  }

  /** Every rule listed here must parse and translate cleanly. */
  @Test
  public void testValidRule() throws Exception {
    List<String> rules = new ArrayList<>();
    rules.add("file : accessCount(10m) > 10 and accessCount(10m) < 20 "
        + "| cache");
    rules.add("file with path matches \"/a/b*.dat\" : "
        + "every 5s from \"2013-07-09 19:21:34\" to now + (7d + 4s ) | "
        + "inCache or accessCount(10m) > 10 and 10d > 20s | cache");
    rules.add("file with length > 1GB : "
        + "blocksize > 1 + 3 and accessCount(30s) > 3 "
        + "and storage.free(\"SSD\") > 100 | cache");
    rules.add("file with length > 3 : "
        + "storage.free(\"SSD\") > 100 and not inCache | cache");
    rules.add("file : accessCount(10min) > 20 | cache");
    rules.add("file: every 5s from now to now + 10d | length > 3 | cache");
    rules.add("file: every 5s | length > 100mb | onessd");
    rules.add("file: every 50ms | length > 100mb | onessd");
    rules.add("file : every 1s | age > 100day | cache");
    rules.add("file : every 1s | mtime > \"2016-09-13 12:05:06\" | cache");
    rules.add("file : every 1s | mtime > now - 70day | cache");
    rules.add("file : every 1s | storagePolicy == \"ALL_SSD\" | cache");
    rules.add("file : accessCount(10min) < 20 | uncache");
    rules.add("file : accessCount(10min) == 0 | uncache");
    rules.add("file : accessCount(10min) <= 1 | uncache");
    rules.add("file : accessCount(1min) > 5 | cache -replica 2");
    rules.add("file : age <= 1 | echo -msg \"crul world\"");
    rules.add("file : age <= 1 | read ; read");
    rules.add("file : age <= 1 | read ; sync -dest hdfs://{}[]@&$=?!");

    for (String rule : rules) {
      parseAndExecuteRule(rule);
    }
  }

  /** Every rule listed here must be rejected by the parser or the translator. */
  @Test
  public void testInvalidRule() throws Exception {
    List<String> rules = new ArrayList<>();
    rules.add("someobject: length > 3mb | cache");
    rules.add("file : length > 3day | cache");
    rules.add("file : length() > 3tb | cache");
    rules.add("file : accessCount(10m) > 2 and length() > 3 | cache");
    rules.add("file : every 1s | mtime > 100s | cache");

    for (String rule : rules) {
      try {
        parseAndExecuteRule(rule);
        Assert.fail("Should have exception here!");
      } catch (Exception e) {
        // expected: the rule is invalid
      }
    }
  }

  /**
   * Parses the given rule, translates it to SQL and prints the statements.
   *
   * @param rule the SSM rule text
   * @throws IOException if the parser reported any syntax error for this rule
   * @throws Exception   if the visitor or SQL generation fails
   */
  private void parseAndExecuteRule(String rule) throws Exception {
    // BUGFIX: reset per invocation. Previously errors from an earlier rule
    // accumulated in the shared list, so every rule parsed after a failing one
    // was also reported as failing regardless of its own validity.
    parseErrors.clear();

    System.out.println("--> " + rule);
    // Use an explicit charset instead of the platform default.
    InputStream input = new ByteArrayInputStream(rule.getBytes(StandardCharsets.UTF_8));
    ANTLRInputStream antlrInput = new ANTLRInputStream(input);
    SmartRuleLexer lexer = new SmartRuleLexer(antlrInput);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    SmartRuleParser parser = new SmartRuleParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(new SSMRuleErrorListener());
    ParseTree tree = parser.ssmrule();
    System.out.println("Parser tree: " + tree.toStringTree(parser));
    System.out.println("Total number of errors: " + parseErrors.size());

    SmartRuleVisitTranslator visitor = new SmartRuleVisitTranslator();
    visitor.visit(tree);

    System.out.println("\nQuery:");
    TranslateResult result = visitor.generateSql();
    int index = 1;
    for (String sql : result.getSqlStatements()) {
      System.out.println("" + index + ". " + sql);
      index++;
    }
    if (!parseErrors.isEmpty()) {
      // Include the offending rule so the failure is diagnosable from the log.
      throw new IOException("Error while parsing rule: " + rule);
    }
  }
}
apache-2.0
ingokegel/intellij-community
plugins/InspectionGadgets/src/com/siyeh/ig/abstraction/TypeMayBeWeakenedInspection.java
25344
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.siyeh.ig.abstraction;

import com.intellij.codeInsight.daemon.impl.UnusedSymbolUtil;
import com.intellij.codeInsight.daemon.impl.analysis.JavaHighlightUtil;
import com.intellij.codeInsight.intention.LowPriorityAction;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.ui.ListTable;
import com.intellij.codeInspection.ui.ListWrappingTableModel;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.codeInspection.util.InspectionMessage;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.undo.BasicUndoableAction;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.profile.codeInspection.ProjectInspectionProfileManager;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.util.ObjectUtils;
import com.intellij.util.Query;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.OrderedSet;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.psiutils.ClassUtils;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.MethodUtils;
import com.siyeh.ig.psiutils.WeakestTypeFinder;
import com.siyeh.ig.ui.UiUtils;
import org.jdom.Attribute;
import org.jdom.Content;
import org.jdom.Element;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.util.*;

/**
 * Inspection that reports variables, fields, parameters and method return types
 * whose declared type could be weakened to a super type (as computed by
 * {@link WeakestTypeFinder}), and offers quick fixes to perform the weakening or
 * to register a "stop class" that the type must not be weakened past.
 */
public class TypeMayBeWeakenedInspection extends AbstractBaseJavaLocalInspectionTool {

  // Option flags are public non-final because the inspection framework persists
  // them via readSettings()/writeSettings() below.
  @SuppressWarnings({"PublicField", "WeakerAccess"})
  public boolean useRighthandTypeAsWeakestTypeInAssignments = true;
  @SuppressWarnings({"PublicField", "WeakerAccess"})
  public boolean useParameterizedTypeForCollectionMethods = true;
  @SuppressWarnings({"PublicField", "WeakerAccess"})
  public boolean doNotWeakenToJavaLangObject = true;
  @SuppressWarnings("PublicField")
  public boolean onlyWeakentoInterface = true;
  @SuppressWarnings("PublicField")
  public boolean doNotWeakenReturnType = true;
  @SuppressWarnings({"PublicField", "WeakerAccess"})
  public boolean doNotWeakenInferredVariableType;

  // Ordered set of fully qualified "stop class" names: weakening never goes
  // above any class in this set.
  public OrderedSet<String> myStopClassSet = new OrderedSet<>();
  // Table model backing the stop-class list in the options panel; shares
  // myStopClassSet as its storage.
  private final ListWrappingTableModel myStopClassesModel =
    new ListWrappingTableModel(myStopClassSet,
                               InspectionGadgetsBundle.message("inspection.type.may.be.weakened.add.stop.class.selection.table"));

  /**
   * Quick fix that adds one of the candidate class names to the stop-class set.
   * With several candidates it shows a popup so the user can pick which one.
   */
  class AddStopWordQuickfix implements LowPriorityAction, LocalQuickFix {
    private final List<String> myCandidates;

    AddStopWordQuickfix(@NotNull List<String> candidates) {
      myCandidates = candidates;
    }

    @Nls
    @NotNull
    @Override
    public String getName() {
      if (myCandidates.size() == 1) {
        return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.add.stopper.single", myCandidates.get(0));
      }
      return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.add.stopper");
    }

    @Nls
    @NotNull
    @Override
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.add.stop.class.family");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      // Single candidate: add it directly without any UI.
      if (myCandidates.size() == 1) {
        addClass(myCandidates.get(0), descriptor.getPsiElement());
        return;
      }
      // Multiple candidates: let the user choose from a popup in the editor.
      Editor editor = FileEditorManager.getInstance(project).getSelectedTextEditor();
      if (editor == null) return;
      String hint = InspectionGadgetsBundle.message("inspection.type.may.be.weakened.add.stop.class.selection.popup");
      ListPopup popup = JBPopupFactory.getInstance().createListPopup(new BaseListPopupStep<>(hint, myCandidates) {
        @Override
        public PopupStep onChosen(String selectedValue, boolean finalChoice) {
          CommandProcessor.getInstance().executeCommand(project,
                                                        () -> addClass(selectedValue, descriptor.getPsiElement()),
                                                        InspectionGadgetsBundle.message("inspection.type.may.be.weakened.add.stopper"),
                                                        null);
          return super.onChosen(selectedValue, finalChoice);
        }
      });
      popup.showInBestPositionFor(editor);
    }

    @Override
    public boolean startInWriteAction() {
      // The fix only mutates inspection settings (and shows UI), not the document.
      return false;
    }
  }

  // Restores option flags and stop classes from the persisted profile XML.
  @Override
  public void readSettings(@NotNull Element node) {
    List<Element> options = node.getChildren("option");
    Map<String, String> values = new HashMap<>();
    for (Element option : options) {
      Attribute nameAttribute = option.getAttribute("name");
      if (nameAttribute == null) continue;
      Attribute valueAttribute = option.getAttribute("value");
      if (valueAttribute == null) continue;
      values.put(nameAttribute.getValue(), valueAttribute.getValue());
    }
    useRighthandTypeAsWeakestTypeInAssignments =
      readOrDefault(values, "useRighthandTypeAsWeakestTypeInAssignments", useRighthandTypeAsWeakestTypeInAssignments);
    useParameterizedTypeForCollectionMethods =
      readOrDefault(values, "useParameterizedTypeForCollectionMethods", useParameterizedTypeForCollectionMethods);
    doNotWeakenToJavaLangObject = readOrDefault(values, "doNotWeakenToJavaLangObject", doNotWeakenToJavaLangObject);
    onlyWeakentoInterface = readOrDefault(values, "onlyWeakentoInterface", onlyWeakentoInterface);
    doNotWeakenReturnType = readOrDefault(values, "doNotWeakenReturnType", doNotWeakenReturnType);
    doNotWeakenInferredVariableType = readOrDefault(values, "doNotWeakenInferredVariableType", doNotWeakenInferredVariableType);
    readStopClasses(node);
  }

  // Returns the parsed boolean for the named option, or defaultValue when absent.
  private static boolean readOrDefault(@NotNull Map<String, String> options, @NotNull String name, boolean defaultValue) {
    String value = options.get(name);
    if (value == null) return defaultValue;
    return Boolean.parseBoolean(value);
  }

  // Reads the comma-separated stop-class list from the first <stopClasses> child.
  private void readStopClasses(@NotNull Element node) {
    List<Element> classes = node.getChildren("stopClasses");
    if (classes.isEmpty()) return;
    Element element = classes.get(0);
    List<Content> contentList = element.getContent();
    if (contentList.isEmpty()) return;
    String text = contentList.get(0).getValue();
    myStopClassSet.addAll(Arrays.asList(text.split(",")));
  }

  // Persists the options; non-default-only for the two newer flags and the
  // stop-class list, keeping profiles minimal.
  @Override
  public void writeSettings(@NotNull Element node) {
    writeBool(node, useRighthandTypeAsWeakestTypeInAssignments, "useRighthandTypeAsWeakestTypeInAssignments");
    writeBool(node, useParameterizedTypeForCollectionMethods, "useParameterizedTypeForCollectionMethods");
    writeBool(node, doNotWeakenToJavaLangObject, "doNotWeakenToJavaLangObject");
    writeBool(node, onlyWeakentoInterface, "onlyWeakentoInterface");
    if (!doNotWeakenReturnType) {
      writeBool(node, false, "doNotWeakenReturnType");
    }
    if (doNotWeakenInferredVariableType) {
      writeBool(node, true, "doNotWeakenInferredVariableType");
    }
    if (!myStopClassSet.isEmpty()) {
      Element stopClasses = new Element("stopClasses");
      stopClasses.addContent(String.join(",", myStopClassSet));
      node.addContent(stopClasses);
    }
  }

  // Appends one <option name=... value=...> element to the settings node.
  private static void writeBool(@NotNull Element node, boolean value, @NotNull @NonNls String name) {
    Element optionElement = new Element("option");
    optionElement.setAttribute("name", name);
    optionElement.setAttribute("value", String.valueOf(value));
    node.addContent(optionElement);
  }

  // Adds a stop class to the set, notifies the profile, and registers a global
  // undoable action so the settings change participates in undo/redo.
  private void addClass(@NotNull String stopClass, @NotNull PsiElement context) {
    if (myStopClassSet.add(stopClass)) {
      final Project project = context.getProject();
      ProjectInspectionProfileManager.getInstance(project).fireProfileChanged();
      final VirtualFile vFile = PsiUtilCore.getVirtualFile(context);
      UndoManager.getInstance(project).undoableActionPerformed(new BasicUndoableAction(vFile) {
        @Override
        public void undo() {
          myStopClassSet.remove(stopClass);
          ProjectInspectionProfileManager.getInstance(project).fireProfileChanged();
        }

        @Override
        public void redo() {
          myStopClassSet.add(stopClass);
          ProjectInspectionProfileManager.getInstance(project).fireProfileChanged();
        }

        @Override
        public boolean isGlobal() {
          // Settings changes affect the whole project, not a single document.
          return true;
        }
      });
    }
  }

  // Qualified name when available, otherwise the short name (may still be null
  // for anonymous/local classes).
  private static String getClassName(@NotNull PsiClass aClass) {
    final String qualifiedName = aClass.getQualifiedName();
    return qualifiedName == null ? aClass.getName() : qualifiedName;
  }

  // Builds the options UI: one checkbox per flag plus the stop-class table.
  @Override
  @NotNull
  public JComponent createOptionsPanel() {
    final MultipleCheckboxOptionsPanel optionsPanel = new MultipleCheckboxOptionsPanel(this);
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("inspection.type.may.be.weakened.ignore.option"),
                             "useRighthandTypeAsWeakestTypeInAssignments");
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("inspection.type.may.be.weakened.collection.method.option"),
                             "useParameterizedTypeForCollectionMethods");
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("inspection.type.may.be.weakened.do.not.weaken.to.object.option"),
                             "doNotWeakenToJavaLangObject");
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("inspection.type.may.be.weakened.only.weaken.to.an.interface"),
                             "onlyWeakentoInterface");
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("inspection.type.may.be.weakened.do.not.weaken.return.type"),
                             "doNotWeakenReturnType");
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("inspection.type.may.be.weakened.do.not.weaken.inferred.variable.type"),
                             "doNotWeakenInferredVariableType");
    final ListTable stopClassesTable = new ListTable(myStopClassesModel);
    final JPanel stopClassesPanel =
      UiUtils.createAddRemoveTreeClassChooserPanel(stopClassesTable,
                                                   InspectionGadgetsBundle
                                                     .message("inspection.type.may.be.weakened.add.stop.class.selection.table"),
                                                   CommonClassNames.JAVA_LANG_OBJECT);
    optionsPanel.add(stopClassesPanel, "growx");
    return ScrollPaneFactory.createScrollPane(optionsPanel, true);
  }

  /**
   * Quick fix that replaces the declared type of a variable or the return type
   * of a method with the given (weaker) class, preserving type arguments where
   * they can be substituted.
   */
  private static class TypeMayBeWeakenedFix implements LocalQuickFix {
    private final String fqClassName;

    TypeMayBeWeakenedFix(@NotNull String fqClassName) {
      this.fqClassName = fqClassName;
    }

    @Override
    @NotNull
    public String getName() {
      return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.quickfix", fqClassName);
    }

    @NotNull
    @Override
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.weaken.type.family");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      // The problem is registered on the name identifier; its parent is the
      // variable or method whose type element gets replaced.
      final PsiElement element = descriptor.getPsiElement();
      final PsiElement parent = element.getParent();
      final PsiTypeElement typeElement;
      if (parent instanceof PsiVariable) {
        final PsiVariable variable = (PsiVariable)parent;
        typeElement = variable.getTypeElement();
      }
      else if (parent instanceof PsiMethod) {
        final PsiMethod method = (PsiMethod)parent;
        typeElement = method.getReturnTypeElement();
      }
      else {
        return;
      }
      if (typeElement == null) {
        return;
      }
      final PsiJavaCodeReferenceElement componentReferenceElement = typeElement.getInnermostComponentReferenceElement();
      boolean isInferredType = typeElement.isInferredType();
      if (componentReferenceElement == null && !isInferredType) {
        return;
      }
      final PsiType oldType = typeElement.getType();
      if (!(oldType instanceof PsiClassType)) {
        return;
      }
      final PsiClassType oldClassType = (PsiClassType)oldType;
      final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
      final PsiElementFactory factory = facade.getElementFactory();
      final PsiType type = factory.createTypeFromText(fqClassName, element);
      if (!(type instanceof PsiClassType)) {
        return;
      }
      PsiClassType classType = (PsiClassType)type;
      final PsiClass aClass = classType.resolve();
      if (aClass != null) {
        final PsiTypeParameter[] typeParameters = aClass.getTypeParameters();
        if (typeParameters.length != 0) {
          // Carry over the original type arguments onto the weaker generic type.
          PsiClass newClass = classType.resolve();
          if (newClass == null) return;
          final Map<PsiTypeParameter, PsiType> typeParameterMap = new HashMap<>();
          for (int i = 0; i < typeParameters.length; i++) {
            final PsiTypeParameter typeParameter = typeParameters[i];
            final PsiType parameterType = PsiUtil.substituteTypeParameter(oldClassType, newClass, i, false);
            typeParameterMap.put(typeParameter, parameterType);
          }
          final PsiSubstitutor substitutor = factory.createSubstitutor(typeParameterMap);
          classType = factory.createType(aClass, substitutor);
        }
      }
      final PsiElement replacement;
      if (isInferredType) {
        // 'var' declarations: replace the whole type element with an explicit one.
        PsiTypeElement newTypeElement = factory.createTypeElement(classType);
        replacement = new CommentTracker().replaceAndRestoreComments(typeElement, newTypeElement);
      }
      else {
        final PsiJavaCodeReferenceElement referenceElement = factory.createReferenceElementByType(classType);
        replacement = new CommentTracker().replaceAndRestoreComments(componentReferenceElement, referenceElement);
      }
      final JavaCodeStyleManager javaCodeStyleManager = JavaCodeStyleManager.getInstance(project);
      javaCodeStyleManager.shortenClassReferences(replacement);
    }
  }

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly, @NotNull LocalInspectionToolSession session) {
    return new TypeMayBeWeakenedVisitor(holder, isOnTheFly);
  }

  // Walks the superclass chain between fromIncl and toIncl; if a stop class is
  // found on the way, that stop class is suggested instead of the weakest type.
  @NotNull
  private static PsiClass tryReplaceWithParentStopper(@NotNull PsiClass fromIncl,
                                                      @NotNull PsiClass toIncl,
                                                      @NotNull Collection<String> stopClasses) {
    for (PsiClass superClass : InheritanceUtil.getSuperClasses(fromIncl)) {
      if (!superClass.isInheritor(toIncl, true)) continue;
      if (stopClasses.contains(getClassName(superClass))) {
        return superClass;
      }
    }
    return toIncl;
  }

  /** Visitor that inspects variables (incl. parameters, fields) and method return types. */
  private class TypeMayBeWeakenedVisitor extends JavaElementVisitor {
    private final ProblemsHolder myHolder;
    private final boolean myIsOnTheFly;

    TypeMayBeWeakenedVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
      myHolder = holder;
      myIsOnTheFly = isOnTheFly;
    }

    @Override
    public void visitVariable(PsiVariable variable) {
      super.visitVariable(variable);
      if (variable instanceof PsiParameter) {
        final PsiParameter parameter = (PsiParameter)variable;
        if (parameter instanceof PsiPatternVariable) return;
        final PsiElement declarationScope = parameter.getDeclarationScope();
        if (declarationScope instanceof PsiCatchSection) {
          // do not weaken catch block parameters
          return;
        }
        if (declarationScope instanceof PsiLambdaExpression && parameter.getTypeElement() == null) {
          //no need to check inferred lambda params
          return;
        }
        if (declarationScope instanceof PsiMethod) {
          final PsiMethod method = (PsiMethod)declarationScope;
          final PsiClass containingClass = method.getContainingClass();
          if (containingClass == null || containingClass.isInterface()) {
            return;
          }
          if (JavaHighlightUtil.isSerializationRelatedMethod(method, containingClass)) {
            return;
          }
          if (MethodUtils.hasSuper(method)) {
            // do not try to weaken parameters of methods with
            // super methods
            return;
          }
          final Query<PsiMethod> overridingSearch = OverridingMethodsSearch.search(method);
          if (overridingSearch.findFirst() != null) {
            // do not try to weaken parameters of methods with
            // overriding methods.
            return;
          }
        }
      }
      if (myIsOnTheFly && variable instanceof PsiField) {
        // checking variables with greater visibility is too expensive
        // for error checking in the editor
        if (!variable.hasModifierProperty(PsiModifier.PRIVATE)) {
          return;
        }
      }
      if (doNotWeakenInferredVariableType) {
        PsiTypeElement typeElement = variable.getTypeElement();
        if (typeElement != null && typeElement.isInferredType()) {
          return;
        }
      }
      if (useRighthandTypeAsWeakestTypeInAssignments) {
        // Only report when the right-hand side pins the concrete type
        // (a 'new' expression or an explicit cast).
        if (variable instanceof PsiParameter) {
          final PsiElement parent = variable.getParent();
          if (parent instanceof PsiForeachStatement) {
            final PsiForeachStatement foreachStatement = (PsiForeachStatement)parent;
            final PsiExpression iteratedValue = foreachStatement.getIteratedValue();
            if (!(iteratedValue instanceof PsiNewExpression) && !(iteratedValue instanceof PsiTypeCastExpression)) {
              return;
            }
          }
        }
        else {
          final PsiExpression initializer = variable.getInitializer();
          if (!(initializer instanceof PsiNewExpression) && !(initializer instanceof PsiTypeCastExpression)) {
            return;
          }
        }
      }
      if (variable instanceof PsiParameter) {
        PsiMethod method = PsiTreeUtil.getParentOfType(variable, PsiMethod.class);
        if (method == null || UnusedSymbolUtil.isImplicitUsage(variable.getProject(), method)) return;
      }
      if (UnusedSymbolUtil.isImplicitWrite(variable) || UnusedSymbolUtil.isImplicitRead(variable)) {
        return;
      }
      PsiClassType classType = ObjectUtils.tryCast(variable.getType(), PsiClassType.class);
      if (classType == null) return;
      PsiClass originClass = classType.resolve();
      if (originClass == null) return;
      if (myStopClassSet.contains(getClassName(originClass))) return;
      Collection<PsiClass> weakestClasses = computeWeakestClasses(variable, originClass);
      if (weakestClasses.isEmpty()) {
        return;
      }
      PsiIdentifier nameIdentifier = variable.getNameIdentifier();
      if (nameIdentifier == null) return;
      registerProblem(nameIdentifier, variable, originClass, weakestClasses);
    }

    @Override
    public void visitMethod(PsiMethod method) {
      super.visitMethod(method);
      if (doNotWeakenReturnType) return;
      if (myIsOnTheFly && !method.hasModifierProperty(PsiModifier.PRIVATE) &&
          !ApplicationManager.getApplication().isUnitTestMode()) {
        // checking methods with greater visibility is too expensive.
        // for error checking in the editor
        return;
      }
      if (MethodUtils.hasSuper(method)) {
        // do not try to weaken methods with super methods
        return;
      }
      final Query<PsiMethod> overridingSearch = OverridingMethodsSearch.search(method);
      if (overridingSearch.findFirst() != null) {
        // do not try to weaken methods with overriding methods.
        return;
      }
      PsiClassType classType = ObjectUtils.tryCast(method.getReturnType(), PsiClassType.class);
      if (classType == null) return;
      PsiClass originClass = classType.resolve();
      if (originClass == null) return;
      if (myStopClassSet.contains(getClassName(originClass))) return;
      Collection<PsiClass> weakestClasses = computeWeakestClasses(method, originClass);
      if (weakestClasses.isEmpty()) return;
      PsiIdentifier identifier = method.getNameIdentifier();
      if (identifier == null) return;
      registerProblem(identifier, method, originClass, weakestClasses);
    }

    // Delegates to WeakestTypeFinder, then filters the result through the
    // Object/interface options and the stop-class set.
    @NotNull
    private Collection<PsiClass> computeWeakestClasses(@NotNull PsiElement element, @NotNull PsiClass originClass) {
      Collection<PsiClass> weakestClasses =
        WeakestTypeFinder.calculateWeakestClassesNecessary(element,
                                                           useRighthandTypeAsWeakestTypeInAssignments,
                                                           useParameterizedTypeForCollectionMethods);
      if (doNotWeakenToJavaLangObject) {
        weakestClasses.remove(ClassUtils.findObjectClass(element));
      }
      if (onlyWeakentoInterface) {
        weakestClasses.removeIf(weakestClass -> !weakestClass.isInterface());
      }
      weakestClasses = ContainerUtil.map(weakestClasses,
                                         psiClass -> tryReplaceWithParentStopper(originClass, psiClass, myStopClassSet));
      return weakestClasses;
    }

    // Registers the warning with one weakening fix per candidate class, plus
    // stop-class fixes where applicable.
    private void registerProblem(@NotNull PsiElement psiElement,
                                 @NotNull PsiElement element,
                                 @NotNull PsiClass originalClass,
                                 @NotNull Collection<PsiClass> weakerClasses) {
      final Collection<LocalQuickFix> fixes = new ArrayList<>();
      if (element instanceof PsiVariable && !doNotWeakenInferredVariableType) {
        PsiTypeElement typeElement = ((PsiVariable)element).getTypeElement();
        if (typeElement != null && typeElement.isInferredType()) {
          // For 'var' declarations also offer switching the option off entirely.
          fixes.add(new SetInspectionOptionFix(TypeMayBeWeakenedInspection.this,
                                               "doNotWeakenInferredVariableType",
                                               InspectionGadgetsBundle
                                                 .message("inspection.type.may.be.weakened.do.not.weaken.inferred.variable.type"),
                                               true));
        }
      }
      for (PsiClass weakestClass : weakerClasses) {
        final String className = getClassName(weakestClass);
        if (className == null) {
          continue;
        }
        fixes.add(new TypeMayBeWeakenedFix(className));
        List<String> candidates = getInheritors(originalClass, weakestClass);
        candidates.removeAll(myStopClassSet);
        if (!candidates.isEmpty() && (myIsOnTheFly || candidates.size() == 1)) {
          fixes.add(new AddStopWordQuickfix(candidates)); // not this class name, but all superclass names excluding this
        }
      }
      myHolder.registerProblem(psiElement, getDescription(element, weakerClasses), fixes.toArray(LocalQuickFix.EMPTY_ARRAY));
    }

    // Builds the problem message listing all weaker-class candidates, with a
    // wording variant per element kind (field/parameter/method/other).
    @NotNull
    private @InspectionMessage String getDescription(@NotNull PsiElement element, @NotNull Collection<PsiClass> weakerClasses) {
      @NonNls final StringBuilder builder = new StringBuilder();
      final Iterator<PsiClass> iterator = weakerClasses.iterator();
      if (iterator.hasNext()) {
        builder.append('\'').append(getClassName(iterator.next())).append('\'');
        while (iterator.hasNext()) {
          builder.append(", '").append(getClassName(iterator.next())).append('\'');
        }
      }
      if (element instanceof PsiField) {
        return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.field.problem.descriptor", builder.toString());
      }
      if (element instanceof PsiParameter) {
        return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.parameter.problem.descriptor", builder.toString());
      }
      if (element instanceof PsiMethod) {
        return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.method.problem.descriptor", builder.toString());
      }
      return InspectionGadgetsBundle.message("inspection.type.may.be.weakened.problem.descriptor", builder.toString());
    }
  }

  // Returns the names of 'from' itself plus every superclass of 'from' that is
  // still a subtype of 'to' — the candidate stop classes for this report.
  @NotNull
  private static List<String> getInheritors(@NotNull PsiClass from, @NotNull PsiClass to) {
    List<String> candidates = new ArrayList<>();
    String fromName = getClassName(from);
    if (fromName != null) {
      candidates.add(fromName);
    }
    for (PsiClass cls : InheritanceUtil.getSuperClasses(from)) {
      if (cls.isInheritor(to, true)) {
        String name = getClassName(cls);
        if (name == null) continue;
        candidates.add(name);
      }
    }
    return candidates;
  }
}
apache-2.0
whitingjr/JbossWeb_7_2_0
src/main/java/org/apache/jasper/servlet/JspServletWrapper.java
17211
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jasper.servlet; import static org.jboss.web.JasperMessages.MESSAGES; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URL; import javax.servlet.Servlet; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.SingleThreadModel; import javax.servlet.UnavailableException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.jsp.tagext.TagInfo; import org.apache.jasper.JasperException; import org.apache.jasper.JspCompilationContext; import org.apache.jasper.Options; import org.apache.jasper.compiler.ErrorDispatcher; import org.apache.jasper.compiler.JavacErrorDetail; import org.apache.jasper.compiler.JspRuntimeContext; import org.apache.jasper.runtime.InstanceManagerFactory; import org.apache.jasper.runtime.JspSourceDependent; import org.apache.tomcat.InstanceManager; import org.jboss.web.JasperLogger; /** * The JSP engine (a.k.a Jasper). * * The servlet container is responsible for providing a * URLClassLoader for the web application context Jasper * is being used in. 
Jasper will try get the Tomcat * ServletContext attribute for its ServletContext class * loader, if that fails, it uses the parent class loader. * In either case, it must be a URLClassLoader. * * @author Anil K. Vijendran * @author Harish Prabandham * @author Remy Maucherat * @author Kin-man Chung * @author Glenn Nielsen * @author Tim Fennell */ public class JspServletWrapper { private Servlet theServlet; private String jspUri; private Class tagHandlerClass; private JspCompilationContext ctxt; private long available = 0L; private ServletConfig config; private Options options; private boolean firstTime = true; private boolean reload = true; private boolean isTagFile; private int tripCount; private JasperException compileException; private long servletClassLastModifiedTime; private long lastModificationTest = 0L; /* * JspServletWrapper for JSP pages. */ public JspServletWrapper(ServletConfig config, Options options, String jspUri, boolean isErrorPage, JspRuntimeContext rctxt) throws JasperException { this.isTagFile = false; this.config = config; this.options = options; this.jspUri = jspUri; ctxt = new JspCompilationContext(jspUri, isErrorPage, options, config.getServletContext(), this, rctxt); } /* * JspServletWrapper for tag files. 
*/ public JspServletWrapper(ServletContext servletContext, Options options, String tagFilePath, TagInfo tagInfo, JspRuntimeContext rctxt, URL tagFileJarUrl) throws JasperException { this.isTagFile = true; this.config = null; // not used this.options = options; this.jspUri = tagFilePath; this.tripCount = 0; ctxt = new JspCompilationContext(jspUri, tagInfo, options, servletContext, this, rctxt, tagFileJarUrl); } public JspCompilationContext getJspEngineContext() { return ctxt; } public void setReload(boolean reload) { this.reload = reload; } public Servlet getServlet() throws ServletException, IOException, FileNotFoundException { if (reload) { synchronized (this) { // Synchronizing on jsw enables simultaneous loading // of different pages, but not the same page. if (reload) { // This is to maintain the original protocol. destroy(); Servlet servlet = null; try { InstanceManager instanceManager = InstanceManagerFactory.getInstanceManager(config); servlet = (Servlet) instanceManager.newInstance(ctxt.getFQCN(), ctxt.getJspLoader()); } catch (IllegalAccessException e) { throw new JasperException(e); } catch (InstantiationException e) { throw new JasperException(e); } catch (Exception e) { throw new JasperException(e); } servlet.init(config); if (!firstTime) { ctxt.getRuntimeContext().incrementJspReloadCount(); } theServlet = servlet; reload = false; } } } return theServlet; } public ServletContext getServletContext() { return ctxt.getServletContext(); } /** * Sets the compilation exception for this JspServletWrapper. * * @param je The compilation exception */ public void setCompilationException(JasperException je) { this.compileException = je; } /** * Sets the last-modified time of the servlet class file associated with * this JspServletWrapper. 
* * @param lastModified Last-modified time of servlet class */ public void setServletClassLastModifiedTime(long lastModified) { if (this.servletClassLastModifiedTime < lastModified) { synchronized (this) { if (this.servletClassLastModifiedTime < lastModified) { this.servletClassLastModifiedTime = lastModified; reload = true; } } } } /** * Compile (if needed) and load a tag file */ public Class loadTagFile() throws JasperException { try { if (ctxt.isRemoved()) { throw new FileNotFoundException(jspUri); } if (options.getDevelopment() || firstTime ) { synchronized (this) { firstTime = false; ctxt.compile(); } } else { if (compileException != null) { throw compileException; } } if (reload) { tagHandlerClass = ctxt.load(); reload = false; } } catch (FileNotFoundException ex) { throw new JasperException(ex); } return tagHandlerClass; } /** * Compile and load a prototype for the Tag file. This is needed * when compiling tag files with circular dependencies. A prototype * (skeleton) with no dependencies on other other tag files is * generated and compiled. */ public Class loadTagFilePrototype() throws JasperException { ctxt.setPrototypeMode(true); try { return loadTagFile(); } finally { ctxt.setPrototypeMode(false); } } /** * Get a list of files that the current page has source dependency on. 
*/ public java.util.List getDependants() { try { Object target; if (isTagFile) { if (reload) { tagHandlerClass = ctxt.load(); reload = false; } target = tagHandlerClass.newInstance(); } else { target = getServlet(); } if (target != null && target instanceof JspSourceDependent) { return ((java.util.List) ((JspSourceDependent) target).getDependants()); } } catch (Throwable ex) { } return null; } public boolean isTagFile() { return this.isTagFile; } public int incTripCount() { return tripCount++; } public int decTripCount() { return tripCount--; } public void service(HttpServletRequest request, HttpServletResponse response, boolean precompile) throws ServletException, IOException, FileNotFoundException { try { if (ctxt.isRemoved()) { throw new FileNotFoundException(jspUri); } if ((available > 0L) && (available < Long.MAX_VALUE)) { if (available > System.currentTimeMillis()) { response.setDateHeader("Retry-After", available); response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, MESSAGES.unavailable()); return; } else { // Wait period has expired. Reset. available = 0; } } /* * (1) Compile */ if (options.getDevelopment() || firstTime ) { synchronized (this) { firstTime = false; // The following sets reload to true, if necessary ctxt.compile(); } } else { if (compileException != null) { // Throw cached compilation exception throw compileException; } } /* * (2) (Re)load servlet class file */ getServlet(); // If a page is to be precompiled only, return. 
if (precompile) { return; } } catch (ServletException ex) { if (options.getDevelopment()) { throw handleJspException(ex); } else { throw ex; } } catch (IOException ex) { if (options.getDevelopment()) { throw handleJspException(ex); } else { throw ex; } } catch (IllegalStateException ex) { if (options.getDevelopment()) { throw handleJspException(ex); } else { throw ex; } } catch (Exception ex) { if (options.getDevelopment()) { throw handleJspException(ex); } else { throw new JasperException(ex); } } try { /* * (3) Service request */ if (theServlet instanceof SingleThreadModel) { // sync on the wrapper so that the freshness // of the page is determined right before servicing synchronized (this) { theServlet.service(request, response); } } else { theServlet.service(request, response); } } catch (UnavailableException ex) { String includeRequestUri = (String) request.getAttribute("javax.servlet.include.request_uri"); if (includeRequestUri != null) { // This file was included. Throw an exception as // a response.sendError() will be ignored by the // servlet engine. 
throw ex; } else { int unavailableSeconds = ex.getUnavailableSeconds(); if (unavailableSeconds <= 0) { unavailableSeconds = 60; // Arbitrary default } available = System.currentTimeMillis() + (unavailableSeconds * 1000L); response.sendError (HttpServletResponse.SC_SERVICE_UNAVAILABLE, ex.getMessage()); } } catch (ServletException ex) { if(options.getDevelopment()) { throw handleJspException(ex); } else { throw ex; } } catch (IOException ex) { if(options.getDevelopment()) { throw handleJspException(ex); } else { throw ex; } } catch (IllegalStateException ex) { if(options.getDevelopment()) { throw handleJspException(ex); } else { throw ex; } } catch (Exception ex) { if(options.getDevelopment()) { throw handleJspException(ex); } else { throw new JasperException(ex); } } } public void destroy() { if (theServlet != null) { theServlet.destroy(); InstanceManager instanceManager = InstanceManagerFactory.getInstanceManager(config); try { instanceManager.destroyInstance(theServlet); } catch (Exception e) { // Log any exception, since it can't be passed along JasperLogger.SERVLET_LOGGER.errorDestroyingServletInstance(e); } } } /** * @return Returns the lastModificationTest. */ public long getLastModificationTest() { return lastModificationTest; } /** * @param lastModificationTest The lastModificationTest to set. */ public void setLastModificationTest(long lastModificationTest) { this.lastModificationTest = lastModificationTest; } /** * <p>Attempts to construct a JasperException that contains helpful information * about what went wrong. Uses the JSP compiler system to translate the line * number in the generated servlet that originated the exception to a line * number in the JSP. Then constructs an exception containing that * information, and a snippet of the JSP to help debugging. * Please see http://issues.apache.org/bugzilla/show_bug.cgi?id=37062 and * http://www.tfenne.com/jasper/ for more details. *</p> * * @param ex the exception that was the cause of the problem. 
* @return a JasperException with more detailed information */ protected JasperException handleJspException(Exception ex) { try { Throwable realException = ex; if (ex instanceof ServletException) { realException = ((ServletException) ex).getRootCause(); } // First identify the stack frame in the trace that represents the JSP StackTraceElement[] frames = realException.getStackTrace(); StackTraceElement jspFrame = null; for (int i=0; i<frames.length; ++i) { if ( frames[i].getClassName().equals(this.getServlet().getClass().getName()) ) { jspFrame = frames[i]; break; } } if (jspFrame == null || ctxt.getCompiler().getPageNodes() == null) { // If we couldn't find a frame in the stack trace corresponding // to the generated servlet class or we don't have a copy of the // parsed JSP to hand, we can't really add anything return new JasperException(ex); } else { int javaLineNumber = jspFrame.getLineNumber(); JavacErrorDetail detail = ErrorDispatcher.createJavacError( jspFrame.getMethodName(), this.ctxt.getCompiler().getPageNodes(), null, javaLineNumber, ctxt); // If the line number is less than one we couldn't find out // where in the JSP things went wrong int jspLineNumber = detail.getJspBeginLineNumber(); if (jspLineNumber < 1) { throw new JasperException(ex); } if (options.getDisplaySourceFragment()) { return new JasperException(MESSAGES.jspExceptionWithDetails(detail.getJspFileName(), jspLineNumber, detail.getJspExtract()), ex); } else { return new JasperException(MESSAGES.jspException(detail.getJspFileName(), jspLineNumber), ex); } } } catch (Exception je) { // If anything goes wrong, just revert to the original behaviour if (ex instanceof JasperException) { return (JasperException) ex; } else { return new JasperException(ex); } } } }
apache-2.0
jhrcek/kie-wb-common
kie-wb-common-stunner/kie-wb-common-stunner-client/kie-wb-common-stunner-widgets/src/main/java/org/kie/workbench/common/stunner/client/widgets/event/LoadDiagramEvent.java
1323
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.stunner.client.widgets.event; import org.uberfire.workbench.events.UberFireEvent; /** * <p>Event when a load diagram operation is requested.</p> */ public final class LoadDiagramEvent implements UberFireEvent { private final String uri; private final String name; public LoadDiagramEvent(final String uri, final String name) { this.name = name; this.uri = uri; } public String getURI() { return uri; } public String getName() { return name; } @Override public String toString() { return "LoadDiagramEvent [name=" + name + ", uri=" + uri + "]"; } }
apache-2.0
ifunny/SmoothProgressBar
library-circular/src/main/java/fr.castorflex.android.circularprogressbar/DefaultDelegate.java
8862
package fr.castorflex.android.circularprogressbar; import android.animation.Animator; import android.animation.ArgbEvaluator; import android.animation.ValueAnimator; import android.graphics.Canvas; import android.graphics.Paint; import android.support.annotation.NonNull; import android.view.animation.Interpolator; import android.view.animation.LinearInterpolator; import static fr.castorflex.android.circularprogressbar.Utils.getAnimatedFraction; class DefaultDelegate implements PBDelegate { private static final ArgbEvaluator COLOR_EVALUATOR = new ArgbEvaluator(); private static final Interpolator END_INTERPOLATOR = new LinearInterpolator(); private static final long ROTATION_ANIMATOR_DURATION = 2000; private static final long SWEEP_ANIMATOR_DURATION = 600; private static final long END_ANIMATOR_DURATION = 200; private ValueAnimator mSweepAppearingAnimator; private ValueAnimator mSweepDisappearingAnimator; private ValueAnimator mRotationAnimator; private ValueAnimator mEndAnimator; private boolean mModeAppearing; private int mCurrentColor; private int mCurrentIndexColor; private float mCurrentSweepAngle; private float mCurrentRotationAngleOffset = 0; private float mCurrentRotationAngle = 0; private float mCurrentEndRatio = 1f; private boolean mFirstSweepAnimation; //params private Interpolator mAngleInterpolator; private Interpolator mSweepInterpolator; private int[] mColors; private float mSweepSpeed; private float mRotationSpeed; private int mMinSweepAngle; private int mMaxSweepAngle; private CircularProgressDrawable mParent; private CircularProgressDrawable.OnEndListener mOnEndListener; public DefaultDelegate(@NonNull CircularProgressDrawable parent, @NonNull Options options) { mParent = parent; mSweepInterpolator = options.sweepInterpolator; mAngleInterpolator = options.angleInterpolator; mCurrentIndexColor = 0; mColors = options.colors; mCurrentColor = mColors[0]; mSweepSpeed = options.sweepSpeed; mRotationSpeed = options.rotationSpeed; mMinSweepAngle = 
options.minSweepAngle; mMaxSweepAngle = options.maxSweepAngle; setupAnimations(); } private void reinitValues() { mFirstSweepAnimation = true; mCurrentEndRatio = 1f; mParent.getCurrentPaint().setColor(mCurrentColor); } @Override public void draw(Canvas canvas, Paint paint) { float startAngle = mCurrentRotationAngle - mCurrentRotationAngleOffset; float sweepAngle = mCurrentSweepAngle; if (!mModeAppearing) { startAngle = startAngle + (360 - sweepAngle); } startAngle %= 360; if (mCurrentEndRatio < 1f) { float newSweepAngle = sweepAngle * mCurrentEndRatio; startAngle = (startAngle + (sweepAngle - newSweepAngle)) % 360; sweepAngle = newSweepAngle; } canvas.drawArc(mParent.getDrawableBounds(), startAngle, sweepAngle, false, paint); } @Override public void start() { mEndAnimator.cancel(); reinitValues(); mRotationAnimator.start(); mSweepAppearingAnimator.start(); } @Override public void stop() { stopAnimators(); } private void stopAnimators() { mRotationAnimator.cancel(); mSweepAppearingAnimator.cancel(); mSweepDisappearingAnimator.cancel(); mEndAnimator.cancel(); } private void setAppearing() { mModeAppearing = true; mCurrentRotationAngleOffset += mMinSweepAngle; } private void setDisappearing() { mModeAppearing = false; mCurrentRotationAngleOffset = mCurrentRotationAngleOffset + (360 - mMaxSweepAngle); } public void setCurrentRotationAngle(float currentRotationAngle) { mCurrentRotationAngle = currentRotationAngle; mParent.invalidate(); } public void setCurrentSweepAngle(float currentSweepAngle) { mCurrentSweepAngle = currentSweepAngle; mParent.invalidate(); } private void setEndRatio(float ratio) { mCurrentEndRatio = ratio; mParent.invalidate(); } ////////////////////////////////////////////////////////////////////////////// //////////////// Animation private void setupAnimations() { mRotationAnimator = ValueAnimator.ofFloat(0f, 360f); mRotationAnimator.setInterpolator(mAngleInterpolator); mRotationAnimator.setDuration((long) (ROTATION_ANIMATOR_DURATION / 
mRotationSpeed)); mRotationAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { float angle = getAnimatedFraction(animation) * 360f; setCurrentRotationAngle(angle); } }); mRotationAnimator.setRepeatCount(ValueAnimator.INFINITE); mRotationAnimator.setRepeatMode(ValueAnimator.RESTART); mSweepAppearingAnimator = ValueAnimator.ofFloat(mMinSweepAngle, mMaxSweepAngle); mSweepAppearingAnimator.setInterpolator(mSweepInterpolator); mSweepAppearingAnimator.setDuration((long) (SWEEP_ANIMATOR_DURATION / mSweepSpeed)); mSweepAppearingAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { float animatedFraction = getAnimatedFraction(animation); float angle; if (mFirstSweepAnimation) { angle = animatedFraction * mMaxSweepAngle; } else { angle = mMinSweepAngle + animatedFraction * (mMaxSweepAngle - mMinSweepAngle); } setCurrentSweepAngle(angle); } }); mSweepAppearingAnimator.addListener(new SimpleAnimatorListener() { @Override public void onAnimationStart(Animator animation) { super.onAnimationStart(animation); mModeAppearing = true; } @Override protected void onPreAnimationEnd(Animator animation) { if (isStartedAndNotCancelled()) { mFirstSweepAnimation = false; setDisappearing(); mSweepDisappearingAnimator.start(); } } }); mSweepDisappearingAnimator = ValueAnimator.ofFloat(mMaxSweepAngle, mMinSweepAngle); mSweepDisappearingAnimator.setInterpolator(mSweepInterpolator); mSweepDisappearingAnimator.setDuration((long) (SWEEP_ANIMATOR_DURATION / mSweepSpeed)); mSweepDisappearingAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { float animatedFraction = getAnimatedFraction(animation); setCurrentSweepAngle(mMaxSweepAngle - animatedFraction * (mMaxSweepAngle - mMinSweepAngle)); long duration = animation.getDuration(); long played = 
animation.getCurrentPlayTime(); float fraction = (float) played / duration; if (mColors.length > 1 && fraction > .7f) { //because int prevColor = mCurrentColor; int nextColor = mColors[(mCurrentIndexColor + 1) % mColors.length]; int newColor = (Integer) COLOR_EVALUATOR.evaluate((fraction - .7f) / (1 - .7f), prevColor, nextColor); mParent.getCurrentPaint().setColor(newColor); } } }); mSweepDisappearingAnimator.addListener(new SimpleAnimatorListener() { @Override protected void onPreAnimationEnd(Animator animation) { if (isStartedAndNotCancelled()) { setAppearing(); mCurrentIndexColor = (mCurrentIndexColor + 1) % mColors.length; mCurrentColor = mColors[mCurrentIndexColor]; mParent.getCurrentPaint().setColor(mCurrentColor); mSweepAppearingAnimator.start(); } } }); mEndAnimator = ValueAnimator.ofFloat(1f, 0f); mEndAnimator.setInterpolator(END_INTERPOLATOR); mEndAnimator.setDuration(END_ANIMATOR_DURATION); mEndAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { setEndRatio(1f - getAnimatedFraction(animation)); } }); } ///////////////////////////////////////////////////////// /// Stop ///////////////////////////////////////////////////////// @Override public void progressiveStop(CircularProgressDrawable.OnEndListener listener) { if (!mParent.isRunning() || mEndAnimator.isRunning()) { return; } mOnEndListener = listener; mEndAnimator.addListener(new SimpleAnimatorListener() { @Override public void onPreAnimationEnd(Animator animation) { mEndAnimator.removeListener(this); CircularProgressDrawable.OnEndListener endListener = mOnEndListener; mOnEndListener = null; if(isStartedAndNotCancelled()) { setEndRatio(0f); mParent.stop(); if (endListener != null) { endListener.onEnd(mParent); } } } }); mEndAnimator.start(); } }
apache-2.0
dbrimley/hazelcast
hazelcast/src/test/java/com/hazelcast/query/impl/DefaultArgumentParserTest.java
1623
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.query.impl; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.ParallelTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelTest.class}) public class DefaultArgumentParserTest { private final DefaultArgumentParser parser = new DefaultArgumentParser(); @Test public void passThrough_correctArgument() { // WHEN Object arguments = parser.parse("123"); // THEN assertThat((String) arguments, equalTo("123")); } @Test public void passThrough_null() { // WHEN Object arguments = parser.parse(null); // THEN assertThat(arguments, equalTo(null)); } }
apache-2.0
lasombra/rhiot
gateway/components/camel-gpsd/src/test/java/io/rhiot/component/gpsd/GpsdComponentIntegrationTest.java
4275
/** * Licensed to the Rhiot under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.rhiot.component.gpsd; import de.taimos.gpsd4java.types.TPVObject; import io.rhiot.scanner.Device; import io.rhiot.scanner.DeviceDetector; import io.rhiot.scanner.SimplePortScanningDeviceDetector; import io.rhiot.utils.Networks; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.test.junit4.CamelTestSupport; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; import java.util.concurrent.TimeUnit; import static java.lang.Boolean.parseBoolean; import static java.lang.System.getenv; import static org.junit.Assume.assumeFalse; import static org.junit.Assume.assumeTrue; import static io.rhiot.utils.Properties.booleanProperty; /** * This test detects the Raspberry Pi on the network and consumes GPSD messages from the socket. * NB GPSD on the Raspberry Pi must be listening on all interfaces, ie gpsd -G /dev/ttyUSB0 , the default is private. 
*/ public class GpsdComponentIntegrationTest extends CamelTestSupport { private static final Logger LOG = LoggerFactory.getLogger(GpsdComponentIntegrationTest.class); static DeviceDetector deviceDetector = new SimplePortScanningDeviceDetector(); static String piAddress; static List<Device> devices; static boolean isRpiAvailable; @BeforeClass public static void beforeClass() { assumeFalse(parseBoolean(getenv("IS_TRAVIS"))); devices = deviceDetector.detectDevices(); piAddress = devices.size() == 1 ? devices.get(0).address().getHostAddress() : "localhost"; isRpiAvailable = devices.size() == 1 && devices.get(0).type().equals(Device.DEVICE_RASPBERRY_PI_2) && Networks.available(piAddress, GpsdConstants.DEFAULT_PORT); //If Pi is available and the default GPSD port is open then test that if (isRpiAvailable) { LOG.debug("Pi is available to test"); } else { //Otherwise assume the test is explicitly set to run if the port is available assumeTrue(booleanProperty("RUN_GPS_INTEGRATION_TESTS", false)); assumeTrue("GPSD port is expected to be available", Networks.available(GpsdConstants.DEFAULT_PORT)); } } @AfterClass public static void afterClass() { deviceDetector.close(); } @Test public void testGpsd() throws Exception { MockEndpoint mock = getMockEndpoint("mock:foo"); mock.expectedMinimumMessageCount(9); //Should get at least 9 messages within 10 seconds assertMockEndpointsSatisfied(10, TimeUnit.SECONDS); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { public void configure() { from("gpsd://gpsSpeedTest?host=" + piAddress).routeId("gpsdSpeed") .process(exchange -> { TPVObject tpvObject = exchange.getIn().getHeader(GpsdConstants.TPV_HEADER, TPVObject.class); if (tpvObject.getSpeed() > 0) { log.warn("Moving at [{}] meters/second, course [{}]", tpvObject.getSpeed(), tpvObject.getCourse()); } else { log.info("GPS is stationary"); } }).to("mock:foo") ; } }; } }
apache-2.0
GabrielBrascher/cloudstack
engine/schema/src/main/java/com/cloud/upgrade/dao/Upgrade451to452.java
2207
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.upgrade.dao; import com.cloud.utils.exception.CloudRuntimeException; import java.io.InputStream; import java.sql.Connection; public class Upgrade451to452 implements DbUpgrade { @Override public String[] getUpgradableVersionRange() { return new String[] {"4.5.1", "4.5.2"}; } @Override public String getUpgradedVersion() { return "4.5.2"; } @Override public boolean supportsRollingUpgrade() { return false; } @Override public InputStream[] getPrepareScripts() { final String scriptFile = "META-INF/db/schema-451to452.sql"; final InputStream script = Thread.currentThread().getContextClassLoader().getResourceAsStream(scriptFile); if (script == null) { throw new CloudRuntimeException("Unable to find " + scriptFile); } return new InputStream[] {script}; } @Override public void performDataMigration(Connection conn) { } @Override public InputStream[] getCleanupScripts() { final String scriptFile = "META-INF/db/schema-451to452-cleanup.sql"; final InputStream script = Thread.currentThread().getContextClassLoader().getResourceAsStream(scriptFile); if (script == null) { throw new CloudRuntimeException("Unable to find " + scriptFile); } return new InputStream[] {script}; } }
apache-2.0
s1monw/elasticsearch
server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
10603
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.shard; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.resync.ResyncReplicationResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.tasks.TaskManager; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Collections; import java.util.concurrent.CountDownLatch; import 
java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.core.IsInstanceOf.instanceOf; public class PrimaryReplicaSyncerTests extends IndexShardTestCase { public void testSyncerSendsOffCorrectDocuments() throws Exception { IndexShard shard = newStartedShard(true); TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); AtomicBoolean syncActionCalled = new AtomicBoolean(); PrimaryReplicaSyncer.SyncAction syncAction = (request, parentTask, allocationId, primaryTerm, listener) -> { logger.info("Sending off {} operations", request.getOperations().length); syncActionCalled.set(true); assertThat(parentTask, instanceOf(PrimaryReplicaSyncer.ResyncTask.class)); listener.onResponse(new ResyncReplicationResponse()); }; PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, taskManager, syncAction); syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 100))); int numDocs = randomInt(10); for (int i = 0; i < numDocs; i++) { // Index doc but not advance local checkpoint. shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, SourceToParse.source(shard.shardId().getIndexName(), "test", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, getMappingUpdater(shard, "test")); } long globalCheckPoint = numDocs > 0 ? 
randomIntBetween(0, numDocs - 1) : 0; boolean syncNeeded = numDocs > 0 && globalCheckPoint < numDocs - 1; String allocationId = shard.routingEntry().allocationId().getId(); shard.updateShardState(shard.routingEntry(), shard.getPrimaryTerm(), null, 1000L, Collections.singleton(allocationId), new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build(), Collections.emptySet()); shard.updateLocalCheckpointForShard(allocationId, globalCheckPoint); assertEquals(globalCheckPoint, shard.getGlobalCheckpoint()); logger.info("Total ops: {}, global checkpoint: {}", numDocs, globalCheckPoint); PlainActionFuture<PrimaryReplicaSyncer.ResyncTask> fut = new PlainActionFuture<>(); syncer.resync(shard, fut); fut.get(); if (syncNeeded) { assertTrue("Sync action was not called", syncActionCalled.get()); } assertEquals(globalCheckPoint == numDocs - 1 ? 0 : numDocs, fut.get().getTotalOperations()); if (syncNeeded) { long skippedOps = globalCheckPoint + 1; // everything up to global checkpoint included assertEquals(skippedOps, fut.get().getSkippedOperations()); assertEquals(numDocs - skippedOps, fut.get().getResyncedOperations()); } else { assertEquals(0, fut.get().getSkippedOperations()); assertEquals(0, fut.get().getResyncedOperations()); } closeShards(shard); } public void testSyncerOnClosingShard() throws Exception { IndexShard shard = newStartedShard(true); AtomicBoolean syncActionCalled = new AtomicBoolean(); CountDownLatch syncCalledLatch = new CountDownLatch(1); PrimaryReplicaSyncer.SyncAction syncAction = (request, parentTask, allocationId, primaryTerm, listener) -> { logger.info("Sending off {} operations", request.getOperations().length); syncActionCalled.set(true); syncCalledLatch.countDown(); threadPool.generic().execute(() -> listener.onResponse(new ResyncReplicationResponse())); }; PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), syncAction); 
syncer.setChunkSize(new ByteSizeValue(1)); // every document is sent off separately int numDocs = 10; for (int i = 0; i < numDocs; i++) { // Index doc but not advance local checkpoint. shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, SourceToParse.source(shard.shardId().getIndexName(), "test", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, getMappingUpdater(shard, "test")); } String allocationId = shard.routingEntry().allocationId().getId(); shard.updateShardState(shard.routingEntry(), shard.getPrimaryTerm(), null, 1000L, Collections.singleton(allocationId), new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build(), Collections.emptySet()); PlainActionFuture<PrimaryReplicaSyncer.ResyncTask> fut = new PlainActionFuture<>(); threadPool.generic().execute(() -> { try { syncer.resync(shard, fut); } catch (AlreadyClosedException ace) { fut.onFailure(ace); } }); if (randomBoolean()) { syncCalledLatch.await(); } closeShards(shard); try { fut.actionGet(); assertTrue("Sync action was not called", syncActionCalled.get()); } catch (AlreadyClosedException | IndexShardClosedException ignored) { // ignore } } public void testStatusSerialization() throws IOException { PrimaryReplicaSyncer.ResyncTask.Status status = new PrimaryReplicaSyncer.ResyncTask.Status(randomAlphaOfLength(10), randomIntBetween(0, 1000), randomIntBetween(0, 1000), randomIntBetween(0, 1000)); final BytesStreamOutput out = new BytesStreamOutput(); status.writeTo(out); final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytesRef().bytes)); PrimaryReplicaSyncer.ResyncTask.Status serializedStatus = new PrimaryReplicaSyncer.ResyncTask.Status(in); assertEquals(status, serializedStatus); } public void testStatusEquals() throws IOException { PrimaryReplicaSyncer.ResyncTask task = new PrimaryReplicaSyncer.ResyncTask(0, "type", "action", "desc", null, 
Collections.emptyMap()); task.setPhase(randomAlphaOfLength(10)); task.setResyncedOperations(randomIntBetween(0, 1000)); task.setTotalOperations(randomIntBetween(0, 1000)); task.setSkippedOperations(randomIntBetween(0, 1000)); PrimaryReplicaSyncer.ResyncTask.Status status = task.getStatus(); PrimaryReplicaSyncer.ResyncTask.Status sameStatus = task.getStatus(); assertNotSame(status, sameStatus); assertEquals(status, sameStatus); assertEquals(status.hashCode(), sameStatus.hashCode()); switch (randomInt(3)) { case 0: task.setPhase("otherPhase"); break; case 1: task.setResyncedOperations(task.getResyncedOperations() + 1); break; case 2: task.setSkippedOperations(task.getSkippedOperations() + 1); break; case 3: task.setTotalOperations(task.getTotalOperations() + 1); break; } PrimaryReplicaSyncer.ResyncTask.Status differentStatus = task.getStatus(); assertNotEquals(status, differentStatus); } public void testStatusReportsCorrectNumbers() throws IOException { PrimaryReplicaSyncer.ResyncTask task = new PrimaryReplicaSyncer.ResyncTask(0, "type", "action", "desc", null, Collections.emptyMap()); task.setPhase(randomAlphaOfLength(10)); task.setResyncedOperations(randomIntBetween(0, 1000)); task.setTotalOperations(randomIntBetween(0, 1000)); task.setSkippedOperations(randomIntBetween(0, 1000)); PrimaryReplicaSyncer.ResyncTask.Status status = task.getStatus(); XContentBuilder jsonBuilder = XContentFactory.jsonBuilder(); status.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS); String jsonString = Strings.toString(jsonBuilder); assertThat(jsonString, containsString("\"phase\":\"" + task.getPhase() + "\"")); assertThat(jsonString, containsString("\"totalOperations\":" + task.getTotalOperations())); assertThat(jsonString, containsString("\"resyncedOperations\":" + task.getResyncedOperations())); assertThat(jsonString, containsString("\"skippedOperations\":" + task.getSkippedOperations())); } }
apache-2.0
smecsia/swagger-jaxrs-doclet
swagger-doclet/src/test/resources/fixtures/resourceinheritance/AbstractResource.java
420
package fixtures.resourceinheritance; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; /** * The AbstractResource represents a base resource class */ @SuppressWarnings("javadoc") public abstract class AbstractResource { @GET @Path("{id}") public String getById(@PathParam("id") String id) { return getResourceById(id); } protected abstract String getResourceById(String id); }
apache-2.0
peterl1084/framework
uitest/src/main/java/com/vaadin/tests/components/datefield/DateFields.java
3189
package com.vaadin.tests.components.datefield; import java.time.LocalDate; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import com.vaadin.shared.ui.datefield.DateResolution; import com.vaadin.tests.components.ComponentTestCase; import com.vaadin.ui.Component; import com.vaadin.ui.DateField; @SuppressWarnings("serial") public class DateFields extends ComponentTestCase<DateField> { private static final Locale[] LOCALES = new Locale[] { Locale.US, Locale.TAIWAN, new Locale("fi", "FI") }; @Override protected Class<DateField> getTestClass() { return DateField.class; } @Override protected void initializeComponents() { for (Locale locale : LOCALES) { DateField pd = createPopupDateField("Undefined width", "-1", locale); pd.setId("Locale-" + locale.toString() + "-undefined-wide"); addTestComponent(pd); pd = createPopupDateField("500px width", "500px", locale); pd.setId("Locale-" + locale.toString() + "-500px-wide"); addTestComponent(pd); pd = createPopupDateField("Initially empty", "", locale); pd.setValue(null); pd.setId("Locale-" + locale.toString() + "-initially-empty"); addTestComponent(pd); } } private DateField createPopupDateField(String caption, String width, Locale locale) { DateField pd = new DateField(caption + "(" + locale.toString() + ")"); pd.setWidth(width); pd.setValue(LocalDate.of(1970, 05, 23)); pd.setLocale(locale); pd.setResolution(DateResolution.YEAR); return pd; } @Override protected String getTestDescription() { return "A generic test for PopupDateFields in different configurations"; } @Override protected List<Component> createActions() { List<Component> actions = super.createActions(); actions.add(createResolutionSelectAction()); actions.add(createInputPromptSelectAction()); return actions; } private Component createResolutionSelectAction() { LinkedHashMap<String, DateResolution> options = new LinkedHashMap<>(); options.put("Year", DateResolution.YEAR); options.put("Month", DateResolution.MONTH); options.put("Day", 
DateResolution.DAY); return createSelectAction("Resolution", options, "Year", (field, value, data) -> field.setResolution(value)); } private Component createInputPromptSelectAction() { LinkedHashMap<String, String> options = new LinkedHashMap<>(); options.put("<none>", null); options.put("Please enter date", "Please enter date"); options.put("åäöÅÄÖ", "åäöÅÄÖ"); return createSelectAction("Input prompt", options, "<none>", new Command<DateField, String>() { @Override public void execute(DateField c, String value, Object data) { c.setPlaceholder(value); } }); } }
apache-2.0