gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* Copyright 2004-2006 University Corporation for Advanced Internet Development, Inc. Copyright 2004-2006 The University Of Chicago Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package edu.internet2.middleware.grouper; import net.sf.hibernate.HibernateException; import net.sf.hibernate.Session; import net.sf.hibernate.SessionFactory; import net.sf.hibernate.Transaction; import net.sf.hibernate.cfg.Configuration; import java.io.InputStream; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.Properties; import java.util.Set; /** * Hibernate utility helper class. * <p/> * This code was initially derived from code in the book <i>Hibernate In * Action</i>. * <p/> * @author blair christensen. 
* @version $Id: HibernateHelper.java,v 1.27 2006/07/14 17:10:54 blair Exp $ */ class GridGrouperHibernateHelper { // PRIVATE CLASS CONSTANTS // private static final Configuration CFG; private static final SessionFactory FACTORY; // STATIC // static { try { // Find the custom configuration file InputStream in = GridGrouperHibernateHelper.class.getResourceAsStream(GrouperConfig.HIBERNATE_CF); Properties p = new Properties(); p.load(in); // And now load all configuration information CFG = new Configuration() .addProperties(p) .addClass(Attribute.class) .addClass(Field.class) .addClass(GrouperSession.class) .addClass(GroupType.class) .addClass(HibernateSubject.class) .addClass(HibernateSubjectAttribute.class) .addClass(Member.class) .addClass(Membership.class) .addClass(Owner.class) .addClass(Settings.class) .addClass(MembershipRequest.class) .addClass(MembershipRequestHistory.class) ; // And finally create our session factory FACTORY = CFG.buildSessionFactory(); } catch (Throwable t) { // Catch *all* the errors String msg = E.HIBERNATE_INIT + t.getMessage(); ErrorLog.fatal(GridGrouperHibernateHelper.class, msg); throw new ExceptionInInitializerError(t); } } // static // PROTECTED CLASS METHODS // // Delete multiple objects in one transaction // @throws HibernateException protected static void delete(Set objects) throws HibernateException { Object err = null; String msg = "delete"; DebugLog.info(GridGrouperHibernateHelper.class, msg + ": will delete " + objects.size()); try { Session hs = GridGrouperHibernateHelper.getSession(); Transaction tx = hs.beginTransaction(); Object o ; Iterator iter = objects.iterator(); try { while (iter.hasNext()) { o = iter.next(); err = o; DebugLog.info(GridGrouperHibernateHelper.class, msg + ": deleting " + o); hs.delete( _getPersistent(hs, o) ); DebugLog.info(GridGrouperHibernateHelper.class, msg + ": deleted " + o); } tx.commit(); } catch (HibernateException eH) { msg += ": unable to delete " + err + ": " + eH.getMessage(); 
tx.rollback(); throw new HibernateException(msg, eH); } finally { hs.close(); } } catch (HibernateException eH) { msg = E.HIBERNATE + eH.getMessage(); ErrorLog.error(GridGrouperHibernateHelper.class, msg); throw new HibernateException(msg, eH); } DebugLog.info(GridGrouperHibernateHelper.class, msg + ": deleted " + objects.size()); } // protected static void delete(objects) // @return A Hibernate session protected static Session getSession() throws HibernateException { return FACTORY.openSession(); } // protected static Session getSession() // Save an object // @throws HibernateException protected static void save(Object o) throws HibernateException { Set objects = new LinkedHashSet(); objects.add(o); GridGrouperHibernateHelper.save(objects); } // protected static void save(o) // Save multiple objects in one transaction // @throws HibernateException protected static void save(Set objects) throws HibernateException { Object err = null; String msg = "save"; DebugLog.info(GridGrouperHibernateHelper.class, msg + ": will save " + objects.size()); try { Session hs = GridGrouperHibernateHelper.getSession(); Transaction tx = hs.beginTransaction(); Object o; Iterator iter = objects.iterator(); try { while (iter.hasNext()) { o = iter.next(); err = o; DebugLog.info(GridGrouperHibernateHelper.class, msg + ": saving " + o); hs.saveOrUpdate(o); DebugLog.info(GridGrouperHibernateHelper.class, msg + ": saved " + o); } tx.commit(); } catch (HibernateException eH) { msg += ": unable to save " + err + ": " + eH.getMessage(); tx.rollback(); throw new HibernateException(msg, eH); } finally { hs.close(); } } catch (HibernateException eH) { msg = E.HIBERNATE + eH.getMessage(); ErrorLog.error(GridGrouperHibernateHelper.class, msg); throw new HibernateException(msg, eH); } DebugLog.info(GridGrouperHibernateHelper.class, msg + ": saved " + objects.size()); } // protected static void save(objects) protected static void saveAndDelete(Set saves, Set deletes) throws HibernateException { try { 
Session hs = GridGrouperHibernateHelper.getSession(); Transaction tx = hs.beginTransaction(); Object oD; Object oS; Iterator iterD = deletes.iterator(); Iterator iterS = saves.iterator(); try { while (iterD.hasNext()) { oD = iterD.next(); try { hs.delete( _getPersistent(hs, oD) ); } catch (HibernateException eH) { String msg = E.HIBERNATE + "unable to delete " + oD + ": " + eH.getMessage(); throw new HibernateException(msg, eH); } } while (iterS.hasNext()) { oS = iterS.next(); try { hs.saveOrUpdate(oS); } catch (HibernateException eH) { String msg = E.HIBERNATE + "unable to save " + oS + ": " + eH.getMessage(); throw new HibernateException(msg, eH); } } try { tx.commit(); } catch (HibernateException eH) { String msg = E.HIBERNATE_COMMIT + eH.getMessage(); throw new HibernateException(msg, eH); } } catch (HibernateException eH) { tx.rollback(); throw new HibernateException(eH.getMessage(), eH); } finally { hs.close(); } } catch (HibernateException eH) { String msg = eH.getMessage(); ErrorLog.error(GridGrouperHibernateHelper.class, msg); throw new HibernateException(msg, eH); } DebugLog.info(GridGrouperHibernateHelper.class, "saved: " + saves.size() + " deleted: " + deletes.size()); } // protected static void saveAndDelete(saves, deletes) // PRIVATE CLASS METHODS // private static Object _getPersistent(Session hs, Object o) { boolean persistent = false; if (hs.contains(o)) { persistent = true; } else { try { hs.update(o); if (hs.contains(o)) { persistent = true; } } catch (HibernateException eH) { ErrorLog.error(GridGrouperHibernateHelper.class, E.HH_GETPERSISTENT + eH.getMessage()); } } if (persistent == false) { // I think this was done in an effort to try and get an actual stacktrace // but does it actually work? 
try { throw new GrouperRuntimeException(); } catch (GrouperRuntimeException eGR) { String msg = E.HIBERNATE_GETPERSISTENT + o + ":" + eGR.getMessage(); ErrorLog.fatal(GridGrouperHibernateHelper.class, msg); eGR.printStackTrace(); throw new GrouperRuntimeException(msg, eGR); } } return o; } // private static Object _getPersistent(hs, o) }
/******************************************************************************* * Copyright 2011-2013 Sergey Tarasevich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.nostra13.universalimageloader.core; import android.graphics.Bitmap; import android.os.Handler; import android.os.Looper; import android.text.TextUtils; import android.view.View; import android.widget.ImageView; import com.nostra13.universalimageloader.cache.disc.DiskCache; import com.nostra13.universalimageloader.cache.memory.MemoryCache; import com.nostra13.universalimageloader.core.assist.FailReason; import com.nostra13.universalimageloader.core.assist.FlushedInputStream; import com.nostra13.universalimageloader.core.assist.ImageSize; import com.nostra13.universalimageloader.core.assist.LoadedFrom; import com.nostra13.universalimageloader.core.assist.ViewScaleType; import com.nostra13.universalimageloader.core.imageaware.ImageAware; import com.nostra13.universalimageloader.core.imageaware.ImageViewAware; import com.nostra13.universalimageloader.core.imageaware.NonViewAware; import com.nostra13.universalimageloader.core.listener.ImageLoadingListener; import com.nostra13.universalimageloader.core.listener.ImageLoadingProgressListener; import com.nostra13.universalimageloader.core.listener.SimpleImageLoadingListener; import com.nostra13.universalimageloader.utils.ImageSizeUtils; import 
com.nostra13.universalimageloader.utils.L; import com.nostra13.universalimageloader.utils.MemoryCacheUtils; /** * Singletone for image loading and displaying at {@link ImageView ImageViews}<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before any other method. * * @author Sergey Tarasevich (nostra13[at]gmail[dot]com) * @since 1.0.0 */ public class ImageLoader { public static final String TAG = ImageLoader.class.getSimpleName(); static final String LOG_INIT_CONFIG = "Initialize ImageLoader with configuration"; static final String LOG_DESTROY = "Destroy ImageLoader"; static final String LOG_LOAD_IMAGE_FROM_MEMORY_CACHE = "Load image from memory cache [%s]"; private static final String WARNING_RE_INIT_CONFIG = "Try to initialize ImageLoader which had already been initialized before. " + "To re-init ImageLoader with new configuration call ImageLoader.destroy() at first."; private static final String ERROR_WRONG_ARGUMENTS = "Wrong arguments were passed to displayImage() method (ImageView reference must not be null)"; private static final String ERROR_NOT_INIT = "ImageLoader must be init with configuration before using"; private static final String ERROR_INIT_CONFIG_WITH_NULL = "ImageLoader configuration can not be initialized with null"; private ImageLoaderConfiguration configuration; private ImageLoaderEngine engine; private final ImageLoadingListener emptyListener = new SimpleImageLoadingListener(); private volatile static ImageLoader instance; /** Returns singleton class instance */ public static ImageLoader getInstance() { if (instance == null) { synchronized (ImageLoader.class) { if (instance == null) { instance = new ImageLoader(); } } } return instance; } protected ImageLoader() { } /** * Initializes ImageLoader instance with configuration.<br /> * If configurations was set before ( {@link #isInited()} == true) then this method does nothing.<br /> * To force initialization with new configuration you should {@linkplain 
#destroy() destroy ImageLoader} at first. * * @param configuration {@linkplain ImageLoaderConfiguration ImageLoader configuration} * @throws IllegalArgumentException if <b>configuration</b> parameter is null */ public synchronized void init(ImageLoaderConfiguration configuration) { if (configuration == null) { throw new IllegalArgumentException(ERROR_INIT_CONFIG_WITH_NULL); } if (this.configuration == null) { L.d(LOG_INIT_CONFIG); engine = new ImageLoaderEngine(configuration); this.configuration = configuration; } else { L.w(WARNING_RE_INIT_CONFIG); } } /** * Returns <b>true</b> - if ImageLoader {@linkplain #init(ImageLoaderConfiguration) is initialized with * configuration}; <b>false</b> - otherwise */ public boolean isInited() { return configuration != null; } /** * Adds display image task to execution pool. Image will be set to ImageAware when it's turn. <br/> * Default {@linkplain DisplayImageOptions display image options} from {@linkplain ImageLoaderConfiguration * configuration} will be used.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageAware {@linkplain com.nostra13.universalimageloader.core.imageaware.ImageAware Image aware view} * which should display image * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageAware</b> is null */ public void displayImage(String uri, ImageAware imageAware) { displayImage(uri, imageAware, null, null, null); } /** * Adds display image task to execution pool. 
Image will be set to ImageAware when it's turn.<br /> * Default {@linkplain DisplayImageOptions display image options} from {@linkplain ImageLoaderConfiguration * configuration} will be used.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageAware {@linkplain com.nostra13.universalimageloader.core.imageaware.ImageAware Image aware view} * which should display image * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires events on * UI thread if this method is called on UI thread. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageAware</b> is null */ public void displayImage(String uri, ImageAware imageAware, ImageLoadingListener listener) { displayImage(uri, imageAware, null, listener, null); } /** * Adds display image task to execution pool. Image will be set to ImageAware when it's turn.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageAware {@linkplain com.nostra13.universalimageloader.core.imageaware.ImageAware Image aware view} * which should display image * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used. 
* @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageAware</b> is null */ public void displayImage(String uri, ImageAware imageAware, DisplayImageOptions options) { displayImage(uri, imageAware, options, null, null); } /** * Adds display image task to execution pool. Image will be set to ImageAware when it's turn.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageAware {@linkplain com.nostra13.universalimageloader.core.imageaware.ImageAware Image aware view} * which should display image * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used. * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires events on * UI thread if this method is called on UI thread. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageAware</b> is null */ public void displayImage(String uri, ImageAware imageAware, DisplayImageOptions options, ImageLoadingListener listener) { displayImage(uri, imageAware, options, listener, null); } /** * Adds display image task to execution pool. Image will be set to ImageAware when it's turn.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. 
"http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageAware {@linkplain com.nostra13.universalimageloader.core.imageaware.ImageAware Image aware view} * which should display image * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used. * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires * events on UI thread if this method is called on UI thread. * @param progressListener {@linkplain com.nostra13.universalimageloader.core.listener.ImageLoadingProgressListener * Listener} for image loading progress. Listener fires events on UI thread if this method * is called on UI thread. Caching on disk should be enabled in * {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions options} to make * this listener work. 
* @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageAware</b> is null */ public void displayImage(String uri, ImageAware imageAware, DisplayImageOptions options, ImageLoadingListener listener, ImageLoadingProgressListener progressListener) { checkConfiguration(); if (imageAware == null) { throw new IllegalArgumentException(ERROR_WRONG_ARGUMENTS); } if (listener == null) { listener = emptyListener; } if (options == null) { options = configuration.defaultDisplayImageOptions; } if (TextUtils.isEmpty(uri)) { engine.cancelDisplayTaskFor(imageAware); listener.onLoadingStarted(uri, imageAware.getWrappedView()); if (options.shouldShowImageForEmptyUri()) { imageAware.setImageDrawable(options.getImageForEmptyUri(configuration.resources)); } else { imageAware.setImageDrawable(null); } listener.onLoadingComplete(uri, imageAware.getWrappedView(), null, null); return; } ImageSize targetSize = ImageSizeUtils.defineTargetSizeForView(imageAware, configuration.getMaxImageSize()); String memoryCacheKey = MemoryCacheUtils.generateKey(uri, targetSize); engine.prepareDisplayTaskFor(imageAware, memoryCacheKey); listener.onLoadingStarted(uri, imageAware.getWrappedView()); Bitmap bmp = configuration.memoryCache.get(memoryCacheKey); if (bmp != null && !bmp.isRecycled()) { L.d(LOG_LOAD_IMAGE_FROM_MEMORY_CACHE, memoryCacheKey); if (options.shouldPostProcess()) { ImageLoadingInfo imageLoadingInfo = new ImageLoadingInfo(uri, imageAware, targetSize, memoryCacheKey, options, listener, progressListener, engine.getLockForUri(uri)); ProcessAndDisplayImageTask displayTask = new ProcessAndDisplayImageTask(engine, bmp, imageLoadingInfo, defineHandler(options)); if (options.isSyncLoading()) { displayTask.run(); } else { engine.submit(displayTask); } } else { options.getDisplayer().display(bmp, imageAware, LoadedFrom.MEMORY_CACHE); listener.onLoadingComplete(uri, imageAware.getWrappedView(), bmp, 
LoadedFrom.MEMORY_CACHE); } } else { if (options.shouldShowImageOnLoading()) { imageAware.setImageDrawable(options.getImageOnLoading(configuration.resources)); } else if (options.isResetViewBeforeLoading()) { imageAware.setImageDrawable(null); } ImageLoadingInfo imageLoadingInfo = new ImageLoadingInfo(uri, imageAware, targetSize, memoryCacheKey, options, listener, progressListener, engine.getLockForUri(uri)); LoadAndDisplayImageTask displayTask = new LoadAndDisplayImageTask(engine, imageLoadingInfo, defineHandler(options)); if (options.isSyncLoading()) { displayTask.run(); } else { engine.submit(displayTask); } } } /** * Adds display image task to execution pool. Image will be set to ImageView when it's turn. <br/> * Default {@linkplain DisplayImageOptions display image options} from {@linkplain ImageLoaderConfiguration * configuration} will be used.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageView {@link ImageView} which should display image * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageView</b> is null */ public void displayImage(String uri, ImageView imageView) { displayImage(uri, new ImageViewAware(imageView), null, null, null); } /** * Adds display image task to execution pool. Image will be set to ImageView when it's turn.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageView {@link ImageView} which should display image * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. 
If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageView</b> is null */ public void displayImage(String uri, ImageView imageView, DisplayImageOptions options) { displayImage(uri, new ImageViewAware(imageView), options, null, null); } /** * Adds display image task to execution pool. Image will be set to ImageView when it's turn.<br /> * Default {@linkplain DisplayImageOptions display image options} from {@linkplain ImageLoaderConfiguration * configuration} will be used.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageView {@link ImageView} which should display image * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires events on * UI thread if this method is called on UI thread. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageView</b> is null */ public void displayImage(String uri, ImageView imageView, ImageLoadingListener listener) { displayImage(uri, new ImageViewAware(imageView), null, listener, null); } /** * Adds display image task to execution pool. Image will be set to ImageView when it's turn.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. 
"http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageView {@link ImageView} which should display image * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used. * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires events on * UI thread if this method is called on UI thread. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageView</b> is null */ public void displayImage(String uri, ImageView imageView, DisplayImageOptions options, ImageLoadingListener listener) { displayImage(uri, imageView, options, listener, null); } /** * Adds display image task to execution pool. Image will be set to ImageView when it's turn.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param imageView {@link ImageView} which should display image * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used. * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires * events on UI thread if this method is called on UI thread. * @param progressListener {@linkplain com.nostra13.universalimageloader.core.listener.ImageLoadingProgressListener * Listener} for image loading progress. 
Listener fires events on UI thread if this method * is called on UI thread. Caching on disk should be enabled in * {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions options} to make * this listener work. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @throws IllegalArgumentException if passed <b>imageView</b> is null */ public void displayImage(String uri, ImageView imageView, DisplayImageOptions options, ImageLoadingListener listener, ImageLoadingProgressListener progressListener) { displayImage(uri, new ImageViewAware(imageView), options, listener, progressListener); } /** * Adds load image task to execution pool. Image will be returned with * {@link ImageLoadingListener#onLoadingComplete(String, android.view.View, android.graphics.Bitmap, com.nostra13.universalimageloader.core.assist.LoadedFrom)} callback}. * <br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires events on UI * thread if this method is called on UI thread. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public void loadImage(String uri, ImageLoadingListener listener) { loadImage(uri, null, null, listener, null); } /** * Adds load image task to execution pool. Image will be returned with * {@link ImageLoadingListener#onLoadingComplete(String, android.view.View, android.graphics.Bitmap, com.nostra13.universalimageloader.core.assist.LoadedFrom)} callback}. * <br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. 
"http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param targetImageSize Minimal size for {@link Bitmap} which will be returned in * {@linkplain ImageLoadingListener#onLoadingComplete(String, android.view.View, * android.graphics.Bitmap, com.nostra13.universalimageloader.core.assist.LoadedFrom)} callback}. Downloaded image will be decoded * and scaled to {@link Bitmap} of the size which is <b>equal or larger</b> (usually a bit * larger) than incoming targetImageSize. * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires * events on UI thread if this method is called on UI thread. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public void loadImage(String uri, ImageSize targetImageSize, ImageLoadingListener listener) { loadImage(uri, targetImageSize, null, listener, null); } /** * Adds load image task to execution pool. Image will be returned with * {@link ImageLoadingListener#onLoadingComplete(String, android.view.View, android.graphics.Bitmap, com.nostra13.universalimageloader.core.assist.LoadedFrom)} callback}. * <br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) from * configuration} will be used.<br /> * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires events on UI * thread if this method is called on UI thread. 
* @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public void loadImage(String uri, DisplayImageOptions options, ImageLoadingListener listener) { loadImage(uri, null, options, listener, null); } /** * Adds load image task to execution pool. Image will be returned with * {@link ImageLoadingListener#onLoadingComplete(String, android.view.View, android.graphics.Bitmap, com.nostra13.universalimageloader.core.assist.LoadedFrom)} callback}. * <br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param targetImageSize Minimal size for {@link Bitmap} which will be returned in * {@linkplain ImageLoadingListener#onLoadingComplete(String, android.view.View, * android.graphics.Bitmap, com.nostra13.universalimageloader.core.assist.LoadedFrom)} callback}. Downloaded image will be decoded * and scaled to {@link Bitmap} of the size which is <b>equal or larger</b> (usually a bit * larger) than incoming targetImageSize. * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used.<br /> * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires * events on UI thread if this method is called on UI thread. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public void loadImage(String uri, ImageSize targetImageSize, DisplayImageOptions options, ImageLoadingListener listener) { loadImage(uri, targetImageSize, options, listener, null); } /** * Adds load image task to execution pool. 
Image will be returned with * {@link ImageLoadingListener#onLoadingComplete(String, android.view.View, android.graphics.Bitmap, com.nostra13.universalimageloader.core.assist.LoadedFrom)} callback}. * <br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param targetImageSize Minimal size for {@link Bitmap} which will be returned in * {@linkplain ImageLoadingListener#onLoadingComplete(String, android.view.View, * android.graphics.Bitmap, com.nostra13.universalimageloader.core.assist.LoadedFrom)} callback}. Downloaded image will be decoded * and scaled to {@link Bitmap} of the size which is <b>equal or larger</b> (usually a bit * larger) than incoming targetImageSize. * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and displaying. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used.<br /> * @param listener {@linkplain ImageLoadingListener Listener} for image loading process. Listener fires * events on UI thread if this method is called on UI thread. * @param progressListener {@linkplain com.nostra13.universalimageloader.core.listener.ImageLoadingProgressListener * Listener} for image loading progress. Listener fires events on UI thread if this method * is called on UI thread. Caching on disk should be enabled in * {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions options} to make * this listener work. 
* @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public void loadImage(String uri, ImageSize targetImageSize, DisplayImageOptions options, ImageLoadingListener listener, ImageLoadingProgressListener progressListener) { checkConfiguration(); if (targetImageSize == null) { targetImageSize = configuration.getMaxImageSize(); } if (options == null) { options = configuration.defaultDisplayImageOptions; } NonViewAware imageAware = new NonViewAware(uri, targetImageSize, ViewScaleType.CROP); displayImage(uri, imageAware, options, listener, progressListener); } /** * Loads and decodes image synchronously.<br /> * Default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) from * configuration} will be used.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @return Result image Bitmap. Can be <b>null</b> if image loading/decoding was failed or cancelled. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public Bitmap loadImageSync(String uri) { return loadImageSync(uri, null, null); } /** * Loads and decodes image synchronously.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and scaling. If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) from * configuration} will be used. * @return Result image Bitmap. Can be <b>null</b> if image loading/decoding was failed or cancelled. 
* @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public Bitmap loadImageSync(String uri, DisplayImageOptions options) { return loadImageSync(uri, null, options); } /** * Loads and decodes image synchronously.<br /> * Default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) from * configuration} will be used.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param targetImageSize Minimal size for {@link Bitmap} which will be returned. Downloaded image will be decoded * and scaled to {@link Bitmap} of the size which is <b>equal or larger</b> (usually a bit * larger) than incoming targetImageSize. * @return Result image Bitmap. Can be <b>null</b> if image loading/decoding was failed or cancelled. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public Bitmap loadImageSync(String uri, ImageSize targetImageSize) { return loadImageSync(uri, targetImageSize, null); } /** * Loads and decodes image synchronously.<br /> * <b>NOTE:</b> {@link #init(ImageLoaderConfiguration)} method must be called before this method call * * @param uri Image URI (i.e. "http://site.com/image.png", "file:///mnt/sdcard/image.png") * @param targetImageSize Minimal size for {@link Bitmap} which will be returned. Downloaded image will be decoded * and scaled to {@link Bitmap} of the size which is <b>equal or larger</b> (usually a bit * larger) than incoming targetImageSize. * @param options {@linkplain com.nostra13.universalimageloader.core.DisplayImageOptions Options} for image * decoding and scaling. 
If <b>null</b> - default display image options * {@linkplain ImageLoaderConfiguration.Builder#defaultDisplayImageOptions(DisplayImageOptions) * from configuration} will be used. * @return Result image Bitmap. Can be <b>null</b> if image loading/decoding was failed or cancelled. * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public Bitmap loadImageSync(String uri, ImageSize targetImageSize, DisplayImageOptions options) { if (options == null) { options = configuration.defaultDisplayImageOptions; } options = new DisplayImageOptions.Builder().cloneFrom(options).syncLoading(true).build(); SyncImageLoadingListener listener = new SyncImageLoadingListener(); loadImage(uri, targetImageSize, options, listener); return listener.getLoadedBitmap(); } public void preloadImage(String uri, DisplayImageOptions options, ImageLoadingListener listener) { checkConfiguration(); if(!configuration.preloadEnabled || configuration.taskExecutorForPreload == null) { return; } if(TextUtils.isEmpty(uri)) { return; } if (options == null) { options = configuration.defaultDisplayImageOptions; } if(!options.isCacheOnDisk()) { return; } if(engine.getPausedPreload().get()) { return; } ImageLoadingInfo imageLoadingInfo = new ImageLoadingInfo(uri, null, null, null, options, listener, null, engine.getLockForUri(uri)); PreloadToDiskTask displayTask = new PreloadToDiskTask(engine, imageLoadingInfo, options.getHandler()); engine.submit(displayTask); } /** * Checks if ImageLoader's configuration was initialized * * @throws IllegalStateException if configuration wasn't initialized */ private void checkConfiguration() { if (configuration == null) { throw new IllegalStateException(ERROR_NOT_INIT); } } /** * Returns memory cache * * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public MemoryCache getMemoryCache() { checkConfiguration(); return configuration.memoryCache; } /** * Clears memory 
cache * * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public void clearMemoryCache() { checkConfiguration(); configuration.memoryCache.clear(); } /** * Returns disk cache * * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @deprecated Use {@link #getDiskCache()} instead */ @Deprecated public DiskCache getDiscCache() { return getDiskCache(); } /** * Returns disk cache * * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public DiskCache getDiskCache() { checkConfiguration(); return configuration.diskCache; } /** * Clears disk cache. * * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before * @deprecated Use {@link #clearDiskCache()} instead */ @Deprecated public void clearDiscCache() { clearDiskCache(); } /** * Clears disk cache. * * @throws IllegalStateException if {@link #init(ImageLoaderConfiguration)} method wasn't called before */ public void clearDiskCache() { checkConfiguration(); configuration.diskCache.clear(); } /** * Returns URI of image which is loading at this moment into passed * {@link com.nostra13.universalimageloader.core.imageaware.ImageAware ImageAware} */ public String getLoadingUriForView(ImageAware imageAware) { return engine.getLoadingUriForView(imageAware); } /** * Returns URI of image which is loading at this moment into passed * {@link android.widget.ImageView ImageView} */ public String getLoadingUriForView(ImageView imageView) { return engine.getLoadingUriForView(new ImageViewAware(imageView)); } /** * Cancel the task of loading and displaying image for passed * {@link com.nostra13.universalimageloader.core.imageaware.ImageAware ImageAware}. 
* * @param imageAware {@link com.nostra13.universalimageloader.core.imageaware.ImageAware ImageAware} for * which display task will be cancelled */ public void cancelDisplayTask(ImageAware imageAware) { engine.cancelDisplayTaskFor(imageAware); } /** * Cancel the task of loading and displaying image for passed * {@link android.widget.ImageView ImageView}. * * @param imageView {@link android.widget.ImageView ImageView} for which display task will be cancelled */ public void cancelDisplayTask(ImageView imageView) { engine.cancelDisplayTaskFor(new ImageViewAware(imageView)); } /** * Denies or allows ImageLoader to download images from the network.<br /> * <br /> * If downloads are denied and if image isn't cached then * {@link ImageLoadingListener#onLoadingFailed(String, View, FailReason)} callback will be fired with * {@link FailReason.FailType#NETWORK_DENIED} * * @param denyNetworkDownloads pass <b>true</b> - to deny engine to download images from the network; <b>false</b> - * to allow engine to download images from network. */ public void denyNetworkDownloads(boolean denyNetworkDownloads) { engine.denyNetworkDownloads(denyNetworkDownloads); } /** * Sets option whether ImageLoader will use {@link FlushedInputStream} for network downloads to handle <a * href="http://code.google.com/p/android/issues/detail?id=6066">this known problem</a> or not. * * @param handleSlowNetwork pass <b>true</b> - to use {@link FlushedInputStream} for network downloads; <b>false</b> * - otherwise. */ public void handleSlowNetwork(boolean handleSlowNetwork) { engine.handleSlowNetwork(handleSlowNetwork); } /** * Pause ImageLoader. All new "load&display" tasks won't be executed until ImageLoader is {@link #resume() resumed}. * <br /> * Already running tasks are not paused. 
*/ public void pause() { engine.pause(); } public void pausePreload() { engine.pausePreload(); } /** Resumes waiting "load&display" tasks */ public void resume() { engine.resume(); } public void resumePreload() { engine.resumePreload(); } /** * Cancels all running and scheduled display image tasks.<br /> * <b>NOTE:</b> This method doesn't shutdown * {@linkplain com.nostra13.universalimageloader.core.ImageLoaderConfiguration.Builder#taskExecutor(java.util.concurrent.Executor) * custom task executors} if you set them.<br /> * ImageLoader still can be used after calling this method. */ public void stop() { engine.stop(); } /** * {@linkplain #stop() Stops ImageLoader} and clears current configuration. <br /> * You can {@linkplain #init(ImageLoaderConfiguration) init} ImageLoader with new configuration after calling this * method. */ public void destroy() { if (configuration != null) L.d(LOG_DESTROY); stop(); configuration.diskCache.close(); engine = null; configuration = null; } private static Handler defineHandler(DisplayImageOptions options) { Handler handler = options.getHandler(); if (options.isSyncLoading()) { handler = null; } else if (handler == null && Looper.myLooper() == Looper.getMainLooper()) { handler = new Handler(); } return handler; } /** * Listener which is designed for synchronous image loading. * * @author Sergey Tarasevich (nostra13[at]gmail[dot]com) * @since 1.9.0 */ private static class SyncImageLoadingListener extends SimpleImageLoadingListener { private Bitmap loadedImage; @Override public void onLoadingComplete(String imageUri, View view, Bitmap loadedImage, LoadedFrom loadedFrom) { this.loadedImage = loadedImage; } public Bitmap getLoadedBitmap() { return loadedImage; } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.state.stack.upgrade; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.controller.internal.OperatingSystemResourceProvider; import org.apache.ambari.server.controller.internal.RepositoryResourceProvider; import org.apache.ambari.server.controller.internal.RepositoryVersionResourceProvider; import org.apache.ambari.server.orm.entities.OperatingSystemEntity; import org.apache.ambari.server.orm.entities.RepositoryEntity; import org.apache.ambari.server.state.RepositoryInfo; import org.apache.ambari.server.state.stack.UpgradePack; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.google.inject.Inject; import 
com.google.inject.Singleton;

/**
 * Provides helper methods to manage repository versions.
 */
@Singleton
public class RepositoryVersionHelper {

  private static final Logger LOG = LoggerFactory.getLogger(RepositoryVersionHelper.class);

  @Inject
  private Gson gson;

  // Optional injection: may be null in contexts where stack metadata is unavailable.
  // Only getUpgradePackageName() dereferences it.
  @Inject(optional = true)
  private AmbariMetaInfo ambariMetaInfo;

  /**
   * Parses operating systems json to a list of entities. Expects json like:
   * <pre>
   * [
   *    {
   *       "repositories":[
   *          {
   *             "Repositories/base_url":"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0",
   *             "Repositories/repo_name":"HDP-UTILS",
   *             "Repositories/repo_id":"HDP-UTILS-1.1.0.20"
   *          },
   *          {
   *             "Repositories/base_url":"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0",
   *             "Repositories/repo_name":"HDP",
   *             "Repositories/repo_id":"HDP-2.2"
   *          }
   *       ],
   *       "OperatingSystems/os_type":"redhat6"
   *    }
   * ]
   * </pre>
   * @param repositoriesJson operating systems json
   * @return list of operating system entities
   * @throws Exception if any kind of json parsing error happened
   */
  public List<OperatingSystemEntity> parseOperatingSystems(String repositoriesJson) throws Exception {
    final List<OperatingSystemEntity> operatingSystems = new ArrayList<OperatingSystemEntity>();
    final JsonArray rootJson = new JsonParser().parse(repositoriesJson).getAsJsonArray();
    for (JsonElement operatingSystemJson: rootJson) {
      JsonObject osObj = operatingSystemJson.getAsJsonObject();
      final OperatingSystemEntity operatingSystemEntity = new OperatingSystemEntity();
      operatingSystemEntity.setOsType(osObj.get(OperatingSystemResourceProvider.OPERATING_SYSTEM_OS_TYPE_PROPERTY_ID).getAsString());
      // "ambari managed" flag is optional in the incoming json; entity default is kept otherwise
      if (osObj.has(OperatingSystemResourceProvider.OPERATING_SYSTEM_AMBARI_MANAGED_REPOS)) {
        operatingSystemEntity.setAmbariManagedRepos(osObj.get(
            OperatingSystemResourceProvider.OPERATING_SYSTEM_AMBARI_MANAGED_REPOS).getAsBoolean());
      }
      for (JsonElement repositoryElement: osObj.get(RepositoryVersionResourceProvider.SUBRESOURCE_REPOSITORIES_PROPERTY_ID).getAsJsonArray()) {
        final RepositoryEntity repositoryEntity = new RepositoryEntity();
        final JsonObject repositoryJson = repositoryElement.getAsJsonObject();
        repositoryEntity.setBaseUrl(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_BASE_URL_PROPERTY_ID).getAsString());
        repositoryEntity.setName(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_REPO_NAME_PROPERTY_ID).getAsString());
        repositoryEntity.setRepositoryId(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_REPO_ID_PROPERTY_ID).getAsString());
        // "unique" is optional as well
        if (repositoryJson.get(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID) != null) {
          repositoryEntity.setUnique(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID).getAsBoolean());
        }
        operatingSystemEntity.getRepositories().add(repositoryEntity);
      }
      operatingSystems.add(operatingSystemEntity);
    }
    return operatingSystems;
  }

  /**
   * Serializes repository info to json for storing to DB.
   * Produces json like:
   * <pre>
   * [
   *    {
   *       "repositories":[
   *          {
   *             "Repositories/base_url":"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0",
   *             "Repositories/repo_name":"HDP-UTILS",
   *             "Repositories/repo_id":"HDP-UTILS-1.1.0.20"
   *          },
   *          {
   *             "Repositories/base_url":"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0",
   *             "Repositories/repo_name":"HDP",
   *             "Repositories/repo_id":"HDP-2.2"
   *          }
   *       ],
   *       "OperatingSystems/os_type":"redhat6"
   *    }
   * ]
   * </pre>
   *
   * @param repositories list of repository infos
   * @return serialized list of operating systems
   */
  public String serializeOperatingSystems(List<RepositoryInfo> repositories) {
    final JsonArray rootJson = new JsonArray();
    // group the flat repository list by os type; one json object is emitted per os
    final Multimap<String, RepositoryInfo> operatingSystems = ArrayListMultimap.create();
    for (RepositoryInfo repository: repositories) {
      operatingSystems.put(repository.getOsType(), repository);
    }
    for (Entry<String, Collection<RepositoryInfo>> operatingSystem : operatingSystems.asMap().entrySet()) {
      final JsonObject operatingSystemJson = new JsonObject();
      final JsonArray repositoriesJson = new JsonArray();
      for (RepositoryInfo repository : operatingSystem.getValue()) {
        final JsonObject repositoryJson = new JsonObject();
        repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_BASE_URL_PROPERTY_ID, repository.getBaseUrl());
        repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_REPO_NAME_PROPERTY_ID, repository.getRepoName());
        repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_REPO_ID_PROPERTY_ID, repository.getRepoId());
        repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID, repository.isUnique());
        repositoriesJson.add(repositoryJson);
        // NOTE(review): this sets an OS-level property once per repository; JsonObject.addProperty
        // overwrites, so the LAST repository's flag wins when repositories of one os disagree.
        // Kept as-is to preserve the stored serialization behavior.
        operatingSystemJson.addProperty(OperatingSystemResourceProvider.OPERATING_SYSTEM_AMBARI_MANAGED_REPOS,
            repository.isAmbariManagedRepositories());
      }
      operatingSystemJson.add(RepositoryVersionResourceProvider.SUBRESOURCE_REPOSITORIES_PROPERTY_ID, repositoriesJson);
      operatingSystemJson.addProperty(OperatingSystemResourceProvider.OPERATING_SYSTEM_OS_TYPE_PROPERTY_ID, operatingSystem.getKey());
      rootJson.add(operatingSystemJson);
    }
    return gson.toJson(rootJson);
  }

  /**
   * Flattens the given operating system entities into {@link RepositoryInfo} objects
   * (one per repository) and serializes them via {@link #serializeOperatingSystems(List)}.
   *
   * @param operatingSystems operating system entities to serialize
   * @return serialized json (same format as {@link #serializeOperatingSystems(List)})
   */
  public String serializeOperatingSystemEntities(List<OperatingSystemEntity> operatingSystems) {
    List<RepositoryInfo> repositoryInfos = new ArrayList<>();
    for (OperatingSystemEntity os: operatingSystems) {
      for (RepositoryEntity repositoryEntity: os.getRepositories()) {
        RepositoryInfo repositoryInfo = new RepositoryInfo();
        repositoryInfo.setRepoId(repositoryEntity.getRepositoryId());
        repositoryInfo.setRepoName(repositoryEntity.getName());
        repositoryInfo.setBaseUrl(repositoryEntity.getBaseUrl());
        repositoryInfo.setOsType(os.getOsType());
        repositoryInfo.setAmbariManagedRepositories(os.isAmbariManagedRepos());
        repositoryInfos.add(repositoryInfo);
      }
    }
    return serializeOperatingSystems(repositoryInfos);
  }

  /**
   * Scans the given stack for upgrade packages which can be applied to update the cluster to given repository version.
   *
   * @param stackName stack name
   * @param stackVersion stack version
   * @param repositoryVersion target repository version
   * @param upgradeType if not {@code null}, will only return upgrade packs whose type matches
   * @return upgrade pack name
   * @throws AmbariException if no upgrade packs suit the requirements
   */
  public String getUpgradePackageName(String stackName, String stackVersion, String repositoryVersion, UpgradeType upgradeType) throws AmbariException {
    final Map<String, UpgradePack> upgradePacks = ambariMetaInfo.getUpgradePacks(stackName, stackVersion);
    for (UpgradePack upgradePack : upgradePacks.values()) {
      final String upgradePackName = upgradePack.getName();
      if (null != upgradeType && upgradePack.getType() != upgradeType) {
        continue;
      }
      // check that upgrade pack has <target> node; packs without one are skipped with an error log
      if (StringUtils.isBlank(upgradePack.getTarget())) {
        LOG.error("Upgrade pack " + upgradePackName + " is corrupted, it should contain <target> node");
        continue;
      }
      // first applicable pack wins (iteration order of the metainfo map)
      if (upgradePack.canBeApplied(repositoryVersion)) {
        return upgradePackName;
      }
    }
    throw new AmbariException("There were no suitable upgrade packs for stack " + stackName + " " + stackVersion
        + ((null != upgradeType) ? " and upgrade type " + upgradeType : ""));
  }
}
package com.tenxerconsulting.swagger.doclet.parser; import static com.google.common.collect.Collections2.filter; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.google.common.base.Predicate; import com.sun.javadoc.ClassDoc; import com.sun.javadoc.FieldDoc; import com.sun.javadoc.MethodDoc; import com.sun.javadoc.ParameterizedType; import com.sun.javadoc.Type; import com.sun.javadoc.TypeVariable; import com.tenxerconsulting.swagger.doclet.model.ModelWrapper; import com.tenxerconsulting.swagger.doclet.model.PropertyWrapper; import io.swagger.models.Model; import com.tenxerconsulting.swagger.doclet.DocletOptions; import com.tenxerconsulting.swagger.doclet.translator.NameBasedTranslator; import com.tenxerconsulting.swagger.doclet.translator.Translator; import com.tenxerconsulting.swagger.doclet.translator.Translator.OptionalName; import io.swagger.models.ModelImpl; import io.swagger.models.properties.ArrayProperty; import io.swagger.models.properties.PropertyBuilder; import io.swagger.models.properties.RefProperty; import io.swagger.models.properties.StringProperty; import io.swagger.models.refs.RefType; /** * The ApiModelParser represents a parser for api model classes which are used for parameters, resource method return types and * model fields. 
 * @version $Id$
 */
public class ApiModelParser {

	private final DocletOptions options;
	final Translator translator;
	// the type whose model graph is being parsed
	private final Type rootType;
	// models produced by this parser (and its sub-type parsers)
	private final Set<ModelWrapper> models;
	// models already produced by a parent parser; used to avoid re-parsing shared types
	private final Set<ModelWrapper> parentModels = new LinkedHashSet<>();
	private final ClassDoc[] viewClasses;
	private final Collection<ClassDoc> docletClasses;
	private final boolean inheritFields;
	// maps type-variable qualified names to the concrete types bound at the use site
	private Map<String, Type> varsToTypes = new HashMap<String, Type>();

	// composite param model processing specifics
	private boolean composite = false;
	private boolean consumesMultipart = false;
	// sub types discovered via subtype annotations while parsing; processed after the root parse
	private List<ClassDoc> subTypeClasses = new ArrayList<ClassDoc>();

	/**
	 * This creates a ApiModelParser that inherits fields from super types
	 * @param options The doclet options
	 * @param translator The name translator
	 * @param rootType The type to parse the model of
	 * @param viewClasses Json view classes to filter by, may be null
	 * @param docletClasses All classes known to the doclet, may be null
	 */
	public ApiModelParser(DocletOptions options, Translator translator, Type rootType, ClassDoc[] viewClasses, Collection<ClassDoc> docletClasses) {
		this(options, translator, rootType, viewClasses, docletClasses, true);
	}

	/**
	 * This creates a ApiModelParser for use when using composite parameter model parsing
	 * @param options The doclet options
	 * @param translator The name translator
	 * @param rootType The type to parse the model of
	 * @param consumesMultipart whether the resource method consumes multipart
	 * @param inheritFields whether to inherit fields from super types
	 */
	public ApiModelParser(DocletOptions options, Translator translator, Type rootType, boolean consumesMultipart, boolean inheritFields) {
		this(options, translator, rootType, null, null, inheritFields);
		this.consumesMultipart = consumesMultipart;
		this.composite = true;
	}

	/**
	 * This creates an ApiModelParser for use only by sub model parsing
	 * @param options The doclet options
	 * @param translator The name translator
	 * @param rootType The type to parse the model of
	 * @param viewClasses Json view classes to filter by, may be null
	 * @param inheritFields whether to inherit fields from super types
	 * @param parentModels parent type models
	 */
	ApiModelParser(DocletOptions options, Translator translator, Type rootType, ClassDoc[] viewClasses, boolean inheritFields, Set<ModelWrapper> parentModels) {
		this(options, translator, rootType, viewClasses, null, inheritFields);
		this.parentModels.clear();
		this.parentModels.addAll(parentModels);
	}

	/**
	 * This creates a ApiModelParser
	 * @param options The doclet options
	 * @param translator The name translator
	 * @param rootType The type to parse the model of
	 * @param viewClasses Json view classes to filter by, may be null
	 * @param docletClasses All classes known to the doclet, may be null
	 * @param inheritFields whether to inherit fields from super types
	 */
	ApiModelParser(DocletOptions options, Translator translator, Type rootType, ClassDoc[] viewClasses, Collection<ClassDoc> docletClasses,
			boolean inheritFields) {
		this.options = options;
		this.translator = translator;
		this.rootType = rootType;
		if (viewClasses == null) {
			this.viewClasses = null;
		} else {
			// defensive copy of the caller's array
			this.viewClasses = new ClassDoc[viewClasses.length];
			int i = 0;
			for (ClassDoc view : viewClasses) {
				this.viewClasses[i++] = view;
			}
		}
		this.docletClasses = docletClasses;
		this.models = new LinkedHashSet<ModelWrapper>();
		// If the superclass declares this very type as one of its subtype-annotation values
		// (e.g. JsonSubTypes), do NOT inherit fields - the parent model already carries them.
		if (rootType.asClassDoc() != null && rootType.asClassDoc().superclass() != null) {
			AnnotationParser p = new AnnotationParser(rootType.asClassDoc().superclass(), this.options);
			for (String subTypeAnnotation : this.options.getSubTypesAnnotations()) {
				List<ClassDoc> annSubTypes = p.getAnnotationArrayTypes(subTypeAnnotation, "value", "value");
				if (annSubTypes != null) {
					for (ClassDoc subType : annSubTypes) {
						if (this.translator.typeName(rootType.asClassDoc(), this.options.isUseFullModelIds()).value()
								.equals(this.translator.typeName(subType, this.options.isUseFullModelIds()).value())) {
							inheritFields = false;
						}
					}
				}
			}
		}
		this.inheritFields = inheritFields;
	}

	/**
	 * This adds the given vars to types to the ones used by this model
	 * @param varsToTypes map of type-variable names to concrete types, may be null
	 * @return This
	 */
	public ApiModelParser addVarsToTypes(Map<String, Type> varsToTypes) {
		if (varsToTypes != null) {
			this.varsToTypes.putAll(varsToTypes);
		}
		return this;
	}

	/**
	 * This parses a model class built from parsing this class
	 * @return The set of model classes
	 */
	public Set<ModelWrapper> parse() {
		this.subTypeClasses.clear();
		parseModel(this.rootType, false);

		// process sub types discovered during the root parse; each sub type is parsed
		// without field inheritance, with this parser's models acting as parent models
		for (ClassDoc subType : this.subTypeClasses) {
			ApiModelParser subTypeParser = new ApiModelParser(this.options, this.translator, subType, this.viewClasses, false, this.models);
			Set<ModelWrapper> subTypeModesl = subTypeParser.parse();
			this.models.addAll(subTypeModesl);
		}

		return this.models;
	}

	// Parses a single type (and, transitively, its referenced types) into this.models.
	// Primitive/JDK/collection/map/array/enum/wildcard types and already-parsed types are skipped.
	private void parseModel(Type type, boolean nested) {

		String qName = type.qualifiedTypeName();
		boolean isPrimitive = ParserHelper.isPrimitive(type, this.options);
		boolean isJavaxType = ParserHelper.isJavaxType(qName);
		boolean isBaseObject = qName.equals("java.lang.Object");
		boolean isClass = qName.equals("java.lang.Class");
		boolean isCollection = ParserHelper.isCollection(qName);
		boolean isArray = ParserHelper.isArray(type);
		boolean isMap = ParserHelper.isMap(qName);
		boolean isWildcard = qName.equals("?");

		ClassDoc classDoc = type.asClassDoc();

		if (isPrimitive || isJavaxType || isClass || isWildcard || isBaseObject || isCollection || isMap || isArray || classDoc == null || classDoc.isEnum()
				|| alreadyStoredType(type, this.models) || alreadyStoredType(type, this.parentModels)) {
			return;
		}

		// see if deprecated
		if (this.options.isExcludeDeprecatedModelClasses() && ParserHelper.isDeprecated(classDoc, this.options)) {
			return;
		}

		// see if excluded explicitly via tag or annotation
		if (ParserHelper.hasTag(classDoc, this.options.getExcludeClassTags())) {
			return;
		}
		if (ParserHelper.hasAnnotation(classDoc, this.options.getExcludeClassAnnotations(), this.options)) {
			return;
		}

		// see if excluded via its FQN
		if (this.options.getExcludeModelPrefixes() != null && !this.options.getExcludeModelPrefixes().isEmpty()) {
			for (String prefix : this.options.getExcludeModelPrefixes()) {
				String className = classDoc.qualifiedName();
				if (className.startsWith(prefix)) {
					return;
				}
			}
		}

		// if parameterized then build map of the param vars
		// NOTE(review): typeArgs[i] is indexed by the position of each declared type variable;
		// this assumes vars.length <= typeArgs.length - TODO confirm the doclet API guarantees this
		ParameterizedType pt = type.asParameterizedType();
		if (pt != null) {
			Type[] typeArgs = pt.typeArguments();
			if (typeArgs != null && typeArgs.length > 0) {
				TypeVariable[] vars = classDoc.typeParameters();
				int i = 0;
				for (TypeVariable var : vars) {
					this.varsToTypes.put(var.qualifiedTypeName(), typeArgs[i]);
					i++;
				}
			}
		}

		Map<String, TypeRef> types = findReferencedTypes(classDoc, nested);
		Map<String, PropertyWrapper> elements = findReferencedElements(classDoc, types, nested);
		if (!elements.isEmpty() || classDoc.superclass() != null) {
			String modelId = this.translator.typeName(type, this.options.isUseFullModelIds(), this.viewClasses).value();

			List<String> requiredFields = null;
			List<String> optionalFields = null;
			// build list of required and optional fields
			// NOTE(review): optionalFields is populated but never read afterwards in this method
			for (Map.Entry<String, TypeRef> fieldEntry : types.entrySet()) {
				String fieldName = fieldEntry.getKey();
				TypeRef fieldDesc = fieldEntry.getValue();
				Boolean required = fieldDesc.required;
				if ((required != null && required.booleanValue()) || (required == null && this.options.isModelFieldsRequiredByDefault())) {
					if (requiredFields == null) {
						requiredFields = new ArrayList<String>();
					}
					requiredFields.add(fieldName);
				}
				if (required != null && !required.booleanValue()) {
					if (optionalFields == null) {
						optionalFields = new ArrayList<String>();
					}
					optionalFields.add(fieldName);
				}
			}

			// look for sub types
			AnnotationParser p = new AnnotationParser(classDoc, this.options);
			List<String> subTypes = new ArrayList<String>();
			for (String subTypeAnnotation : this.options.getSubTypesAnnotations()) {
				List<ClassDoc> annSubTypes = p.getAnnotationArrayTypes(subTypeAnnotation, "value", "value");
				if (annSubTypes != null) {
					for (ClassDoc subType : annSubTypes) {
						String subTypeName = this.translator.typeName(subType, this.options.isUseFullModelIds()).value();
						if (subTypeName != null) {
							subTypes.add(subTypeName);
							// add model for subtype
							this.subTypeClasses.add(subType);
						}
					}
				}
			}
			if (subTypes.isEmpty()) {
				subTypes = null;
			}

			// the first discriminator annotation with a "property" value wins
			String discriminator = null;
			for (String discriminatorAnnotation : this.options.getDiscriminatorAnnotations()) {
				String val = p.getAnnotationValue(discriminatorAnnotation, "property");
				if (val != null) {
					discriminator = val;
					// auto add as model field if not already done
					if (!elements.containsKey(discriminator)) {
						StringProperty discriminatorProp = new StringProperty();
						PropertyWrapper discriminatorPropWrapper = new PropertyWrapper(discriminator, null, discriminatorProp);
						elements.put(discriminator, discriminatorPropWrapper);
					}
					// auto add discriminator to required fields
					if (requiredFields == null || !requiredFields.contains(discriminator)) {
						if (requiredFields == null) {
							requiredFields = new ArrayList<String>(1);
						}
						requiredFields.add(discriminator);
					}
					break;
				}
			}

			ModelImpl model = new ModelImpl();
			model.setReference(modelId);
			model.setType(ModelImpl.OBJECT);
			model.setRequired(requiredFields);
			model.setDiscriminator(discriminator);
			// TODO support subTypes

			ModelWrapper modelWrapper = new ModelWrapper(model, elements);
			this.models.add(modelWrapper);
			// recurse into field/element types
			parseNestedModels(types.values());
		}
	}

	/**
	 * This gets the id of the root model
	 * @return The id of the root model
	 */
	public String getRootModelId() {
		return this.translator.typeName(this.rootType, this.options.isUseFullModelIds(), this.viewClasses).value();
	}

	// Mutable holder describing one referenced field/property before it is turned into a swagger property.
	static class TypeRef {

		String rawName;
		String paramCategory;
		String sourceDesc;
		Type type;
		String description;
		String format;
		String min;
		String max;
		String defaultValue;
		List<String> allowableValues;
		Boolean required;
		boolean hasView;

		TypeRef(String rawName, String paramCategory, String sourceDesc, Type type, String description, String format, String min, String max,
				String defaultValue, List<String> allowableValues, Boolean required, boolean hasView) {
			super();
			this.rawName = rawName;
			this.paramCategory = paramCategory;
			this.sourceDesc = sourceDesc;
			this.type = type;
			this.description = description;
			this.format = format;
			this.min = min;
			this.max = max;
			this.defaultValue = defaultValue;
			this.allowableValues = allowableValues;
			this.required = required;
			this.hasView = hasView;
		}
	}

	// get list of super classes with highest level first so we process
	// grandparents down, this allows us to override field names via the lower levels
	List<ClassDoc> getClassLineage(ClassDoc classDoc) {
		List<ClassDoc> classes = new ArrayList<ClassDoc>();
		if (!this.inheritFields) {
			classes.add(classDoc);
			return classes;
		}
		while (classDoc != null) {

			// ignore parent object class
			if (!ParserHelper.hasAncestor(classDoc)) {
				break;
			}

			classes.add(classDoc);
			// interfaces have no superclass chain; take their direct super-interfaces and stop
			if (classDoc.isInterface()) {
				for (ClassDoc iClassDoc: classDoc.interfaces()) {
					classes.add(iClassDoc);
				}
				break;
			}
			classDoc = classDoc.superclass();
		}
		Collections.reverse(classes);
		return classes;
	}

	// Collects all fields/getters referenced by the class (walking its lineage) keyed by the
	// name to use in the model, applying XmlElement-style renames and naming conventions.
	private Map<String, TypeRef> findReferencedTypes(ClassDoc rootClassDoc, boolean nested) {

		Map<String, TypeRef> elements = new LinkedHashMap<String, TypeRef>();

		List<ClassDoc> classes = getClassLineage(rootClassDoc);

		// map of raw field names to translated names, translated names may be different
		// due to annotations like XMLElement
		Map<String, String> rawToTranslatedFields = new HashMap<String, String>();

		for (ClassDoc classDoc : classes) {

			AnnotationParser p = new AnnotationParser(classDoc, this.options);
			String xmlAccessorType = p.getAnnotationValue("javax.xml.bind.annotation.XmlAccessorType", "value");

			Set<String> customizedFieldNames = new HashSet<String>();

			Set<String> excludeFields = new HashSet<String>();

			Set<String> fieldNames = new HashSet<String>();
			FieldDoc[] fieldDocs = classDoc.fields(false);

			// process fields
			processFields(nested, xmlAccessorType, fieldDocs, fieldNames, excludeFields, rawToTranslatedFields, customizedFieldNames, elements);

			// process methods
			MethodDoc[] methodDocs = classDoc.methods();
			processMethods(nested, xmlAccessorType, methodDocs, excludeFields, rawToTranslatedFields, customizedFieldNames, elements);
		}

		// finally switch the element keys to use the translated field names
		Map<String, TypeRef> res = new LinkedHashMap<String, TypeRef>();
		for (Map.Entry<String, TypeRef> entry : elements.entrySet()) {
			String rawName = entry.getKey();
			String translatedName = rawToTranslatedFields.get(rawName);
			boolean overridden = translatedName != null && !translatedName.equals(rawName);
			String nameToUse = overridden ? translatedName : rawName;

			// see if we should override using naming conventions
			if (this.options.getModelFieldsNamingConvention() != null) {
				switch (this.options.getModelFieldsNamingConvention()) {
					case DEFAULT_NAME:
						// do nothing as the naming is ok as is
						break;
					case LOWERCASE:
						nameToUse = rawName.toLowerCase();
						break;
					case LOWERCASE_UNLESS_OVERRIDDEN:
						nameToUse = overridden ? translatedName : rawName.toLowerCase();
						break;
					case LOWER_UNDERSCORE:
						nameToUse = NamingConvention.toLowerUnderscore(rawName);
						break;
					case LOWER_UNDERSCORE_UNLESS_OVERRIDDEN:
						nameToUse = overridden ? translatedName : NamingConvention.toLowerUnderscore(rawName);
						break;
					case UPPERCASE:
						nameToUse = rawName.toUpperCase();
						break;
					case UPPERCASE_UNLESS_OVERRIDDEN:
						nameToUse = overridden ? translatedName : rawName.toUpperCase();
						break;
					default:
						break;
				}
			}

			TypeRef typeRef = entry.getValue();
			// composite parameter models default their category to "body"
			if (this.composite && typeRef.paramCategory == null) {
				typeRef.paramCategory = "body";
			}

			res.put(nameToUse, typeRef);
		}

		return res;
	}

	// Adds each non-excluded field to the element map (continued beyond this view).
	private void processFields(boolean nested, String xmlAccessorType, FieldDoc[] fieldDocs, Set<String> fieldNames, Set<String> excludeFields,
			Map<String, String> rawToTranslatedFields, Set<String> customizedFieldNames, Map<String, TypeRef> elements) {
		if (fieldDocs != null) {
			for (FieldDoc field : fieldDocs) {
				fieldNames.add(field.name());

				FieldReader fieldReader = new FieldReader(this.options);

				String translatedName = this.translator.fieldName(field).value();

				if (excludeField(field, translatedName)) {
					excludeFields.add(field.name());
				} else {
					rawToTranslatedFields.put(field.name(), translatedName);
					if (!field.name().equals(translatedName)) {
						customizedFieldNames.add(field.name());
					}
					if (checkFieldXmlAccess(xmlAccessorType, field)) {
						if (!elements.containsKey(translatedName)) {

							Type fieldType = getModelType(field.type(), nested);

							String
description = fieldReader.getFieldDescription(field, true); String format = fieldReader.getFieldFormatValue(field, fieldType); String min = fieldReader.getFieldMin(field, fieldType); String max = fieldReader.getFieldMax(field, fieldType); Boolean required = fieldReader.getFieldRequired(field); boolean hasView = ParserHelper.hasJsonViews(field, this.options); String defaultValue = fieldReader.getFieldDefaultValue(field, fieldType); List<String> allowableValues = fieldReader.getFieldAllowableValues(field); String paramCategory = this.composite ? ParserHelper.paramTypeOf(false, this.consumesMultipart, field, fieldType, this.options) : null; elements.put(field.name(), new TypeRef(field.name(), paramCategory, " field: " + field.name(), fieldType, description, format, min, max, defaultValue, allowableValues, required, hasView)); } } } } } } private void processMethods(boolean nested, String xmlAccessorType, MethodDoc[] methodDocs, Set<String> excludeFields, Map<String, String> rawToTranslatedFields, Set<String> customizedFieldNames, Map<String, TypeRef> elements) { NameBasedTranslator nameTranslator = new NameBasedTranslator(this.options); if (methodDocs != null) { // loop through methods to find ones that should be excluded such as via @XmlTransient or other means // we do this first as the order of processing the methods varies per runtime env and // we want to make sure we group together setters and getters for (MethodDoc method : methodDocs) { if (checkMethodXmlAccess(xmlAccessorType, method)) { FieldReader returnTypeReader = new FieldReader(this.options); String translatedNameViaMethod = this.translator.methodName(method).value(); String rawFieldName = nameTranslator.methodName(method).value(); Type returnType = getModelType(method.returnType(), nested); // see if this is a getter or setter and either the field or previously processed getter/setter has been excluded // if so don't include this method if (rawFieldName != null && excludeFields.contains(rawFieldName)) 
{ elements.remove(rawFieldName); continue; } // see if this method is to be directly excluded if (excludeMethod(method, translatedNameViaMethod)) { if (rawFieldName != null) { elements.remove(rawFieldName); excludeFields.add(rawFieldName); } continue; } boolean isFieldGetter = rawFieldName != null && method.name().startsWith("get") && (method.parameters() == null || method.parameters().length == 0); String description = returnTypeReader.getFieldDescription(method, isFieldGetter); String format = returnTypeReader.getFieldFormatValue(method, returnType); String min = returnTypeReader.getFieldMin(method, returnType); String max = returnTypeReader.getFieldMax(method, returnType); String defaultValue = returnTypeReader.getFieldDefaultValue(method, returnType); List<String> allowableValues = returnTypeReader.getFieldAllowableValues(method); Boolean required = returnTypeReader.getFieldRequired(method); boolean hasView = ParserHelper.hasJsonViews(method, this.options); // process getters/setters in a way that can override the field details if (rawFieldName != null) { // see if get method with parameter, if so then we exclude if (method.name().startsWith("get") && method.parameters() != null && method.parameters().length > 0) { continue; } // look for custom field names to use for getters/setters String translatedFieldName = rawToTranslatedFields.get(rawFieldName); if (!customizedFieldNames.contains(rawFieldName) && !translatedNameViaMethod.equals(translatedFieldName)) { rawToTranslatedFields.put(rawFieldName, translatedNameViaMethod); customizedFieldNames.add(rawFieldName); } TypeRef typeRef = elements.get(rawFieldName); if (typeRef == null) { // its a getter/setter but without a corresponding field typeRef = new TypeRef(rawFieldName, null, " method: " + method.name(), returnType, description, format, min, max, defaultValue, allowableValues, required, false); elements.put(rawFieldName, typeRef); } if (isFieldGetter) { // return type may not have been set if there is no 
corresponding field or it may be different // to the fields type if (typeRef.type != returnType) { typeRef.type = returnType; } } // set other field values if not previously set if (typeRef.description == null) { typeRef.description = description; } if (typeRef.format == null) { typeRef.format = format; } if (typeRef.min == null) { typeRef.min = min; } if (typeRef.max == null) { typeRef.max = max; } if (typeRef.defaultValue == null) { typeRef.defaultValue = defaultValue; } if (typeRef.allowableValues == null) { typeRef.allowableValues = allowableValues; } if (typeRef.required == null) { typeRef.required = required; } if (!typeRef.hasView && hasView) { typeRef.hasView = true; } if (typeRef.type != null && this.composite && typeRef.paramCategory == null) { typeRef.paramCategory = ParserHelper.paramTypeOf(false, this.consumesMultipart, method, typeRef.type, this.options); } } else { // its a non getter/setter String paramCategory = ParserHelper.paramTypeOf(false, this.consumesMultipart, method, returnType, this.options); elements.put(translatedNameViaMethod, new TypeRef(null, paramCategory, " method: " + method.name(), returnType, description, format, min, max, defaultValue, allowableValues, required, hasView)); } } } } } private boolean checkFieldXmlAccess(String xmlAccessorType, FieldDoc field) { // if xml access type checking is disabled then do nothing if (this.options.isModelFieldsXmlAccessTypeEnabled()) { AnnotationParser annotationParser = new AnnotationParser(field, this.options); boolean hasJaxbAnnotation = annotationParser.isAnnotatedByPrefix("javax.xml.bind.annotation."); // if none access then only include if the field has a jaxb annotation if ("javax.xml.bind.annotation.XmlAccessType.NONE".equals(xmlAccessorType)) { return hasJaxbAnnotation; } // if property return false unless annotated by a jaxb annotation if ("javax.xml.bind.annotation.XmlAccessType.PROPERTY".equals(xmlAccessorType)) { return hasJaxbAnnotation; } // if public or default then return 
true if field is public or if annotated by a jaxb annotation if ((xmlAccessorType == null && this.options.isModelFieldsDefaultXmlAccessTypeEnabled()) || "javax.xml.bind.annotation.XmlAccessType.PUBLIC_MEMBER".equals(xmlAccessorType)) { return field.isPublic() || hasJaxbAnnotation; } } return true; } private boolean checkMethodXmlAccess(String xmlAccessorType, MethodDoc method) { // if xml access type checking is disabled then do nothing if (this.options.isModelFieldsXmlAccessTypeEnabled()) { AnnotationParser annotationParser = new AnnotationParser(method, this.options); boolean hasJaxbAnnotation = annotationParser.isAnnotatedByPrefix("javax.xml.bind.annotation."); // if none access then only include if the method has a jaxb annotation if ("javax.xml.bind.annotation.XmlAccessType.NONE".equals(xmlAccessorType)) { return hasJaxbAnnotation; } // if field return false unless annotated by a jaxb annotation if ("javax.xml.bind.annotation.XmlAccessType.FIELD".equals(xmlAccessorType)) { return hasJaxbAnnotation; } // if public or default then return true if field is public or if annotated by a jaxb annotation if ((xmlAccessorType == null && this.options.isModelFieldsDefaultXmlAccessTypeEnabled()) || "javax.xml.bind.annotation.XmlAccessType.PUBLIC_MEMBER".equals(xmlAccessorType)) { return method.isPublic() || hasJaxbAnnotation; } } return true; } private boolean excludeField(FieldDoc field, String translatedName) { // ignore static or transient fields or _ prefixed ones if (field.isStatic() || field.isTransient() || field.name().charAt(0) == '_') { return true; } // ignore fields that have no name which will be the case for fields annotated with one of the // ignore annotations like JsonIgnore or XmlTransient if (translatedName == null) { return true; } // ignore deprecated fields if (this.options.isExcludeDeprecatedFields() && ParserHelper.isDeprecated(field, this.options)) { return true; } // ignore fields we are to explicitly exclude if (ParserHelper.hasTag(field, 
this.options.getExcludeFieldTags())) { return true; } if (ParserHelper.hasAnnotation(field, this.options.getExcludeFieldAnnotations(), this.options)) { return true; } // ignore fields that are for a different json view ClassDoc[] jsonViews = ParserHelper.getJsonViews(field, this.options); if (!ParserHelper.isItemPartOfView(this.viewClasses, jsonViews)) { return true; } return false; } private boolean excludeMethod(MethodDoc method, String translatedNameViaMethod) { // ignore static methods and private methods if (method.isStatic() || method.isPrivate() || method.name().charAt(0) == '_') { return true; } // check for ignored fields if (translatedNameViaMethod == null) { // this is a method that is to be ignored via @JsonIgnore or @XmlTransient return true; } // ignore deprecated methods if (this.options.isExcludeDeprecatedFields() && ParserHelper.isDeprecated(method, this.options)) { return true; } // ignore methods we are to explicitly exclude if (ParserHelper.hasTag(method, this.options.getExcludeFieldTags())) { return true; } if (ParserHelper.hasAnnotation(method, this.options.getExcludeFieldAnnotations(), this.options)) { return true; } // ignore methods that are for a different json view ClassDoc[] jsonViews = ParserHelper.getJsonViews(method, this.options); if (!ParserHelper.isItemPartOfView(this.viewClasses, jsonViews)) { return true; } return false; } private Map<String, PropertyWrapper> findReferencedElements(ClassDoc classDoc, Map<String, TypeRef> types, boolean nested) { Map<String, PropertyWrapper> elements = new LinkedHashMap<>(); for (Map.Entry<String, TypeRef> entry : types.entrySet()) { String typeName = entry.getKey(); TypeRef typeRef = entry.getValue(); Type type = typeRef.type; ClassDoc typeClassDoc = type.asClassDoc(); // change type name based on parent view ClassDoc[] views = typeRef.hasView ? 
this.viewClasses : null; OptionalName propertyTypeFormat = this.translator.typeName(type, this.options.isUseFullModelIds(), views); String propertyType = propertyTypeFormat.value(); // read allowableValues, either given via a javadoc tag, or for enums are automatically generated List<String> allowableValues = typeRef.allowableValues; if (allowableValues == null) { allowableValues = ParserHelper.getAllowableValues(typeClassDoc); } if (allowableValues != null) { propertyType = "string"; } Type containerOf = ParserHelper.getContainerType(type, this.varsToTypes, this.docletClasses); String itemsRef = null; String itemsType = null; String itemsFormat = null; List<String> itemsAllowableValues = null; if (containerOf != null) { itemsAllowableValues = ParserHelper.getAllowableValues(containerOf.asClassDoc()); if (itemsAllowableValues != null) { itemsType = "string"; } else { OptionalName oName = this.translator.typeName(containerOf, this.options.isUseFullModelIds(), views); if (ParserHelper.isPrimitive(containerOf, this.options)) { itemsType = oName.value(); itemsFormat = oName.getFormat(); } else { itemsRef = oName.value(); } } } Boolean uniqueItems = null; if (propertyType.equals("array")) { if (ParserHelper.isSet(type.qualifiedTypeName())) { uniqueItems = Boolean.TRUE; } } String validationContext = " for the " + typeRef.sourceDesc + " of the class: " + classDoc.name(); // validate min/max ParserHelper.verifyNumericValue(validationContext + " min value.", propertyTypeFormat.value(), propertyTypeFormat.getFormat(), typeRef.min); ParserHelper.verifyNumericValue(validationContext + " max value.", propertyTypeFormat.value(), propertyTypeFormat.getFormat(), typeRef.max); // if enum and default value check it matches the enum values if (allowableValues != null && typeRef.defaultValue != null && !allowableValues.contains(typeRef.defaultValue)) { throw new IllegalStateException(" Invalid value for the default value of the " + typeRef.sourceDesc + " it should be one of: " + 
allowableValues); } // verify default vs min, max and by itself if (typeRef.defaultValue != null) { if (typeRef.min == null && typeRef.max == null) { // just validate the default ParserHelper.verifyValue(validationContext + " default value.", propertyTypeFormat.value(), propertyTypeFormat.getFormat(), typeRef.defaultValue); } // if min/max then default is validated as part of comparison if (typeRef.min != null) { int comparison = ParserHelper.compareNumericValues(validationContext + " min value.", propertyTypeFormat.value(), propertyTypeFormat.getFormat(), typeRef.defaultValue, typeRef.min); if (comparison < 0) { throw new IllegalStateException("Invalid value for the default value of the " + typeRef.sourceDesc + " it should be >= the minimum: " + typeRef.min); } } if (typeRef.max != null) { int comparison = ParserHelper.compareNumericValues(validationContext + " max value.", propertyTypeFormat.value(), propertyTypeFormat.getFormat(), typeRef.defaultValue, typeRef.max); if (comparison > 0) { throw new IllegalStateException("Invalid value for the default value of the " + typeRef.sourceDesc + " it should be <= the maximum: " + typeRef.max); } } } // the format is either directly related to the type // or otherwise may be specified on the field via a javadoc tag String format = propertyTypeFormat.getFormat(); if (format == null) { format = typeRef.format; } PropertyWrapper propertyWrapper = buildPropertyWrapper(typeRef.rawName, typeRef.paramCategory, propertyType, format, typeRef.description, itemsRef, itemsType, itemsFormat, itemsAllowableValues, uniqueItems, allowableValues, typeRef.min, typeRef.max, typeRef.defaultValue); elements.put(typeName, propertyWrapper); } return elements; } private void parseNestedModels(Collection<TypeRef> types) { for (TypeRef type : types) { parseModel(type.type, true); // parse paramaterized types ParameterizedType pt = type.type.asParameterizedType(); if (pt != null) { Type[] typeArgs = pt.typeArguments(); if (typeArgs != null) { for 
(Type paramType : typeArgs) { parseModel(paramType, true); } } } } } private Type getModelType(Type type, boolean nested) { if (type != null) { ParameterizedType pt = type.asParameterizedType(); if (pt != null) { Type[] typeArgs = pt.typeArguments(); if (typeArgs != null && typeArgs.length > 0) { // if its a generic wrapper type then return the wrapped type if (this.options.getGenericWrapperTypes().contains(type.qualifiedTypeName())) { return typeArgs[0]; } // TODO what about maps? } } // if its a ref to a param type replace with the type impl Type paramType = ParserHelper.getVarType(type.asTypeVariable(), this.varsToTypes); if (paramType != null) { return paramType; } } return type; } /** * This gets the return type for a resource method, it supports wrapper types * @param options * @param type * @return The type to use for the resource method */ public static Type getReturnType(DocletOptions options, Type type) { if (type != null) { ParameterizedType pt = type.asParameterizedType(); if (pt != null) { Type[] typeArgs = pt.typeArguments(); if (typeArgs != null && typeArgs.length > 0) { // if its a generic wrapper type then return the wrapped type if (options.getGenericWrapperTypes().contains(type.qualifiedTypeName())) { return typeArgs[0]; } } } } return type; } private boolean alreadyStoredType(Type type, Set<ModelWrapper> apiModels) { // if a collection then the type to check is the param type Type containerOf = ParserHelper.getContainerType(type, this.varsToTypes, null); if (containerOf != null) { type = containerOf; } final Type typeToCheck = type; final ClassDoc[] viewClasses = this.viewClasses; final String modelId = this.translator.typeName(typeToCheck, this.options.isUseFullModelIds(), viewClasses).value(); return filter(apiModels, new Predicate<ModelWrapper>() { public boolean apply(ModelWrapper model) { return model.getModel().getReference().equals(modelId); } }).size() > 0; } private PropertyWrapper buildPropertyWrapper(String rawFieldName, String 
paramCategory, String type, String format, String description, String itemsRef, String itemsType, String itemsFormat, List<String> itemsAllowableValues, Boolean uniqueItems, List<String> allowableValues, String minimum, String maximum, String defaultValue) { Map<PropertyBuilder.PropertyId, Object> args = new HashMap<>(); args.put(PropertyBuilder.PropertyId.DESCRIPTION, description); args.put(PropertyBuilder.PropertyId.ENUM, allowableValues); args.put(PropertyBuilder.PropertyId.UNIQUE_ITEMS, uniqueItems); args.put(PropertyBuilder.PropertyId.MINIMUM, minimum); args.put(PropertyBuilder.PropertyId.MAXIMUM, maximum); args.put(PropertyBuilder.PropertyId.DEFAULT, defaultValue); io.swagger.models.properties.Property property = PropertyBuilder.build(type, format, args); // if PropertyBuilder.build is null then this is a RefProperty if (property == null) { property = new RefProperty(type); } if (property instanceof ArrayProperty) { ((ArrayProperty) property).setItems(ParserHelper.buildItems(itemsRef, itemsType, itemsFormat, itemsAllowableValues, uniqueItems)); } return new PropertyWrapper(rawFieldName, paramCategory, property); } }
package org.broadinstitute.hellbender.tools.copynumber.formats.collections;

import com.google.common.collect.ImmutableList;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMTextHeaderCodec;
import htsjdk.samtools.util.BufferedLineReader;
import htsjdk.samtools.util.LineReader;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.tools.copynumber.formats.CopyNumberFormatsUtils;
import org.broadinstitute.hellbender.tools.copynumber.formats.metadata.Metadata;
import org.broadinstitute.hellbender.tools.copynumber.formats.metadata.MetadataUtils;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.tsv.*;

import java.io.*;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Represents {@link METADATA} (which can be represented as a {@link SAMFileHeader}),
 * an immutable collection of records,
 * a set of mandatory column headers given by a {@link TableColumnCollection},
 * and lambdas for reading and writing records.
 *
 * @author Samuel Lee &lt;slee@broadinstitute.org&gt;
 */
public abstract class AbstractRecordCollection<METADATA extends Metadata, RECORD> {
    private final METADATA metadata;
    private final ImmutableList<RECORD> records;
    private final TableColumnCollection mandatoryColumns;
    private final Function<DataLine, RECORD> recordFromDataLineDecoder;
    private final BiConsumer<RECORD, DataLine> recordToDataLineEncoder;

    /**
     * Constructor given the {@link METADATA}, the list of records, the mandatory column headers,
     * and the lambdas for reading and writing records.
     *
     * @param metadata                  {@link METADATA} (which can be represented as a {@link SAMFileHeader}
     * @param records                   list of records; may be empty
     * @param mandatoryColumns          mandatory columns required to construct collection from a TSV file; cannot be empty
     * @param recordFromDataLineDecoder lambda for decoding a record from a {@link DataLine} when reading from a TSV file
     * @param recordToDataLineEncoder   lambda for encoding a record to a {@link DataLine} when writing to a TSV file
     */
    AbstractRecordCollection(final METADATA metadata,
                             final List<RECORD> records,
                             final TableColumnCollection mandatoryColumns,
                             final Function<DataLine, RECORD> recordFromDataLineDecoder,
                             final BiConsumer<RECORD, DataLine> recordToDataLineEncoder) {
        this.metadata = Utils.nonNull(metadata);
        // defensive immutable copy so external mutation of the input list cannot affect this collection
        this.records = ImmutableList.copyOf(Utils.nonNull(records));
        this.mandatoryColumns = Utils.nonNull(mandatoryColumns);
        this.recordFromDataLineDecoder = Utils.nonNull(recordFromDataLineDecoder);
        this.recordToDataLineEncoder = Utils.nonNull(recordToDataLineEncoder);
        Utils.nonEmpty(mandatoryColumns.names());
    }

    /**
     * Constructor given an input file, the mandatory column headers, and the lambdas for reading and writing records.
     * The list of records is read using the column headers and the appropriate lambda.
     *
     * @param inputFile                 TSV file; must contain a {@link SAMFileHeader} and mandatory column headers,
     *                                  but can contain no records
     * @param mandatoryColumns          mandatory columns required to construct collection from a TSV file; cannot be empty
     * @param recordFromDataLineDecoder lambda for decoding a record from a {@link DataLine} when reading from a TSV file
     * @param recordToDataLineEncoder   lambda for encoding a record to a {@link DataLine} when writing to a TSV file
     */
    AbstractRecordCollection(final File inputFile,
                             final TableColumnCollection mandatoryColumns,
                             final Function<DataLine, RECORD> recordFromDataLineDecoder,
                             final BiConsumer<RECORD, DataLine> recordToDataLineEncoder) {
        IOUtils.canReadFile(inputFile);
        this.mandatoryColumns = Utils.nonNull(mandatoryColumns);
        this.recordFromDataLineDecoder = Utils.nonNull(recordFromDataLineDecoder);
        this.recordToDataLineEncoder = Utils.nonNull(recordToDataLineEncoder);
        Utils.nonEmpty(mandatoryColumns.names());

        try (final RecordCollectionReader reader = new RecordCollectionReader(inputFile)) {
            metadata = MetadataUtils.fromHeader(reader.getHeader(), getMetadataType());
            TableUtils.checkMandatoryColumns(reader.columns(), mandatoryColumns, UserException.BadInput::new);
            records = reader.stream().collect(Collectors.collectingAndThen(Collectors.toList(), ImmutableList::copyOf));
        } catch (final IOException | UncheckedIOException e) {
            throw new UserException.CouldNotReadInputFile(inputFile, e);
        }
    }

    public final int size() {
        return records.size();
    }

    /**
     * Subclasses should add an enum to {@link Metadata.Type}, a corresponding switch case statement
     * to {@link MetadataUtils#fromHeader(SAMFileHeader, Metadata.Type)}, and implement this method accordingly.
     */
    abstract Metadata.Type getMetadataType();

    public METADATA getMetadata() {
        return metadata;
    }

    /**
     * @return an immutable view of the records contained in the collection
     */
    public final List<RECORD> getRecords() {
        return records;
    }

    /**
     * Writes the records to file.
     * The SAM-style header is written first (truncating any existing file contents),
     * then the TSV column headers and records are appended.
     */
    public void write(final File outputFile) {
        try (final FileWriter writer = new FileWriter(outputFile)) {
            writer.write(metadata.toHeader().getSAMString());
        } catch (final IOException e) {
            throw new UserException.CouldNotCreateOutputFile(outputFile, e);
        }
        try (final RecordWriter recordWriter = new RecordWriter(new FileWriter(outputFile, true))) {
            recordWriter.writeAllRecords(records);
        } catch (final IOException e) {
            throw new UserException.CouldNotCreateOutputFile(outputFile, e);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        final AbstractRecordCollection<?, ?> that = (AbstractRecordCollection<?, ?>) o;
        return metadata.equals(that.metadata) &&
                records.equals(that.records) &&
                mandatoryColumns.equals(that.mandatoryColumns) &&
                recordFromDataLineDecoder.equals(that.recordFromDataLineDecoder) &&
                recordToDataLineEncoder.equals(that.recordToDataLineEncoder);
    }

    @Override
    public int hashCode() {
        int result = metadata.hashCode();
        result = 31 * result + records.hashCode();
        result = 31 * result + mandatoryColumns.hashCode();
        result = 31 * result + recordFromDataLineDecoder.hashCode();
        result = 31 * result + recordToDataLineEncoder.hashCode();
        return result;
    }

    @Override
    public String toString() {
        return "AbstractRecordCollection{" +
                "metadata=" + metadata +
                ", records=" + records +
                '}';
    }

    static String formatDouble(final double value) {
        return CopyNumberFormatsUtils.formatDouble(value);
    }

    /**
     * TSV reader that decodes each {@link DataLine} into a RECORD via the decoder lambda
     * and treats the embedded SAM-style header lines as comments.
     */
    final class RecordCollectionReader extends TableReader<RECORD> {
        private static final String COMMENT_PREFIX = CopyNumberFormatsUtils.COMMENT_PREFIX;    //SAMTextHeaderCodec.HEADER_LINE_START; we need TableReader to treat SAM header as comment lines
        private final File file;

        RecordCollectionReader(final File file) throws IOException {
            super(file);
            this.file = file;
        }

        @Override
        protected RECORD createRecord(final DataLine dataLine) {
            Utils.nonNull(dataLine);
            return recordFromDataLineDecoder.apply(dataLine);
        }

        private SAMFileHeader getHeader() throws FileNotFoundException {
            // FIX: the LineReader (and its underlying FileInputStream) was previously never
            // closed, leaking a file handle on every header read; close it deterministically
            try (final LineReader lineReader = new BufferedLineReader(new FileInputStream(file))) {
                return new SAMTextHeaderCodec().decode(lineReader, getSource());
            }
        }

        @Override
        protected boolean isCommentLine(final String[] line) {
            return line.length > 0 && line[0].startsWith(COMMENT_PREFIX);
        }
    }

    /**
     * TSV writer that encodes each RECORD into a {@link DataLine} via the encoder lambda,
     * using the mandatory columns of the enclosing collection as the table header.
     */
    final class RecordWriter extends TableWriter<RECORD> {
        RecordWriter(final Writer writer) throws IOException {
            super(writer, mandatoryColumns);
        }

        @Override
        protected void composeLine(final RECORD record, final DataLine dataLine) {
            Utils.nonNull(record);
            Utils.nonNull(dataLine);
            recordToDataLineEncoder.accept(record, dataLine);
        }
    }
}
package io.github.kobakei.grenade;

import android.content.Context;
import android.content.Intent;

import com.google.auto.service.AutoService;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.Processor;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;

import io.github.kobakei.grenade.annotation.Extra;
import io.github.kobakei.grenade.annotation.Navigator;
import io.github.kobakei.grenade.annotation.OnActivityResult;
import io.github.kobakei.grenade.annotation.Optional;

/**
 * Annotation processor that generates a {@code FooActivityNavigator} class for every
 * activity annotated with {@link Navigator}. The generated navigator builds launch
 * intents from {@link Extra}-annotated fields, injects extras back into the activity,
 * and dispatches {@link OnActivityResult} callbacks.
 */
@AutoService(Processor.class)
public class GrenadeProcessor extends AbstractProcessor {

    private static final boolean LOGGABLE = false;

    private Filer filer;
    private Messager messager;
    private Elements elements;
    private Types types;

    private static final ClassName INTENT_CLASS = ClassName.get(Intent.class);
    private static final ClassName CONTEXT_CLASS = ClassName.get(Context.class);
    private static final ClassName PARCELER_CLASS = ClassName.get("org.parceler", "Parcels");

    /**
     * Boxed/array types that can be written with a plain {@code Intent.putExtra(key, value)}
     * overload. ArrayList variants need dedicated put methods and are added separately in
     * {@link #createPutExtraStatements(String)}.
     */
    private static final String[] SIMPLE_EXTRA_TYPES = {
            "java.lang.Integer", "java.lang.Long", "java.lang.Short",
            "java.lang.Float", "java.lang.Double", "java.lang.Boolean",
            "java.lang.Byte", "java.lang.Character",
            "java.lang.String", "java.lang.CharSequence",
            "java.io.Serializable", "android.os.Parcelable", "android.os.Bundle",
            "int[]", "long[]", "short[]", "float[]", "double[]",
            "boolean[]", "char[]", "byte[]",
            "java.lang.String[]", "java.lang.CharSequence[]", "android.os.Parcelable[]"
    };

    /** JavaPoet putExtra statements for instance fields ({@code this.field}). */
    private static final Map<String, String> PUT_EXTRA_STATEMENTS = createPutExtraStatements("this.$L");
    /** JavaPoet putExtra statements for local variables (used by resultForXXX methods). */
    private static final Map<String, String> PUT_EXTRA_STATEMENTS_2 = createPutExtraStatements("$L");
    /** JavaPoet getXXXExtra expressions keyed by the boxed type's fully-qualified name. */
    private static final Map<String, String> GET_EXTRA_STATEMENTS = createGetExtraStatements();

    /**
     * Builds the type -> putExtra statement map. Replaces the former double-brace
     * initialization (which created an anonymous HashMap subclass per map).
     *
     * @param valueLiteral JavaPoet literal for the value expression ("this.$L" or "$L")
     */
    private static Map<String, String> createPutExtraStatements(String valueLiteral) {
        Map<String, String> map = new HashMap<>();
        String defaultStatement = "intent.putExtra($S, " + valueLiteral + ")";
        for (String type : SIMPLE_EXTRA_TYPES) {
            map.put(type, defaultStatement);
        }
        // ArrayList extras have dedicated put methods on Intent.
        map.put("java.util.ArrayList<java.lang.Integer>",
                "intent.putIntegerArrayListExtra($S, " + valueLiteral + ")");
        map.put("java.util.ArrayList<java.lang.String>",
                "intent.putStringArrayListExtra($S, " + valueLiteral + ")");
        map.put("java.util.ArrayList<java.lang.CharSequence>",
                "intent.putCharSequenceArrayListExtra($S, " + valueLiteral + ")");
        map.put("java.util.ArrayList<android.os.Parcelable>",
                "intent.putParcelableArrayListExtra($S, " + valueLiteral + ")");
        return map;
    }

    /** Builds the type -> getXXXExtra expression map (each type has its own accessor). */
    private static Map<String, String> createGetExtraStatements() {
        Map<String, String> map = new HashMap<>();
        map.put("java.lang.Integer", "intent.getIntExtra($S, 0)");
        map.put("java.lang.Long", "intent.getLongExtra($S, 0L)");
        map.put("java.lang.Short", "intent.getShortExtra($S, (short)0)");
        map.put("java.lang.Float", "intent.getFloatExtra($S, 0.0f)");
        map.put("java.lang.Double", "intent.getDoubleExtra($S, 0.0)");
        map.put("java.lang.Boolean", "intent.getBooleanExtra($S, false)");
        map.put("java.lang.Byte", "intent.getByteExtra($S, (byte)0)");
        map.put("java.lang.Character", "intent.getCharExtra($S, (char)0)");
        map.put("java.lang.String", "intent.getStringExtra($S)");
        map.put("java.lang.CharSequence", "intent.getCharSequenceExtra($S)");
        map.put("java.io.Serializable", "intent.getSerializableExtra($S)");
        map.put("android.os.Parcelable", "intent.getParcelableExtra($S)");
        map.put("android.os.Bundle", "intent.getBundleExtra($S)");
        map.put("int[]", "intent.getIntArrayExtra($S)");
        map.put("long[]", "intent.getLongArrayExtra($S)");
        map.put("short[]", "intent.getShortArrayExtra($S)");
        map.put("float[]", "intent.getFloatArrayExtra($S)");
        map.put("double[]", "intent.getDoubleArrayExtra($S)");
        map.put("boolean[]", "intent.getBooleanArrayExtra($S)");
        map.put("char[]", "intent.getCharArrayExtra($S)");
        map.put("byte[]", "intent.getByteArrayExtra($S)");
        map.put("java.lang.String[]", "intent.getStringArrayExtra($S)");
        map.put("java.lang.CharSequence[]", "intent.getCharSequenceArrayExtra($S)");
        map.put("android.os.Parcelable[]", "intent.getParcelableArrayExtra($S)");
        map.put("java.util.ArrayList<java.lang.Integer>", "intent.getIntegerArrayListExtra($S)");
        map.put("java.util.ArrayList<java.lang.String>", "intent.getStringArrayListExtra($S)");
        map.put("java.util.ArrayList<java.lang.CharSequence>", "intent.getCharSequenceArrayListExtra($S)");
        map.put("java.util.ArrayList<android.os.Parcelable>", "intent.getParcelableArrayListExtra($S)");
        return map;
    }

    // Parceler
    private static final String PARCELER_PUT_EXTRA_STATEMENT = "intent.putExtra($S, $T.wrap(this.$L))";
    private static final String PARCELER_GET_EXTRA_STATEMENT = "target.$L = $T.unwrap(intent.getParcelableExtra($S))";

    @Override
    public synchronized void init(ProcessingEnvironment processingEnv) {
        super.init(processingEnv);
        this.messager = processingEnv.getMessager();
        this.filer = processingEnv.getFiler();
        this.elements = processingEnv.getElementUtils();
        this.types = processingEnv.getTypeUtils();
    }

    @Override
    public Set<String> getSupportedAnnotationTypes() {
        Set<String> set = new HashSet<>();
        set.add(Navigator.class.getCanonicalName());
        return set;
    }

    @Override
    public SourceVersion getSupportedSourceVersion() {
        return SourceVersion.latestSupported();
    }

    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        Class<Navigator> navigatorClass = Navigator.class;
        for (Element element : roundEnv.getElementsAnnotatedWith(navigatorClass)) {
            log("Found navigator");
            try {
                generateNavigator(element);
            } catch (IOException e) {
                // Include the element and cause so the failure is diagnosable from
                // the compiler output (previously only "IO error" was reported).
                logError("IO error while generating navigator for "
                        + element.getSimpleName() + ": " + e.getMessage());
            }
        }
        return true;
    }

    /**
     * Generate FooActivityNavigator class
     * @param element Element annotated with @Navigator.
     * @throws IOException if the generated source file cannot be written
     */
    private void generateNavigator(Element element) throws IOException {
        String className = element.getSimpleName().toString();
        String packageName = elements.getPackageOf(element).getQualifiedName().toString();
        String navigatorName = className + "Navigator";
        ClassName targetClass = ClassName.get(packageName, className);
        ClassName navigatorClass = ClassName.get(packageName, navigatorName);

        // Class
        TypeSpec.Builder navigatorBuilder = TypeSpec.classBuilder(navigatorName)
                .addJavadoc("Launcher of $T", targetClass)
                .addModifiers(Modifier.PUBLIC);

        // Launcher annotation
        Navigator navigator = element.getAnnotation(Navigator.class);
        String[] rules = navigator.value();

        // Find @Extra and @OnActivityResult
        List<Element> requiredExtraElements = new ArrayList<>();
        List<Element> optionalExtraElements = new ArrayList<>();
        List<Element> onActivityResultElements = new ArrayList<>();
        for (Element elem : element.getEnclosedElements()) {
            // Extra: @Optional decides which bucket the field goes in.
            Extra extra = elem.getAnnotation(Extra.class);
            if (extra != null) {
                Optional optional = elem.getAnnotation(Optional.class);
                if (optional != null) {
                    log("Optional");
                    optionalExtraElements.add(elem);
                } else {
                    log("Required");
                    requiredExtraElements.add(elem);
                }
            }
            // OAR
            OnActivityResult onActivityResult = elem.getAnnotation(OnActivityResult.class);
            if (onActivityResult != null) {
                onActivityResultElements.add(elem);
            }
        }

        // fields
        log("Adding fields");
        for (Element e : requiredExtraElements) {
            addField(navigatorBuilder, e);
        }
        for (Element e : optionalExtraElements) {
            addField(navigatorBuilder, e);
        }

        // flag field
        FieldSpec flagFieldSpec = FieldSpec.builder(TypeName.INT, "flags", Modifier.PRIVATE)
                .build();
        navigatorBuilder.addField(flagFieldSpec);

        // action field
        FieldSpec actionFieldSpec = FieldSpec.builder(TypeName.get(String.class), "action", Modifier.PRIVATE)
                .build();
        navigatorBuilder.addField(actionFieldSpec);

        // Constructor: one per @Navigator rule, or a single all-required-params
        // constructor when no rules are given.
        log("Adding constructors");
        if (rules.length == 0) {
            addConstructor(navigatorBuilder, requiredExtraElements);
        } else {
            for (String rule : rules) {
                addConstructor(navigatorBuilder, requiredExtraElements, rule);
            }
        }

        // set option value method
        log("Add optional methods");
        for (Element optionalExtraElement : optionalExtraElements) {
            addOptionalExtraMethod(navigatorBuilder, navigatorClass, optionalExtraElement);
        }

        // add flags method
        log("Add flags method");
        addFlagsMethod(navigatorBuilder, navigatorClass);

        // set action method
        log("Add action method");
        addActionMethod(navigatorBuilder, navigatorClass);

        // build method
        log("Add build method");
        addBuildMethod(navigatorBuilder, targetClass, requiredExtraElements, optionalExtraElements);

        // (static) inject method
        log("Add inject method");
        addInjectMethod(navigatorBuilder, targetClass, requiredExtraElements, optionalExtraElements);

        // (static) resultFor method for each @OnActivityResult
        log("Add resultFor method");
        addResultForMethods(navigatorBuilder, onActivityResultElements);

        // (static) onActivityResult method
        log("Add onActivity method");
        if (onActivityResultElements.size() > 0) {
            addOnActivityResultMethod(navigatorBuilder, targetClass, onActivityResultElements);
        }

        // Write
        JavaFile.builder(packageName, navigatorBuilder.build())
                .build()
                .writeTo(filer);
    }

    /**
     * Add a private field mirroring an @Extra-annotated field of the target.
     * @param navigatorBuilder builder of the navigator class
     * @param extraElement field element annotated with @Extra
     */
    private void addField(TypeSpec.Builder navigatorBuilder, Element extraElement) {
        String fieldName = extraElement.getSimpleName().toString();
        TypeName fieldType = TypeName.get(extraElement.asType());
        FieldSpec fieldSpec = FieldSpec.builder(fieldType, fieldName, Modifier.PRIVATE)
                .build();
        navigatorBuilder.addField(fieldSpec);
    }

    /**
     * Add constructor taking all required params.
     * @param navigatorBuilder builder of the navigator class
     * @param requiredElements required @Extra field elements
     */
    private void addConstructor(TypeSpec.Builder navigatorBuilder, List<Element> requiredElements) {
        MethodSpec.Builder constructorSpecBuilder = MethodSpec.constructorBuilder()
                .addJavadoc("Constructor with required params")
                .addModifiers(Modifier.PUBLIC);
        for (Element e : requiredElements) {
            String fieldName = e.getSimpleName().toString();
            TypeName fieldType = TypeName.get(e.asType());
            constructorSpecBuilder.addParameter(fieldType, fieldName)
                    .addStatement("this.$L = $L", fieldName, fieldName);
        }
        navigatorBuilder.addMethod(constructorSpecBuilder.build());
    }

    /**
     * Add constructor taking only the required params named in a rule.
     * @param navigatorBuilder builder of the navigator class
     * @param requiredExtraElements required @Extra field elements
     * @param rule comma-separated field names from @Navigator's value
     */
    private void addConstructor(TypeSpec.Builder navigatorBuilder, List<Element> requiredExtraElements,
                                String rule) {
        List<String> tokens = Arrays.asList(rule.split(","));
        MethodSpec.Builder constructorSpecBuilder = MethodSpec.constructorBuilder()
                .addJavadoc("Constructor with required params")
                .addModifiers(Modifier.PUBLIC);
        for (Element e : requiredExtraElements) {
            String fieldName = e.getSimpleName().toString();
            TypeName fieldType = TypeName.get(e.asType());
            if (tokens.contains(fieldName)) {
                constructorSpecBuilder.addParameter(fieldType, fieldName)
                        .addStatement("this.$L = $L", fieldName, fieldName);
            }
        }
        navigatorBuilder.addMethod(constructorSpecBuilder.build());
    }

    /**
     * Add fluent setter for an optional extra.
     * @param navigatorBuilder builder of the navigator class
     * @param navigatorClass navigator class name (fluent return type)
     * @param optionalExtraElement optional @Extra field element
     */
    private void addOptionalExtraMethod(TypeSpec.Builder navigatorBuilder, ClassName navigatorClass,
                                        Element optionalExtraElement) {
        String fieldName = optionalExtraElement.getSimpleName().toString();
        TypeName fieldType = TypeName.get(optionalExtraElement.asType());
        MethodSpec setOptionalSpec = MethodSpec.methodBuilder(fieldName)
                .addJavadoc("Set optional field")
                .addModifiers(Modifier.PUBLIC)
                .addParameter(fieldType, fieldName)
                .returns(navigatorClass)
                .addStatement("this.$L = $L", fieldName, fieldName)
                .addStatement("return this")
                .build();
        navigatorBuilder.addMethod(setOptionalSpec);
    }

    /**
     * Add fluent flags() method.
     * @param navigatorBuilder builder of the navigator class
     * @param navigatorClass navigator class name (fluent return type)
     */
    private void addFlagsMethod(TypeSpec.Builder navigatorBuilder, ClassName navigatorClass) {
        MethodSpec flagsMethod = MethodSpec.methodBuilder("flags")
                .addJavadoc("Add intent flags")
                .addModifiers(Modifier.PUBLIC)
                .addParameter(TypeName.INT, "flags")
                .returns(navigatorClass)
                .addStatement("this.flags = flags")
                .addStatement("return this")
                .build();
        navigatorBuilder.addMethod(flagsMethod);
    }

    /**
     * Add fluent action() method.
     * @param navigatorBuilder builder of the navigator class
     * @param navigatorClass navigator class name (fluent return type)
     */
    private void addActionMethod(TypeSpec.Builder navigatorBuilder, ClassName navigatorClass) {
        MethodSpec actionMethod = MethodSpec.methodBuilder("action")
                .addJavadoc("Set action")
                .addModifiers(Modifier.PUBLIC)
                .addParameter(TypeName.get(String.class), "action")
                .returns(navigatorClass)
                .addStatement("this.action = action")
                .addStatement("return this")
                .build();
        navigatorBuilder.addMethod(actionMethod);
    }

    /**
     * Add static inject(target, intent) method that copies extras back into the activity.
     * @param navigatorBuilder builder of the navigator class
     * @param targetClass annotated activity class
     * @param requiredExtraElements required @Extra field elements
     * @param optionalExtraElements optional @Extra field elements
     */
    private void addInjectMethod(TypeSpec.Builder navigatorBuilder, ClassName targetClass,
                                 List<Element> requiredExtraElements, List<Element> optionalExtraElements) {
        MethodSpec.Builder injectSpecBuilder = MethodSpec.methodBuilder("inject")
                .addJavadoc("Inject fields of activity from intent")
                .addModifiers(Modifier.PUBLIC, Modifier.STATIC)
                .addParameter(targetClass, "target")
                .addParameter(INTENT_CLASS, "intent");
        for (Element e : requiredExtraElements) {
            addGetExtraStatement(injectSpecBuilder, e, false);
        }
        for (Element e : optionalExtraElements) {
            addGetExtraStatement(injectSpecBuilder, e, true);
        }
        navigatorBuilder.addMethod(injectSpecBuilder.build());
    }

    /**
     * Add build(context) method producing the launch intent.
     * @param navigatorBuilder builder of the navigator class
     * @param targetClass annotated activity class
     * @param requiredExtraElements required @Extra field elements
     * @param optionalExtraElements optional @Extra field elements
     */
    private void addBuildMethod(TypeSpec.Builder navigatorBuilder, ClassName targetClass,
                                List<Element> requiredExtraElements, List<Element> optionalExtraElements) {
        MethodSpec.Builder buildSpecBuilder = MethodSpec.methodBuilder("build")
                .addJavadoc("Build intent")
                .addModifiers(Modifier.PUBLIC)
                .addParameter(CONTEXT_CLASS, "context")
                .returns(INTENT_CLASS)
                .addStatement("$T intent = new $T(context, $T.class)",
                        INTENT_CLASS, INTENT_CLASS, targetClass);
        for (Element e : requiredExtraElements) {
            addPutExtraStatement(buildSpecBuilder, e);
        }
        for (Element e : optionalExtraElements) {
            addPutExtraStatement(buildSpecBuilder, e);
        }
        // Note: the former trailing ".build()" on this chain was discarded; removed.
        buildSpecBuilder
                .addStatement("intent.addFlags(this.flags)")
                .addStatement("intent.setAction(this.action)")
                .addStatement("return intent");
        navigatorBuilder.addMethod(buildSpecBuilder.build());
    }

    /**
     * Add a static resultForXXX factory method for each @OnActivityResult handler
     * that takes parameters; handlers without parameters are skipped.
     * @param navigatorBuilder builder of the navigator class
     * @param onActivityResultElements @OnActivityResult method elements
     */
    private void addResultForMethods(TypeSpec.Builder navigatorBuilder, List<Element> onActivityResultElements) {
        for (Element oarElement : onActivityResultElements) {
            ExecutableType executableType = (ExecutableType) oarElement.asType();
            if (executableType.getParameterTypes().size() <= 0) {
                continue;
            }

            String methodName = oarElement.getSimpleName().toString();
            MethodSpec.Builder resultForSpecBuilder =
                    MethodSpec.methodBuilder("resultFor" + StringUtils.beginCap(methodName))
                            .addJavadoc("Create result intent")
                            .addModifiers(Modifier.PUBLIC, Modifier.STATIC)
                            .returns(INTENT_CLASS);
            resultForSpecBuilder
                    .addStatement("$T intent = new $T()", INTENT_CLASS, INTENT_CLASS);
            for (int i = 0; i < executableType.getParameterTypes().size(); i++) {
                TypeMirror paramTypeMirror = executableType.getParameterTypes().get(i);
                String key = "param" + i;
                TypeName paramType = TypeName.get(paramTypeMirror);
                TypeName boxedParamType = paramType.box();
                resultForSpecBuilder.addParameter(paramType, key);
                String statement = PUT_EXTRA_STATEMENTS_2.get(boxedParamType.toString());
                if (statement == null) {
                    // Report instead of NPE-ing inside JavaPoet; the ERROR diagnostic
                    // fails the build with a meaningful message.
                    logError("[putExtra] Unsupported type: " + boxedParamType.toString());
                    continue;
                }
                resultForSpecBuilder.addStatement(statement, key, key);
            }
            resultForSpecBuilder
                    .addStatement("return intent");
            navigatorBuilder.addMethod(resultForSpecBuilder.build());
        }
    }

    /**
     * Add static onActivityResult dispatcher that routes (requestCode, resultCode)
     * pairs to the matching @OnActivityResult handler on the target.
     * @param navigatorBuilder builder of the navigator class
     * @param targetClass annotated activity class
     * @param onActivityResultElements @OnActivityResult method elements
     */
    private void addOnActivityResultMethod(TypeSpec.Builder navigatorBuilder, ClassName targetClass,
                                           List<Element> onActivityResultElements) {
        MethodSpec.Builder onActivityResultSpecBuilder = MethodSpec.methodBuilder("onActivityResult")
                .addJavadoc("Call this method in your Activity's onActivityResult")
                .addModifiers(Modifier.PUBLIC, Modifier.STATIC)
                .addParameter(targetClass, "target")
                .addParameter(TypeName.INT, "requestCode")
                .addParameter(TypeName.INT, "resultCode")
                .addParameter(INTENT_CLASS, "intent");
        for (Element e : onActivityResultElements) {
            String methodName = e.getSimpleName().toString();
            OnActivityResult oar = e.getAnnotation(OnActivityResult.class);
            onActivityResultSpecBuilder.beginControlFlow(
                    "if (requestCode == $L && java.util.Arrays.asList($L).contains(resultCode))",
                    oar.requestCode(), StringUtils.join(oar.resultCodes()));
            ExecutableType executableType = (ExecutableType) e.asType();
            String args = "";
            for (int i = 0; i < executableType.getParameterTypes().size(); i++) {
                TypeMirror paramTypeMirror = executableType.getParameterTypes().get(i);
                String key = "param" + i;
                TypeName paramType = TypeName.get(paramTypeMirror).box();
                String getter = GET_EXTRA_STATEMENTS.get(paramType.toString());
                if (getter == null) {
                    // Previously this produced "... = null" source; report it instead.
                    logError("[getExtra] Unsupported type: " + paramType.toString());
                    continue;
                }
                String statement = "$T $L = " + getter;
                onActivityResultSpecBuilder
                        .addStatement(statement, paramTypeMirror, key, key);
                args += key;
                if (i < executableType.getParameterTypes().size() - 1) {
                    args += ",";
                }
            }
            onActivityResultSpecBuilder
                    .addStatement("target.$L($L)", methodName, args)
                    .endControlFlow();
        }
        navigatorBuilder.addMethod(onActivityResultSpecBuilder.build());
    }

    /**
     * Add putXXXExtra statement to build method
     * @param buildSpecBuilder builder of the build() method
     * @param e @Extra field element
     */
    private void addPutExtraStatement(MethodSpec.Builder buildSpecBuilder, Element e) {
        String fieldName = e.getSimpleName().toString();
        Extra extra = e.getAnnotation(Extra.class);
        String keyName = extra.key().length() > 0 ? extra.key() : fieldName;
        TypeName fieldType = TypeName.get(e.asType()).box();
        if (shouldUseParceler(e)) {
            buildSpecBuilder.addStatement(PARCELER_PUT_EXTRA_STATEMENT,
                    keyName, PARCELER_CLASS, fieldName);
            return;
        } else {
            String statement = PUT_EXTRA_STATEMENTS.get(fieldType.toString());
            if (statement != null) {
                buildSpecBuilder.addStatement(statement, keyName, fieldName);
                return;
            }
        }
        logError("[putExtra] Unsupported type: " + fieldType.toString());
    }

    /**
     * Add getXXXExtra statement to inject method
     * @param injectSpecBuilder builder of the inject() method
     * @param e @Extra field element
     * @param isOptional wrap the read in an intent.hasExtra() guard
     */
    private void addGetExtraStatement(MethodSpec.Builder injectSpecBuilder, Element e, boolean isOptional) {
        String fieldName = e.getSimpleName().toString();
        Extra extra = e.getAnnotation(Extra.class);
        String keyName = extra.key().length() > 0 ? extra.key() : fieldName;
        TypeName fieldType = TypeName.get(e.asType()).box();
        if (isOptional) {
            // Bug fix: guard on the actual extra key. The old code checked
            // hasExtra(fieldName), which is wrong when @Extra(key=...) differs
            // from the field name (extras are written under keyName).
            injectSpecBuilder.beginControlFlow("if (intent.hasExtra($S))", keyName);
        }
        if (shouldUseParceler(e)) {
            injectSpecBuilder.addStatement(PARCELER_GET_EXTRA_STATEMENT,
                    fieldName, PARCELER_CLASS, keyName);
        } else {
            String statement = GET_EXTRA_STATEMENTS.get(fieldType.toString());
            if (statement != null) {
                statement = "target.$L = " + statement;
                injectSpecBuilder.addStatement(statement, fieldName, keyName);
            } else {
                logError("[getExtra] Unsupported type: " + fieldType.toString());
            }
        }
        if (isOptional) {
            injectSpecBuilder.endControlFlow();
        }
    }

    /**
     * @return true if the element carries an annotation with the given simple name.
     */
    private boolean hasAnnotation(Element e, String name) {
        if (e == null) {
            return false;
        }
        for (AnnotationMirror annotation : e.getAnnotationMirrors()) {
            if (annotation.getAnnotationType().asElement().getSimpleName().toString().equals(name)) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return true if the field's type (or one of its generic type arguments) is
     * annotated with Parceler's @Parcel, meaning it must be wrapped/unwrapped.
     */
    private boolean shouldUseParceler(Element fieldElement) {
        log("field = " + fieldElement.getSimpleName().toString());
        // asElement returns null for primitives and arrays, which can never be @Parcel.
        TypeElement typeElement = (TypeElement) types.asElement(fieldElement.asType());
        if (typeElement == null) {
            return false;
        }
        if (hasAnnotation(typeElement, "Parcel")) {
            return true;
        }
        DeclaredType declaredType = (DeclaredType) fieldElement.asType();
        for (TypeMirror genericParam : declaredType.getTypeArguments()) {
            log("gp = " + genericParam.toString());
            if (hasAnnotation(types.asElement(genericParam), "Parcel")) {
                return true;
            }
        }
        return false;
    }

    /** Emit a NOTE diagnostic when LOGGABLE is enabled. */
    private void log(String msg) {
        if (LOGGABLE) {
            this.messager.printMessage(Diagnostic.Kind.OTHER, msg);
        }
    }

    /** Emit an ERROR diagnostic (fails the compilation). */
    private void logError(String msg) {
        this.messager.printMessage(Diagnostic.Kind.ERROR, msg);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pulsar.admin.cli;

import com.beust.jcommander.ParameterException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.impl.MessageIdImpl;
import org.apache.pulsar.common.naming.NamespaceName;
import org.apache.pulsar.common.naming.TopicDomain;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.policies.data.AuthAction;
import org.apache.pulsar.common.util.ObjectMapperFactory;

/**
 * Base class for admin CLI commands: shared positional-argument validation
 * helpers and pretty-printing utilities. Subclasses implement {@link #run()}.
 */
abstract class CliCommand {

    /** Splits the single positional argument into "property/cluster" parts. */
    static String[] validatePropertyCluster(List<String> params) {
        return splitParameter(params, 2);
    }

    /** Validates and canonicalizes a namespace name. */
    static String validateNamespace(List<String> params) {
        String namespace = checkArgument(params);
        return NamespaceName.get(namespace).toString();
    }

    /** Validates and canonicalizes a topic name of any domain. */
    static String validateTopicName(List<String> params) {
        String topic = checkArgument(params);
        return TopicName.get(topic).toString();
    }

    /**
     * Validates a topic name and requires the persistent domain.
     * @throws ParameterException if the topic is not persistent
     */
    static String validatePersistentTopic(List<String> params) {
        String topic = checkArgument(params);
        TopicName topicName = TopicName.get(topic);
        if (topicName.getDomain() != TopicDomain.persistent) {
            throw new ParameterException("Need to provide a persistent topic name");
        }
        return topicName.toString();
    }

    /**
     * Validates a topic name and requires the non-persistent domain.
     * @throws ParameterException if the topic is not non-persistent
     */
    static String validateNonPersistentTopic(List<String> params) {
        String topic = checkArgument(params);
        TopicName topicName = TopicName.get(topic);
        if (topicName.getDomain() != TopicDomain.non_persistent) {
            throw new ParameterException("Need to provide a non-persistent topic name");
        }
        return topicName.toString();
    }

    /** Rejects negative latency sample rates. */
    static void validateLatencySampleRate(int sampleRate) {
        if (sampleRate < 0) {
            throw new ParameterException(
                    "Latency sample rate should be positive and non-zero (found " + sampleRate + ")");
        }
    }

    /**
     * Parses a size string such as "4096", "100K", "10M", "16G" or "2T" into bytes.
     * Unit suffixes are case-insensitive.
     *
     * @throws ParameterException if the string is empty or not a valid size
     */
    static long validateSizeString(String s) {
        // Guard: the original implementation threw StringIndexOutOfBoundsException
        // on an empty string instead of a usable CLI error.
        if (s == null || s.isEmpty()) {
            throw new ParameterException(
                    String.format("Invalid size '%s'. Valid formats are: %s", s, "(4096, 100K, 10M, 16G, 2T)"));
        }
        char last = s.charAt(s.length() - 1);
        String subStr = s.substring(0, s.length() - 1);
        long size;
        try {
            // If the last char is a known unit, parse everything before it;
            // otherwise the whole string must be a plain number of bytes.
            size = sizeUnit.contains(last)
                    ? Long.parseLong(subStr)
                    : Long.parseLong(s);
        } catch (NumberFormatException e) {
            // NumberFormatException is the only failure Long.parseLong raises;
            // catching it (rather than IllegalArgumentException) is precise.
            throw new ParameterException(
                    String.format("Invalid size '%s'. Valid formats are: %s", s, "(4096, 100K, 10M, 16G, 2T)"));
        }
        switch (last) {
        case 'k':
        case 'K':
            return size * 1024;

        case 'm':
        case 'M':
            return size * 1024 * 1024;

        case 'g':
        case 'G':
            return size * 1024 * 1024 * 1024;

        case 't':
        case 'T':
            return size * 1024 * 1024 * 1024 * 1024;

        default:
            return size;
        }
    }

    /** Parses "ledgerId:entryId" into a MessageId with no partition index. */
    static MessageId validateMessageIdString(String resetMessageIdStr) throws PulsarAdminException {
        return validateMessageIdString(resetMessageIdStr, -1);
    }

    /**
     * Parses "ledgerId:entryId" into a MessageId for the given partition.
     * @throws PulsarAdminException if the string is not in ledgerId:entryId form
     */
    static MessageId validateMessageIdString(String resetMessageIdStr, int partitionIndex)
            throws PulsarAdminException {
        String[] messageId = resetMessageIdStr.split(":");
        try {
            // checkArgument throws IllegalArgumentException; NumberFormatException is
            // a subclass, so this single narrow catch covers both failure modes
            // (the original caught bare Exception).
            com.google.common.base.Preconditions.checkArgument(messageId.length == 2);
            return new MessageIdImpl(Long.parseLong(messageId[0]), Long.parseLong(messageId[1]), partitionIndex);
        } catch (IllegalArgumentException e) {
            throw new PulsarAdminException(
                    "Invalid message id (must be in format: ledgerId:entryId) value " + resetMessageIdStr);
        }
    }

    /** Requires exactly one positional argument and returns it. */
    static String checkArgument(List<String> arguments) {
        if (arguments.size() != 1) {
            throw new ParameterException("Need to provide just 1 parameter");
        }
        return arguments.get(0);
    }

    /** Splits the single positional argument on '/' into exactly n parts. */
    private static String[] splitParameter(List<String> params, int n) {
        if (params.size() != 1) {
            throw new ParameterException("Need to provide just 1 parameter");
        }
        String[] parts = params.get(0).split("/");
        if (parts.length != n) {
            throw new ParameterException("Parameter format is incorrect");
        }
        return parts;
    }

    /** Requires exactly one positional argument and returns it. */
    static String getOneArgument(List<String> params) {
        if (params.size() != 1) {
            throw new ParameterException("Need to provide just 1 parameter");
        }
        return params.get(0);
    }

    /**
     *
     * @param params
     *            List of positional arguments
     * @param pos
     *            Index of the argument to return (0-based into params)
     * @param maxArguments
     *            Validate against max arguments
     * @return the argument at index pos
     */
    static String getOneArgument(List<String> params, int pos, int maxArguments) {
        if (params.size() != maxArguments) {
            throw new ParameterException(String.format("Need to provide %s parameters", maxArguments));
        }
        return params.get(pos);
    }

    /**
     * Converts action names to AuthAction values.
     * @throws ParameterException on any unknown action name
     */
    static Set<AuthAction> getAuthActions(List<String> actions) {
        Set<AuthAction> res = new TreeSet<>();
        AuthAction authAction;
        for (String action : actions) {
            try {
                authAction = AuthAction.valueOf(action);
            } catch (IllegalArgumentException exception) {
                throw new ParameterException(String.format("Illegal auth action '%s'. Possible values: %s", action,
                        Arrays.toString(AuthAction.values())));
            }
            res.add(authAction);
        }
        return res;
    }

    /** Prints each item on its own line. */
    <T> void print(List<T> items) {
        for (T item : items) {
            print(item);
        }
    }

    /** Prints each entry as "key value". */
    <K, V> void print(Map<K, V> items) {
        for (Map.Entry<K, V> entry : items.entrySet()) {
            print(entry.getKey() + "    " + entry.getValue());
        }
    }

    /** Prints a String as-is; anything else is pretty-printed as JSON. */
    <T> void print(T item) {
        try {
            if (item instanceof String) {
                System.out.println(item);
            } else {
                System.out.println(writer.writeValueAsString(item));
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    private static ObjectMapper mapper = ObjectMapperFactory.create();
    private static ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
    // Accepted size-unit suffix characters for validateSizeString.
    private static Set<Character> sizeUnit = Sets.newHashSet('k', 'K', 'm', 'M', 'g', 'G', 't', 'T');

    abstract void run() throws Exception;
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.transport.nio;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.nio.channel.NioChannel;
import org.elasticsearch.transport.nio.channel.NioSocketChannel;
import org.elasticsearch.transport.nio.channel.WriteContext;
import org.elasticsearch.transport.nio.utils.TestSelectionKey;
import org.junit.Before;

import java.io.IOException;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ClosedSelectorException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.util.HashSet;
import java.util.Set;

import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for SocketSelector: channel registration, connect notification,
 * and write queueing, exercised against mocked channels and a mocked raw
 * java.nio Selector.
 */
public class SocketSelectorTests extends ESTestCase {

    private SocketSelector socketSelector;
    private SocketEventHandler eventHandler;
    private NioSocketChannel channel;
    private TestSelectionKey selectionKey;
    private WriteContext writeContext;
    // Returned by the mocked raw selector's selectedKeys() call below.
    private HashSet<SelectionKey> keySet = new HashSet<>();
    private ActionListener<NioChannel> listener;
    // Minimal one-byte payload used for every WriteOperation in these tests.
    private NetworkBytesReference bufferReference = NetworkBytesReference.wrap(new BytesArray(new byte[1]));

    // Builds a selector around mocks and marks the current thread as the
    // selector thread (setThread) so queued operations run inline in doSelect.
    @Before
    @SuppressWarnings("unchecked")
    public void setUp() throws Exception {
        super.setUp();
        eventHandler = mock(SocketEventHandler.class);
        channel = mock(NioSocketChannel.class);
        writeContext = mock(WriteContext.class);
        listener = mock(ActionListener.class);
        selectionKey = new TestSelectionKey(0);
        selectionKey.attach(channel);
        Selector rawSelector = mock(Selector.class);

        this.socketSelector = new SocketSelector(eventHandler, rawSelector);
        this.socketSelector.setThread();

        when(rawSelector.selectedKeys()).thenReturn(keySet);
        when(rawSelector.select(0)).thenReturn(1);
        when(channel.getSelectionKey()).thenReturn(selectionKey);
        when(channel.getWriteContext()).thenReturn(writeContext);
        when(channel.isConnectComplete()).thenReturn(true);
    }

    // A successful register() is reported to the event handler and the channel
    // appears in the registered set.
    public void testRegisterChannel() throws Exception {
        socketSelector.registerSocketChannel(channel);

        when(channel.register(socketSelector)).thenReturn(true);

        socketSelector.doSelect(0);

        verify(eventHandler).handleRegistration(channel);

        Set<NioChannel> registeredChannels = socketSelector.getRegisteredChannels();
        assertEquals(1, registeredChannels.size());
        assertTrue(registeredChannels.contains(channel));
    }

    // register() returning false means no connect attempt and no tracking.
    public void testRegisterChannelFails() throws Exception {
        socketSelector.registerSocketChannel(channel);

        when(channel.register(socketSelector)).thenReturn(false);

        socketSelector.doSelect(0);

        verify(channel, times(0)).finishConnect();

        Set<NioChannel> registeredChannels = socketSelector.getRegisteredChannels();
        assertEquals(0, registeredChannels.size());
        assertFalse(registeredChannels.contains(channel));
    }

    // A register() exception is routed to registrationException and the channel
    // is not tracked or connected.
    public void testRegisterChannelFailsDueToException() throws Exception {
        socketSelector.registerSocketChannel(channel);

        ClosedChannelException closedChannelException = new ClosedChannelException();
        when(channel.register(socketSelector)).thenThrow(closedChannelException);

        socketSelector.doSelect(0);

        verify(eventHandler).registrationException(channel, closedChannelException);
        verify(channel, times(0)).finishConnect();

        Set<NioChannel> registeredChannels = socketSelector.getRegisteredChannels();
        assertEquals(0, registeredChannels.size());
        assertFalse(registeredChannels.contains(channel));
    }

    // Registration followed by a completed connect fires handleConnect.
    public void testSuccessfullyRegisterChannelWillConnect() throws Exception {
        socketSelector.registerSocketChannel(channel);

        when(channel.register(socketSelector)).thenReturn(true);
        when(channel.finishConnect()).thenReturn(true);

        socketSelector.doSelect(0);

        verify(eventHandler).handleConnect(channel);
    }

    // An incomplete connect must not fire handleConnect.
    public void testConnectIncompleteWillNotNotify() throws Exception {
        socketSelector.registerSocketChannel(channel);

        when(channel.register(socketSelector)).thenReturn(true);
        when(channel.finishConnect()).thenReturn(false);

        socketSelector.doSelect(0);

        verify(eventHandler, times(0)).handleConnect(channel);
    }

    // Writes queued after close are failed with ClosedSelectorException.
    public void testQueueWriteWhenNotRunning() throws Exception {
        socketSelector.close(false);

        socketSelector.queueWrite(new WriteOperation(channel, bufferReference, listener));

        verify(listener).onFailure(any(ClosedSelectorException.class));
    }

    // A write to a channel that became unwritable is failed with
    // ClosedChannelException and never reaches the write context.
    public void testQueueWriteChannelIsNoLongerWritable() throws Exception {
        WriteOperation writeOperation = new WriteOperation(channel, bufferReference, listener);
        socketSelector.queueWrite(writeOperation);

        when(channel.isWritable()).thenReturn(false);
        socketSelector.doSelect(0);

        verify(writeContext, times(0)).queueWriteOperations(writeOperation);
        verify(listener).onFailure(any(ClosedChannelException.class));
    }

    // interestOps throwing CancelledKeyException fails the listener with that
    // same exception; note a local mock key shadows the field here on purpose.
    public void testQueueWriteSelectionKeyThrowsException() throws Exception {
        SelectionKey selectionKey = mock(SelectionKey.class);

        WriteOperation writeOperation = new WriteOperation(channel, bufferReference, listener);
        CancelledKeyException cancelledKeyException = new CancelledKeyException();
        socketSelector.queueWrite(writeOperation);

        when(channel.isWritable()).thenReturn(true);
        when(channel.getSelectionKey()).thenReturn(selectionKey);
        when(selectionKey.interestOps(anyInt())).thenThrow(cancelledKeyException);
        socketSelector.doSelect(0);

        verify(writeContext, times(0)).queueWriteOperations(writeOperation);
        verify(listener).onFailure(cancelledKeyException);
    }

    // Successful queueing hands the op to the write context and flips on
    // OP_WRITE interest (asserted absent before, present after).
    public void testQueueWriteSuccessful() throws Exception {
        WriteOperation writeOperation = new WriteOperation(channel, bufferReference, listener);
        socketSelector.queueWrite(writeOperation);

        assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) == 0);

        when(channel.isWritable()).thenReturn(true);
        socketSelector.doSelect(0);

        verify(writeContext).queueWriteOperations(writeOperation);
        assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) != 0);
    }

    // queueWriteInChannelBuffer (the selector-thread fast path) behaves the
    // same as the queued path: write context called, OP_WRITE set.
    public void testQueueDirectlyInChannelBufferSuccessful() throws Exception {
        WriteOperation writeOperation = new WriteOperation(channel, bufferReference, listener);

        assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) == 0);

        when(channel.isWritable()).thenReturn(true);
        socketSelector.queueWriteInChannelBuffer(writeOperation);

        verify(writeContext).queueWriteOperations(writeOperation);
        assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) != 0);
    }

    // Fast-path variant of the CancelledKeyException case above.
    public void testQueueDirectlyInChannelBufferSelectionKeyThrowsException() throws Exception {
        SelectionKey selectionKey = mock(SelectionKey.class);

        WriteOperation writeOperation = new WriteOperation(channel, bufferReference, listener);
        CancelledKeyException cancelledKeyException = new CancelledKeyException();

        when(channel.isWritable()).thenReturn(true);
        when(channel.getSelectionKey()).thenReturn(selectionKey);
        when(selectionKey.interestOps(anyInt())).thenThrow(cancelledKeyException);
        socketSelector.queueWriteInChannelBuffer(writeOperation);

        verify(writeContext, times(0)).queueWriteOperations(writeOperation);
        verify(listener).onFailure(cancelledKeyException);
    }

    public void 
testConnectEvent() throws Exception { keySet.add(selectionKey); selectionKey.setReadyOps(SelectionKey.OP_CONNECT); when(channel.finishConnect()).thenReturn(true); socketSelector.doSelect(0); verify(eventHandler).handleConnect(channel); } public void testConnectEventFinishUnsuccessful() throws Exception { keySet.add(selectionKey); selectionKey.setReadyOps(SelectionKey.OP_CONNECT); when(channel.finishConnect()).thenReturn(false); socketSelector.doSelect(0); verify(eventHandler, times(0)).handleConnect(channel); } public void testConnectEventFinishThrowException() throws Exception { keySet.add(selectionKey); IOException ioException = new IOException(); selectionKey.setReadyOps(SelectionKey.OP_CONNECT); when(channel.finishConnect()).thenThrow(ioException); socketSelector.doSelect(0); verify(eventHandler, times(0)).handleConnect(channel); verify(eventHandler).connectException(channel, ioException); } public void testWillNotConsiderWriteOrReadUntilConnectionComplete() throws Exception { keySet.add(selectionKey); IOException ioException = new IOException(); selectionKey.setReadyOps(SelectionKey.OP_WRITE | SelectionKey.OP_READ); doThrow(ioException).when(eventHandler).handleWrite(channel); when(channel.isConnectComplete()).thenReturn(false); socketSelector.doSelect(0); verify(eventHandler, times(0)).handleWrite(channel); verify(eventHandler, times(0)).handleRead(channel); } public void testSuccessfulWriteEvent() throws Exception { keySet.add(selectionKey); selectionKey.setReadyOps(SelectionKey.OP_WRITE); socketSelector.doSelect(0); verify(eventHandler).handleWrite(channel); } public void testWriteEventWithException() throws Exception { keySet.add(selectionKey); IOException ioException = new IOException(); selectionKey.setReadyOps(SelectionKey.OP_WRITE); doThrow(ioException).when(eventHandler).handleWrite(channel); socketSelector.doSelect(0); verify(eventHandler).writeException(channel, ioException); } public void testSuccessfulReadEvent() throws Exception { 
keySet.add(selectionKey); selectionKey.setReadyOps(SelectionKey.OP_READ); socketSelector.doSelect(0); verify(eventHandler).handleRead(channel); } public void testReadEventWithException() throws Exception { keySet.add(selectionKey); IOException ioException = new IOException(); selectionKey.setReadyOps(SelectionKey.OP_READ); doThrow(ioException).when(eventHandler).handleRead(channel); socketSelector.doSelect(0); verify(eventHandler).readException(channel, ioException); } public void testCleanup() throws Exception { NioSocketChannel unRegisteredChannel = mock(NioSocketChannel.class); when(channel.register(socketSelector)).thenReturn(true); socketSelector.registerSocketChannel(channel); socketSelector.doSelect(0); NetworkBytesReference networkBuffer = NetworkBytesReference.wrap(new BytesArray(new byte[1])); socketSelector.queueWrite(new WriteOperation(mock(NioSocketChannel.class), networkBuffer, listener)); socketSelector.registerSocketChannel(unRegisteredChannel); socketSelector.cleanup(); verify(listener).onFailure(any(ClosedSelectorException.class)); verify(eventHandler).handleClose(channel); verify(eventHandler).handleClose(unRegisteredChannel); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. The ASF licenses this file to You
 * under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. For additional information regarding
 * copyright in this work, please see the NOTICE file in the top level
 * directory of this distribution.
 */
package org.apache.shindig.social.dataservice.integration;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.inject.Guice;
import com.google.inject.Injector;

import junit.framework.TestCase;

import org.apache.shindig.common.testing.FakeGadgetToken;
import org.apache.shindig.social.EasyMockTestCase;
import org.apache.shindig.social.SocialApiTestsGuiceModule;
import org.apache.shindig.social.core.util.BeanJsonConverter;
import org.apache.shindig.social.core.util.BeanXStreamAtomConverter;
import org.apache.shindig.social.core.util.BeanXStreamConverter;
import org.apache.shindig.social.core.util.xstream.GuiceBeanProvider;
import org.apache.shindig.social.core.util.xstream.XStream081Configuration;
import org.apache.shindig.social.opensocial.service.DataServiceServlet;
import org.apache.shindig.social.opensocial.service.HandlerDispatcher;
import org.easymock.classextension.EasyMock;
import org.json.JSONObject;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;

/**
 * Base class for large RESTful integration tests. Drives a
 * {@link DataServiceServlet} through mocked servlet request/response objects
 * and offers helpers for parsing the JSON/XML/Atom responses.
 */
public abstract class AbstractLargeRestfulTests extends EasyMockTestCase {
  protected static final String XMLSCHEMA = " xmlns=\"http://ns.opensocial.org/2008/opensocial\" \n"
      + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n"
      + " xsi:schemaLocation=\"http://ns.opensocial.org/2008/opensocial classpath:opensocial.xsd\" ";
  protected static final String XSDRESOURCE = "opensocial.xsd";
  private HttpServletRequest req;
  private HttpServletResponse res;
  private DataServiceServlet servlet;

  // Token used for every simulated request; owner and viewer are the same user.
  private static final FakeGadgetToken FAKE_GADGET_TOKEN = new FakeGadgetToken()
      .setOwnerId("john.doe").setViewerId("john.doe");

  protected HttpServletRequest getRequest() {
    return req;
  }

  protected void setRequest(HttpServletRequest req) {
    this.req = req;
  }

  protected HttpServletResponse getResponse() {
    return res;
  }

  protected void setResponse(HttpServletResponse res) {
    this.res = res;
  }

  protected DataServiceServlet getServlet() {
    return servlet;
  }

  protected void setServlet(DataServiceServlet servlet) {
    this.servlet = servlet;
  }

  /**
   * Wires up a real servlet (Guice-injected handlers and bean converters) with
   * EasyMock request/response doubles.
   * NOTE(review): does not call super.setUp() — confirm EasyMockTestCase has
   * no required setup of its own.
   */
  @Override
  protected void setUp() throws Exception {
    Injector injector = Guice.createInjector(new SocialApiTestsGuiceModule());

    servlet = new DataServiceServlet();
    servlet.setHandlerDispatcher(injector.getInstance(HandlerDispatcher.class));
    servlet.setBeanConverters(new BeanJsonConverter(injector),
        new BeanXStreamConverter(new XStream081Configuration(injector)),
        new BeanXStreamAtomConverter(new XStream081Configuration(injector)));

    req = EasyMock.createMock(HttpServletRequest.class);
    res = EasyMock.createMock(HttpServletResponse.class);
  }

  /** Convenience overload: no extra params, no post body. */
  protected String getResponse(String path, String method, String format,
      String contentType) throws Exception {
    return getResponse(path, method, Maps.<String, String> newHashMap(), "",
        format, contentType);
  }

  /** Convenience overload: extra params, no post body. */
  protected String getResponse(String path, String method,
      Map<String, String> extraParams, String format, String contentType)
      throws Exception {
    return getResponse(path, method, extraParams, "", format, contentType);
  }

  /** Convenience overload: post body, no extra params. */
  protected String getResponse(String path, String method, String postData,
      String format, String contentType) throws Exception {
    return getResponse(path, method, Maps.<String, String> newHashMap(),
        postData, format, contentType);
  }

  /**
   * Performs one simulated servlet round trip.
   *
   * @param path servlet path info (e.g. "/people/john.doe/@self")
   * @param method HTTP method; a body is only wired up for non-GET/HEAD
   * @param extraParams additional request parameters to stub
   * @param postData request body; {@code null} is treated as empty
   * @param format value returned for the "format" request parameter
   * @param contentType content type expected to be set on the response
   * @return the full response body written by the servlet
   */
  protected String getResponse(String path, String method,
      Map<String, String> extraParams, String postData, String format,
      String contentType) throws Exception {
    EasyMock.expect(req.getCharacterEncoding()).andStubReturn("UTF-8");
    EasyMock.expect(req.getPathInfo()).andStubReturn(path);
    EasyMock.expect(req.getMethod()).andStubReturn(method);
    EasyMock.expect(req.getParameter("format")).andStubReturn(format);
    EasyMock.expect(req.getParameter("X-HTTP-Method-Override")).andStubReturn(
        method);
    EasyMock.expect(req.getAttribute(EasyMock.isA(String.class))).andReturn(
        FAKE_GADGET_TOKEN);

    Vector<String> vector = new Vector<String>(extraParams.keySet());
    EasyMock.expect(req.getParameterNames()).andStubReturn(vector.elements());

    for (Map.Entry<String, String> entry : extraParams.entrySet()) {
      if (entry.getValue() != null) {
        EasyMock.expect(req.getParameterValues(entry.getKey())).andStubReturn(
            new String[] { entry.getValue() });
      } else {
        EasyMock.expect(req.getParameterValues(entry.getKey())).andStubReturn(
            new String[] {});
      }
    }

    if (postData == null) {
      postData = "";
    }

    if (!("GET").equals(method) && !("HEAD").equals(method)) {
      // The request advertises UTF-8 above, so encode the body explicitly as
      // UTF-8 instead of relying on the platform default charset.
      final InputStream stream =
          new ByteArrayInputStream(postData.getBytes(StandardCharsets.UTF_8));
      ServletInputStream servletStream = new ServletInputStream() {
        @Override
        public int read() throws IOException {
          return stream.read();
        }
      };
      EasyMock.expect(req.getInputStream()).andReturn(servletStream);
    }

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    PrintWriter writer = new PrintWriter(outputStream);
    EasyMock.expect(res.getWriter()).andReturn(writer);
    res.setCharacterEncoding("UTF-8");
    res.setContentType(contentType);

    EasyMock.replay(req, res);
    servlet.service(req, res);
    EasyMock.verify(req, res);
    EasyMock.reset(req, res);

    writer.flush();
    // The response writer was configured for UTF-8; decode the captured bytes
    // with the same charset rather than the platform default.
    return outputStream.toString("UTF-8");
  }

  /** Parses a raw JSON string into a {@link JSONObject}. */
  protected JSONObject getJson(String json) throws Exception {
    return new JSONObject(json);
  }

  /**
   * parse entry.content xml into a Map<> struct
   *
   * @param str
   *          input content string
   * @return the map<> of <name, value> pairs from the content xml
   * @throws javax.xml.stream.XMLStreamException
   *           If the str is not valid xml
   */
  protected Map<String, String> parseXmlContent(String str)
      throws XMLStreamException {
    // Encode explicitly as UTF-8 so parsing does not depend on the platform
    // default charset.
    ByteArrayInputStream inStr =
        new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8));
    XMLInputFactory factory = XMLInputFactory.newInstance();
    XMLStreamReader parser = factory.createXMLStreamReader(inStr);
    Map<String, String> columns = Maps.newHashMap();

    while (true) {
      int event = parser.next();
      if (event == XMLStreamConstants.END_DOCUMENT) {
        parser.close();
        break;
      } else if (event == XMLStreamConstants.START_ELEMENT) {
        String name = parser.getLocalName();
        int eventType = parser.next();
        if (eventType == XMLStreamConstants.CHARACTERS) {
          String value = parser.getText();
          columns.put(name, value);
        }
      }
    }
    return columns;
  }

  /**
   * Converts a node which child nodes into a map keyed on element names
   * containing the text inside each child node.
   *
   * @param n
   *          the node to convert.
   * @return a map keyed on element name, containing the contents of each
   *         element.
   */
  protected Map<String, List<String>> childNodesToMap(Node n) {
    Map<String, List<String>> v = Maps.newHashMap();
    NodeList result = n.getChildNodes();
    for (int i = 0; i < result.getLength(); i++) {
      Node nv = result.item(i);
      if (nv.getNodeType() == Node.ELEMENT_NODE) {
        List<String> l = v.get(nv.getLocalName());
        if (l == null) {
          l = Lists.newArrayList();
          v.put(nv.getLocalName(), l);
        }
        l.add(nv.getTextContent());
      }
    }
    return v;
  }

  /**
   * Converts <entry> <key>k</key> <value> <entry> <key>count</key>
   * <value>val</value> </entry> <entry> <key>lastUpdate</key>
   * <value>val</value> </entry> </value> </entry>
   *
   * To map.get("k").get("count")
   *
   * @param result the list of entry nodes to convert
   * @return nested map keyed first on entry key, then on inner entry key
   */
  protected Map<String, Map<String, List<String>>> childNodesToMapofMap(
      NodeList result) {
    Map<String, Map<String, List<String>>> v = Maps.newHashMap();
    for (int i = 0; i < result.getLength(); i++) {
      // Each outer entry must be exactly a <key>/<value> pair.
      Map<String, List<Node>> keyValue = childNodesToNodeMap(result.item(i));
      assertEquals(2, keyValue.size());
      assertTrue(keyValue.containsKey("key"));
      assertTrue(keyValue.containsKey("value"));
      Node valueNode = keyValue.get("value").get(0);
      Node key = keyValue.get("key").get(0);
      NodeList entryList = valueNode.getChildNodes();
      Map<String, List<String>> pv = Maps.newHashMap();
      v.put(key.getTextContent(), pv);
      for (int j = 0; j < entryList.getLength(); j++) {
        Node n = entryList.item(j);
        if ("entry".equals(n.getNodeName())) {
          Map<String, List<String>> ve = childNodesToMap(entryList.item(j));
          assertTrue(ve.containsKey("key"));
          List<String> l = pv.get(ve.get("key").get(0));
          if (l == null) {
            l = Lists.newArrayList();
            pv.put(ve.get("key").get(0), l);
          }
          l.add(ve.get("value").get(0));
        }
      }
    }
    return v;
  }

  /**
   * Groups the element children of a node by local name.
   *
   * @param n the node whose children to group
   * @return map from element local name to the child nodes with that name
   */
  protected Map<String, List<Node>> childNodesToNodeMap(Node n) {
    Map<String, List<Node>> v = Maps.newHashMap();
    NodeList result = n.getChildNodes();
    for (int i = 0; i < result.getLength(); i++) {
      Node nv = result.item(i);
      if (nv.getNodeType() == Node.ELEMENT_NODE) {
        List<Node> l = v.get(nv.getLocalName());
        if (l == null) {
          l = Lists.newArrayList();
          v.put(nv.getLocalName(), l);
        }
        l.add(nv);
      }
    }
    return v;
  }
}
/*
 * Copyright 2013 JavaANPR contributors
 * Copyright 2006 Ondrej Martinsky
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.osedu.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package net.sf.javaanpr.gui.windows;

import net.sf.javaanpr.gui.tools.FileListModel;
import net.sf.javaanpr.gui.tools.ImageFileFilter;
import net.sf.javaanpr.imageanalysis.CarSnapshot;
import net.sf.javaanpr.imageanalysis.Photo;
import net.sf.javaanpr.jar.Main;
import org.jdesktop.layout.GroupLayout;
import org.jdesktop.layout.LayoutStyle;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

/**
 * Main application window: lets the user load snapshot images, shows the
 * selected snapshot in a panel, and runs plate recognition on it in a
 * background thread.
 */
public class FrameMain extends JFrame {

    private static final long serialVersionUID = 0L;

    // Currently displayed snapshot; loaded by LoadImageThread, read by RecognizeThread.
    private CarSnapshot car;
    // Image painted onto panelCar (resized copy of the current snapshot).
    private BufferedImage panelCarContent;
    private JFileChooser fileChooser;
    // Model backing fileList; rebuilt on each "Load snapshots" action.
    private FileListModel fileListModel;
    // Index of the snapshot currently selected in the list; -1 = none.
    private int selectedIndex = -1;

    private JMenuItem aboutItem;
    private JLabel bottomLine;
    private JMenuItem exitItem;
    private JList<Object> fileList;
    private JScrollPane fileListScrollPane;
    private JMenuItem helpItem;
    private JMenu helpMenu;
    private JMenu imageMenu;
    private JMenuBar menuBar;
    private JMenuItem openItem;
    private JPanel panelCar;
    private JLabel recognitionLabel;
    private JButton recognizeButton;

    /**
     * Creates new form MainFrame.
     */
    public FrameMain() {
        this.initComponents();
        this.fileChooser = new JFileChooser();
        this.fileChooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
        this.fileChooser.setMultiSelectionEnabled(true);
        this.fileChooser.setDialogTitle("Load snapshots");
        // this.fileChooser.setFileFilter(new ImageFileFilter()); // TODO why not???
        // Center the window on screen before showing it.
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        int width = this.getWidth();
        int height = this.getHeight();
        this.setLocation((screenSize.width - width) / 2, (screenSize.height - height) / 2);
        this.setVisible(true);
    }

    /**
     * This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The
     * content of this method is always regenerated by the Form Editor.
     */
    private void initComponents() {
        Font arial11 = new Font("Arial", 0, 11);

        this.recognitionLabel = new JLabel();
        // panelCar draws the current snapshot image on top of its normal painting.
        this.panelCar = new JPanel() {
            private static final long serialVersionUID = 0L;

            @Override
            public void paint(Graphics g) {
                super.paint(g);
                g.drawImage(FrameMain.this.panelCarContent, 0, 0, null);
            }
        };
        this.fileListScrollPane = new JScrollPane();
        this.fileList = new JList<Object>();
        this.recognizeButton = new JButton();
        this.bottomLine = new JLabel();
        this.menuBar = new JMenuBar();
        this.imageMenu = new JMenu();
        this.openItem = new JMenuItem();
        this.exitItem = new JMenuItem();
        this.helpMenu = new JMenu();
        this.aboutItem = new JMenuItem();
        this.helpItem = new JMenuItem();

        this.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        this.setTitle("JavaANPR");
        this.setResizable(false);

        // Label that displays the recognized plate text (or status messages).
        this.recognitionLabel.setBackground(new Color(0, 0, 0));
        this.recognitionLabel.setFont(new Font("Arial", 0, 24));
        this.recognitionLabel.setForeground(new Color(255, 204, 51));
        this.recognitionLabel.setHorizontalAlignment(SwingConstants.CENTER);
        this.recognitionLabel.setText(null);
        this.recognitionLabel.setBorder(BorderFactory.createEtchedBorder());
        this.recognitionLabel.setOpaque(true);

        this.panelCar.setBorder(BorderFactory.createEtchedBorder());
        GroupLayout panelCarLayout = new GroupLayout(this.panelCar);
        this.panelCar.setLayout(panelCarLayout);
        panelCarLayout.setHorizontalGroup(
                panelCarLayout.createParallelGroup(GroupLayout.LEADING).add(0, 585, Short.MAX_VALUE));
        panelCarLayout
                .setVerticalGroup(panelCarLayout.createParallelGroup(GroupLayout.LEADING).add(0, 477, Short.MAX_VALUE));

        this.fileListScrollPane.setBorder(BorderFactory.createEtchedBorder());
        this.fileListScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
        this.fileList.setBackground(UIManager.getDefaults().getColor("Panel.background"));
        this.fileList.setFont(arial11);
        this.fileList.addListSelectionListener(new ListSelectionListener() {
            @Override
            public void valueChanged(ListSelectionEvent evt) {
                FrameMain.this.fileListValueChanged(evt);
            }
        });
        this.fileListScrollPane.setViewportView(this.fileList);

        this.recognizeButton.setFont(arial11);
        this.recognizeButton.setText("Recognize plate");
        this.recognizeButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent evt) {
                FrameMain.this.recognizeButtonActionPerformed(evt);
            }
        });

        this.bottomLine.setFont(arial11);

        this.menuBar.setFont(arial11);

        this.imageMenu.setText("Image");
        this.imageMenu.setFont(arial11);
        this.openItem.setFont(arial11);
        this.openItem.setText("Load snapshots");
        this.openItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent evt) {
                FrameMain.this.openItemActionPerformed(evt);
            }
        });
        this.imageMenu.add(this.openItem);

        this.exitItem.setFont(arial11);
        this.exitItem.setText("Exit");
        this.exitItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent evt) {
                FrameMain.this.exitItemActionPerformed(evt);
            }
        });
        this.imageMenu.add(this.exitItem);

        this.menuBar.add(this.imageMenu);

        this.helpMenu.setText("Help");
        this.helpMenu.setFont(arial11);
        this.aboutItem.setFont(arial11);
        this.aboutItem.setText("About");
        this.aboutItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent evt) {
                FrameMain.this.aboutItemActionPerformed(evt);
            }
        });
        this.helpMenu.add(this.aboutItem);

        this.helpItem.setFont(arial11);
        this.helpItem.setText("Help");
        this.helpItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent evt) {
                FrameMain.this.helpItemActionPerformed(evt);
            }
        });
        this.helpMenu.add(this.helpItem);

        this.menuBar.add(this.helpMenu);

        this.setJMenuBar(this.menuBar);

        GroupLayout layout = new GroupLayout(this.getContentPane());
        this.getContentPane().setLayout(layout);
        // TODO refactor
        layout.setHorizontalGroup(layout.createParallelGroup(GroupLayout.LEADING)
                .add(layout.createSequentialGroup().addContainerGap()
                        .add(layout.createParallelGroup(GroupLayout.TRAILING)
                                .add(GroupLayout.LEADING, this.bottomLine, GroupLayout.DEFAULT_SIZE, 589,
                                        Short.MAX_VALUE)
                                .add(GroupLayout.LEADING, this.panelCar, GroupLayout.DEFAULT_SIZE,
                                        GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)).addPreferredGap(LayoutStyle.RELATED)
                        .add(layout.createParallelGroup(GroupLayout.TRAILING)
                                .add(this.fileListScrollPane, GroupLayout.DEFAULT_SIZE, 190, Short.MAX_VALUE)
                                .add(GroupLayout.LEADING, this.recognitionLabel, GroupLayout.DEFAULT_SIZE, 190,
                                        Short.MAX_VALUE)
                                .add(this.recognizeButton, GroupLayout.DEFAULT_SIZE, 190, Short.MAX_VALUE))
                        .addContainerGap()));
        layout.setVerticalGroup(layout.createParallelGroup(GroupLayout.LEADING)
                .add(layout.createSequentialGroup().addContainerGap()
                        .add(layout.createParallelGroup(GroupLayout.LEADING).add(layout.createSequentialGroup()
                                .add(this.fileListScrollPane, GroupLayout.DEFAULT_SIZE, 402, Short.MAX_VALUE)
                                .addPreferredGap(LayoutStyle.RELATED).add(this.recognizeButton)
                                .addPreferredGap(LayoutStyle.RELATED)
                                .add(this.recognitionLabel, GroupLayout.PREFERRED_SIZE, 44, GroupLayout.PREFERRED_SIZE))
                                .add(this.panelCar, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE,
                                        Short.MAX_VALUE)).addPreferredGap(LayoutStyle.RELATED).add(this.bottomLine)));
        this.pack();
    }

    // Opens the help window.
    private void helpItemActionPerformed(ActionEvent evt) {
        try {
            new FrameHelp(FrameHelp.MODE.SHOW_HELP);
        } catch (IOException e) {
            e.printStackTrace(); // TODO exception
        }
    }

    // Opens the about window.
    private void aboutItemActionPerformed(ActionEvent evt) {
        try {
            new FrameHelp(FrameHelp.MODE.SHOW_ABOUT);
        } catch (IOException e) {
            e.printStackTrace(); // TODO exception
        }
    }

    // Starts recognition of the currently loaded snapshot on a worker thread.
    // NOTE(review): runs off the Swing EDT and updates recognitionLabel directly — confirm intended.
    private void recognizeButtonActionPerformed(ActionEvent evt) {
        new RecognizeThread(this).start();
    }

    // List selection changed: show any previously recognized plate for the
    // entry and asynchronously load its image into the preview panel.
    private void fileListValueChanged(ListSelectionEvent evt) {
        int selectedNow = this.fileList.getSelectedIndex();
        if ((selectedNow != -1)) {
            this.recognitionLabel.setText(this.fileListModel.fileList.elementAt(selectedNow).recognizedPlate);
            this.selectedIndex = selectedNow;
            String path = ((FileListModel.FileListModelEntry) this.fileListModel.getElementAt(selectedNow)).fullPath;
            new LoadImageThread(this, path).start();
        }
    }

    private void exitItemActionPerformed(ActionEvent evt) {
        System.exit(0);
    }

    // "Load snapshots": builds a fresh FileListModel from the chosen files,
    // expanding selected directories one level (non-recursive).
    private void openItemActionPerformed(ActionEvent evt) {
        int returnValue;
        returnValue = this.fileChooser.showOpenDialog((Component) evt.getSource());
        if (returnValue != JFileChooser.APPROVE_OPTION) {
            return;
        }
        File[] selectedFiles = this.fileChooser.getSelectedFiles();
        this.fileListModel = new FileListModel();
        for (File selectedFile : selectedFiles) {
            if (selectedFile.isFile()) {
                this.fileListModel.addFileListModelEntry(selectedFile.getName(), selectedFile.getAbsolutePath());
            } else if (selectedFile.isDirectory()) {
                for (String fileName : selectedFile.list()) {
                    if (ImageFileFilter.accept(fileName)) {
                        this.fileListModel.addFileListModelEntry(fileName, selectedFile + File.separator + fileName);
                    }
                }
            }
        }
        this.fileList.setModel(this.fileListModel);
    }

    /**
     * Worker thread that runs plate recognition on the parent frame's current
     * snapshot and writes the result back into the label and list model.
     */
    public class RecognizeThread extends Thread {
        private FrameMain parentFrame = null;

        public RecognizeThread(FrameMain parentFrame) {
            this.parentFrame = parentFrame;
        }

        @Override
        public void run() {
            String recognizedText = "";
            this.parentFrame.recognitionLabel.setText("processing...");
            // Capture the index up front so a selection change during
            // recognition still stores the result with the right entry.
            int index = this.parentFrame.selectedIndex;
            try {
                recognizedText = Main.systemLogic.recognize(this.parentFrame.car, false);
            } catch (Exception ex) {
                // TODO exception
                this.parentFrame.recognitionLabel.setText("failed");
                ex.printStackTrace();
                return;
            }
            this.parentFrame.recognitionLabel.setText(recognizedText);
            this.parentFrame.fileListModel.fileList.elementAt(index).recognizedPlate = recognizedText;
        }
    }

    /**
     * Worker thread that loads a snapshot from disk, resizes it to fit the
     * preview panel and repaints the panel.
     */
    public class LoadImageThread extends Thread {
        private FrameMain parentFrame = null;
        private String url = null;

        public LoadImageThread(FrameMain parentFrame, String url) {
            this.parentFrame = parentFrame;
            this.url = url;
        }

        @Override
        public void run() {
            try {
                this.parentFrame.car = new CarSnapshot(this.url);
                this.parentFrame.panelCarContent = this.parentFrame.car.duplicate().getImage();
                this.parentFrame.panelCarContent = Photo.linearResizeBi(this.parentFrame.panelCarContent,
                        this.parentFrame.panelCar.getWidth(), this.parentFrame.panelCar.getHeight());
                this.parentFrame.panelCar.paint(this.parentFrame.panelCar.getGraphics());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.map.impl;

import com.hazelcast.concurrent.lock.LockService;
import com.hazelcast.config.MapConfig;
import com.hazelcast.map.impl.recordstore.RecordStore;
import com.hazelcast.spi.DefaultObjectNamespace;
import com.hazelcast.spi.ExecutionService;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.OperationService;
import com.hazelcast.spi.partition.IPartitionService;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.spi.properties.HazelcastProperties;
import com.hazelcast.util.ConcurrencyUtil;
import com.hazelcast.util.ConstructorFunction;
import com.hazelcast.util.ContextMutexFactory;

import java.util.Collection;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import static com.hazelcast.map.impl.MapKeyLoaderUtil.getMaxSizePerNode;

/**
 * Holds the per-partition state of the map service: one {@link RecordStore}
 * per map name, plus expiration-cleanup bookkeeping for this partition.
 */
public class PartitionContainer {

    final MapService mapService;
    final int partitionId;
    final ConcurrentMap<String, RecordStore> maps = new ConcurrentHashMap<String, RecordStore>(1000);

    // Default constructor: creates the record store and kicks off key loading.
    final ConstructorFunction<String, RecordStore> recordStoreConstructor
            = new ConstructorFunction<String, RecordStore>() {
        @Override
        public RecordStore createNew(String name) {
            RecordStore recordStore = createRecordStore(name);
            recordStore.startLoading();
            return recordStore;
        }
    };

    // Variant that skips key loading on creation.
    final ConstructorFunction<String, RecordStore> recordStoreConstructorSkipLoading
            = new ConstructorFunction<String, RecordStore>() {
        @Override
        public RecordStore createNew(String name) {
            return createRecordStore(name);
        }
    };

    // Variant used during hot-restart recovery; also skips key loading.
    final ConstructorFunction<String, RecordStore> recordStoreConstructorForHotRestart
            = new ConstructorFunction<String, RecordStore>() {
        @Override
        public RecordStore createNew(String name) {
            return createRecordStore(name);
        }
    };

    /**
     * Flag to check if there is a {@link com.hazelcast.map.impl.operation.ClearExpiredOperation}
     * is running on this partition at this moment or not.
     */
    volatile boolean hasRunningCleanup;

    volatile long lastCleanupTime;

    /**
     * Used when sorting partition containers in {@link com.hazelcast.map.impl.eviction.ExpirationManager}
     * A non-volatile copy of lastCleanupTime is used with two reasons.
     * <p/>
     * 1. We need an un-modified field during sorting.
     * 2. Decrease number of volatile reads.
     */
    long lastCleanupTimeCopy;

    // Supplies a fine-grained, per-map-name mutex for lazy RecordStore creation.
    private final ContextMutexFactory contextMutexFactory = new ContextMutexFactory();

    public PartitionContainer(final MapService mapService, final int partitionId) {
        this.mapService = mapService;
        this.partitionId = partitionId;
    }

    /**
     * Builds a new {@link RecordStore} for the given map on this partition,
     * wiring up its key loader from the map config and cluster services.
     * The store is initialized but loading is NOT started here; that is the
     * responsibility of the chosen constructor function.
     */
    private RecordStore createRecordStore(String name) {
        MapServiceContext serviceContext = mapService.getMapServiceContext();
        MapContainer mapContainer = serviceContext.getMapContainer(name);
        MapConfig mapConfig = mapContainer.getMapConfig();
        NodeEngine nodeEngine = serviceContext.getNodeEngine();
        IPartitionService ps = nodeEngine.getPartitionService();
        OperationService opService = nodeEngine.getOperationService();
        ExecutionService execService = nodeEngine.getExecutionService();
        HazelcastProperties hazelcastProperties = nodeEngine.getProperties();

        MapKeyLoader keyLoader = new MapKeyLoader(name, opService, ps, nodeEngine.getClusterService(),
                execService, mapContainer.toData());
        keyLoader.setMaxBatch(hazelcastProperties.getInteger(GroupProperty.MAP_LOAD_CHUNK_SIZE));
        keyLoader.setMaxSize(getMaxSizePerNode(mapConfig.getMaxSizeConfig()));
        keyLoader.setHasBackup(mapConfig.getTotalBackupCount() > 0);
        keyLoader.setMapOperationProvider(serviceContext.getMapOperationProvider(name));
        RecordStore recordStore = serviceContext.createRecordStore(mapContainer, partitionId, keyLoader);
        recordStore.init();
        return recordStore;
    }

    public ConcurrentMap<String, RecordStore> getMaps() {
        return maps;
    }

    public Collection<RecordStore> getAllRecordStores() {
        return maps.values();
    }

    public int getPartitionId() {
        return partitionId;
    }

    public MapService getMapService() {
        return mapService;
    }

    /** Returns (creating and starting key loading if absent) the record store for {@code name}. */
    public RecordStore getRecordStore(String name) {
        return ConcurrencyUtil.getOrPutSynchronized(maps, name, contextMutexFactory, recordStoreConstructor);
    }

    /**
     * Returns (creating if absent) the record store for {@code name}.
     *
     * @param skipLoadingOnCreate when {@code true}, a newly created store does
     *                            not start key loading
     */
    public RecordStore getRecordStore(String name, boolean skipLoadingOnCreate) {
        // Fix: use the per-name contextMutexFactory (as the other getRecordStore
        // variants do) instead of synchronizing on `this`, which would serialize
        // creation of ALL record stores in this partition under one monitor and
        // expose the lock to external code synchronizing on the container.
        return ConcurrencyUtil.getOrPutSynchronized(maps, name, contextMutexFactory,
                skipLoadingOnCreate ? recordStoreConstructorSkipLoading : recordStoreConstructor);
    }

    /** Returns (creating without key loading if absent) the store used during hot-restart recovery. */
    public RecordStore getRecordStoreForHotRestart(String name) {
        return ConcurrencyUtil.getOrPutSynchronized(maps, name, contextMutexFactory,
                recordStoreConstructorForHotRestart);
    }

    /** Returns the existing record store for {@code mapName}, or {@code null} if never created. */
    public RecordStore getExistingRecordStore(String mapName) {
        return maps.get(mapName);
    }

    /**
     * Destroys the record store for the given map on this partition and tears
     * down any associated lock store and cached partitioning strategy.
     */
    public void destroyMap(MapContainer mapContainer) {
        String name = mapContainer.getName();
        RecordStore recordStore = maps.remove(name);
        if (recordStore != null) {
            recordStore.destroy();
        } else {
            // It can be that, map is used only for locking,
            // because of that RecordStore is not created.
            // We will try to remove/clear LockStore belonging to
            // this IMap partition.
            clearLockStore(name);
        }

        MapServiceContext mapServiceContext = mapService.getMapServiceContext();
        if (mapServiceContext.removeMapContainer(mapContainer)) {
            mapContainer.onDestroy();
        }
        mapServiceContext.removePartitioningStrategyFromCache(mapContainer.getName());
    }

    // Clears the lock store for maps that were only ever used for locking.
    private void clearLockStore(String name) {
        final NodeEngine nodeEngine = mapService.getMapServiceContext().getNodeEngine();
        final LockService lockService = nodeEngine.getSharedService(LockService.SERVICE_NAME);
        if (lockService != null) {
            final DefaultObjectNamespace namespace = new DefaultObjectNamespace(MapService.SERVICE_NAME, name);
            lockService.clearLockStore(partitionId, namespace);
        }
    }

    /** Clears every record store on this partition and drops the store map itself. */
    public void clear(boolean onShutdown) {
        for (RecordStore recordStore : maps.values()) {
            recordStore.clearPartition(onShutdown);
        }
        maps.clear();
    }

    public boolean hasRunningCleanup() {
        return hasRunningCleanup;
    }

    public void setHasRunningCleanup(boolean hasRunningCleanup) {
        this.hasRunningCleanup = hasRunningCleanup;
    }

    public long getLastCleanupTime() {
        return lastCleanupTime;
    }

    public void setLastCleanupTime(long lastCleanupTime) {
        this.lastCleanupTime = lastCleanupTime;
    }

    public long getLastCleanupTimeCopy() {
        return lastCleanupTimeCopy;
    }

    public void setLastCleanupTimeCopy(long lastCleanupTimeCopy) {
        this.lastCleanupTimeCopy = lastCleanupTimeCopy;
    }
}
/* AbstractApacheHttpClient.java 1.00 Jan 26, 2015 * * Copyright (c) 2015, All rights reserved. */ package io.github.rampantlions.codetools.http; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.apache.commons.codec.binary.Base64; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.NameValuePair; import org.apache.http.StatusLine; import org.apache.http.auth.AuthenticationException; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.entity.AbstractHttpEntity; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicNameValuePair; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpConnectionParams; import org.apache.http.params.HttpParams; import org.apache.http.protocol.BasicHttpContext; import org.apache.http.protocol.ExecutionContext; import org.apache.http.protocol.HttpContext; import org.apache.http.util.EntityUtils; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import 
org.jsoup.nodes.Element; import retrofit.client.Header; import retrofit.client.Request; import retrofit.client.Response; import retrofit.mime.TypedByteArray; import retrofit.mime.TypedOutput; /** * Class <code>AbstractApacheHttpClient</code>. * Some segments of code in this class facilitates using Retrofit, Feign, Jersey, and DropWizard. * * @author <a href="josh.wiechman@gmail.com">Wiechman, Joshua</a> * @version 1.00, Jan 26, 2015 * @param <ClientType> the generic type */ @SuppressWarnings( "deprecation" ) public abstract class AbstractHttpClient< ClientType extends AbstractHttpClient< ClientType >> extends ClientUtilities< ClientType > implements retrofit.client.Client // , feign.Client { /** * Class <code>GenericEntityHttpRequest</code>. * This class is utilized by Retrofit. */ private static class GenericEntityHttpRequest extends HttpEntityEnclosingRequestBase { /** The method. */ private final String method; /** * Instantiates a new generic entity http request. * * @param request the request */ GenericEntityHttpRequest( final Request request ) { super(); method = request.getMethod(); setURI( URI.create( request.getUrl() ) ); /* Add all headers. */ for ( Header header : request.getHeaders() ) { addHeader( new BasicHeader( header.getName(), header.getValue() ) ); } /* Add the content body. */ setEntity( new TypedOutputEntity( request.getBody() ) ); } // GenericEntityHttpRequest( final feign.Request request ) // { // super(); // method = request.method(); // setURI( URI.create( request.url() ) ); // // /* Add all headers. */ // for ( Entry< String, Collection< String >> header : request.headers().entrySet() ) // { // StringBuilder val = new StringBuilder(); // for ( String valitem : header.getValue() ) // { // if ( val.length() > 0 ) // { // val.append( "," ); // } // val.append( valitem ); // } // addHeader( new BasicHeader( header.getKey(), val.toString() ) ); // } // // TypedOutput to = new TypedOutput(); // // /* Add the content body. 
*/ // //setContentType( typedOutput.mimeType() ); // setEntity( new TypedOutputEntity( request.getBody() ) ); // } /** * getMethod * (non-Javadoc). * * @return the method * @see org.apache.http.client.methods.HttpRequestBase#getMethod() */ @Override public String getMethod() { return method; } } /** * Class <code>GenericHttpRequest</code>. * This class is utilized by Retrofit. */ private static class GenericHttpRequest extends HttpRequestBase { /** The method. */ private final String method; /** * Instantiates a new generic http request. * * @param request the request */ public GenericHttpRequest( final Request request ) { method = request.getMethod(); setURI( URI.create( request.getUrl() ) ); /* Add all headers. */ for ( Header header : request.getHeaders() ) { addHeader( new BasicHeader( header.getName(), header.getValue() ) ); } } /** * getMethod * (non-Javadoc). * * @return the method * @see org.apache.http.client.methods.HttpRequestBase#getMethod() */ @Override public String getMethod() { return method; } } /** * Container class for passing an entire {@link TypedOutput} as an {@link HttpEntity}. */ static class TypedOutputEntity extends AbstractHttpEntity { /** The typed output. */ final TypedOutput typedOutput; /** * Instantiates a new typed output entity. * * @param typedOutput the typed output */ TypedOutputEntity( final TypedOutput typedOutput ) { this.typedOutput = typedOutput; setContentType( typedOutput.mimeType() ); } /** * getContent * (non-Javadoc). * * @return the content * @throws IOException Signals that an I/O exception has occurred. * @see org.apache.http.HttpEntity#getContent() */ @Override public InputStream getContent() throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); typedOutput.writeTo( out ); return new ByteArrayInputStream( out.toByteArray() ); } /** * getContentLength * (non-Javadoc). 
* * @return the content length * @see org.apache.http.HttpEntity#getContentLength() */ @Override public long getContentLength() { return typedOutput.length(); } /** * isRepeatable * (non-Javadoc). * * @return true, if is repeatable * @see org.apache.http.HttpEntity#isRepeatable() */ @Override public boolean isRepeatable() { return true; } /** * isStreaming * (non-Javadoc). * * @return true, if is streaming * @see org.apache.http.HttpEntity#isStreaming() */ @Override public boolean isStreaming() { return false; } /** * writeTo * (non-Javadoc). * * @param out the out * @throws IOException Signals that an I/O exception has occurred. * @see org.apache.http.HttpEntity#writeTo(java.io.OutputStream) */ @Override public void writeTo( final OutputStream out ) throws IOException { typedOutput.writeTo( out ); } } /** * Creates the default client. * * @return the http client */ public static HttpClient createDefaultClient() { HttpParams params = new BasicHttpParams(); HttpConnectionParams.setConnectionTimeout( params, 800000 ); HttpConnectionParams.setSoTimeout( params, 800000 ); DefaultHttpClient client = new DefaultHttpClient( params ); ClientUtilities.setupIgnoreSecurityCert( client ); return client; } /** * Creates the request. * * @param request the request * @return the http uri request */ static HttpUriRequest createRequest( final Request request ) { if ( request.getBody() != null ) { return new GenericEntityHttpRequest( request ); } return new GenericHttpRequest( request ); } // static HttpUriRequest createRequest( final feign.Request request ) // { // if ( request.body() != null ) // { // return new GenericEntityHttpRequest( request ); // } // return new GenericHttpRequest( request ); // } /** * Parses the response. * * @param url the url * @param response the response * @return the response * @throws IOException Signals that an I/O exception has occurred. 
*/ static Response parseResponseRetrofit( final String url, final HttpResponse response ) throws IOException { StatusLine statusLine = response.getStatusLine(); int status = statusLine.getStatusCode(); String reason = statusLine.getReasonPhrase(); List< Header > headers = new ArrayList< Header >(); String contentType = "application/octet-stream"; for ( org.apache.http.Header header : response.getAllHeaders() ) { String name = header.getName(); String value = header.getValue(); if ( "Content-Type".equalsIgnoreCase( name ) ) { contentType = value; } headers.add( new Header( name, value ) ); } TypedByteArray body = null; HttpEntity entity = response.getEntity(); if ( entity != null ) { byte[] bytes = EntityUtils.toByteArray( entity ); body = new TypedByteArray( contentType, bytes ); } return new Response( url, status, reason, headers, body ); } // static feign.Response parseResponseFeign( final String url, final HttpResponse response ) throws IOException // { // StatusLine statusLine = response.getStatusLine(); // int status = statusLine.getStatusCode(); // String reason = statusLine.getReasonPhrase(); // List< Header > headers = new ArrayList< Header >(); // String contentType = "application/octet-stream"; // for ( org.apache.http.Header header : response.getAllHeaders() ) // { // String name = header.getName(); // String value = header.getValue(); // if ( "Content-Type".equalsIgnoreCase( name ) ) // { // contentType = value; // } // headers.add( new Header( name, value ) ); // } // TypedByteArray body = null; // HttpEntity entity = response.getEntity(); // if ( entity != null ) // { // byte[] bytes = EntityUtils.toByteArray( entity ); // body = new TypedByteArray( contentType, bytes ); // } // feign.Response.Body // // return new feign.Response( status, reason, headers, body ); // } /** The use basic auth. */ protected boolean useBasicAuth = false; /** The client. */ protected final HttpClient client; /** The context. */ protected HttpContext context; /** The sso. 
*/ protected boolean sso = false; /** * Instantiates a new abstract apache http client. * * @param client the client * @param context the context * @param credentials the credentials */ protected AbstractHttpClient( final HttpClient client, final HttpContext context, final UsernamePasswordCredentials credentials ) { this.client = ( client != null ) ? client : AbstractHttpClient.createDefaultClient(); this.context = ( context != null ) ? context : new BasicHttpContext(); this.credentials = credentials; } /** * Execute the specified {@code request} using the provided {@code client}. * * @param client the client * @param request the request * @return the http response * @throws IOException Signals that an I/O exception has occurred. */ protected HttpResponse execute( final HttpClient client, final HttpUriRequest request ) throws IOException { return client.execute( request, context ); } /** * execute * (non-Javadoc). * * @param request the request * @return the response * @throws IOException Signals that an I/O exception has occurred. 
* @see retrofit.client.Client#execute(retrofit.client.Request) */ @Override public Response execute( final Request request ) throws IOException { HttpUriRequest apacheRequest = AbstractHttpClient.createRequest( request ); if ( ( useBasicAuth ) && ( credentials != null ) ) { String string = "Basic " + Base64.encodeBase64String( ( credentials.getUserName() + ":" + credentials.getPassword() ).getBytes() ); apacheRequest.addHeader( "Accept", "application/json" ); apacheRequest.addHeader( "Authorization", string ); } HttpResponse apacheResponse = execute( client, apacheRequest ); return AbstractHttpClient.parseResponseRetrofit( request.getUrl(), apacheResponse ); } // @Override // public feign.Response execute( feign.Request paramRequest, feign.Request.Options paramOptions ) throws IOException // { // HttpUriRequest apacheRequest = AbstractHttpClient.createRequest( request ); // // if ( ( useBasicAuth ) && ( credentials != null ) ) // { // String string = "Basic " + Base64.encodeBase64String( ( credentials.getUserName() + ":" + credentials.getPassword() ).getBytes() ); // apacheRequest.addHeader( "Accept", "application/json" ); // apacheRequest.addHeader( "Authorization", string ); // } // HttpResponse apacheResponse = execute( client, apacheRequest ); // return AbstractHttpClient.parseResponseRetrofit( request.getUrl(), apacheResponse ); // } /** * Gets the. * * @param request the request * @return the http response * @throws ClientProtocolException the client protocol exception * @throws IOException Signals that an I/O exception has occurred. */ public HttpResponse get( HttpGet request ) throws ClientProtocolException, IOException { return client.execute( request, context ); } /** * Gets the. * * @param url the url * @return the http response * @throws ClientProtocolException the client protocol exception * @throws IOException Signals that an I/O exception has occurred. 
* @throws AuthenticationException the authentication exception */ public HttpResponse get( String url ) throws ClientProtocolException, IOException, AuthenticationException { HttpGet request = new HttpGet( url ); if ( ( useBasicAuth ) && ( credentials != null ) ) { request.addHeader( new BasicScheme().authenticate( credentials, request ) ); } for ( Entry< String, String > staticRequestParam : staticRequestParams.entrySet() ) { request.addHeader( staticRequestParam.getKey(), staticRequestParam.getValue() ); } return get( request ); } public Map< String, String > staticRequestParams = new HashMap< String, String >(); /** * Gets the. * * @param url the url * @param creds the creds * @return the http response * @throws ClientProtocolException the client protocol exception * @throws IOException Signals that an I/O exception has occurred. * @throws AuthenticationException the authentication exception */ public HttpResponse get( String url, UsernamePasswordCredentials creds ) throws ClientProtocolException, IOException, AuthenticationException { HttpGet request = new HttpGet( url ); for ( Entry< String, String > staticRequestParam : staticRequestParams.entrySet() ) { request.addHeader( staticRequestParam.getKey(), staticRequestParam.getValue() ); } request.addHeader( new BasicScheme().authenticate( creds, request ) ); return get( request ); } /** * Gets the client. * * @return the client */ public HttpClient getClient() { return client; } /** * Gets the context. * * @return the context */ public HttpContext getContext() { return context; } /** * Gets the string. * * @param url the url * @return the string * @throws ClientProtocolException the client protocol exception * @throws IOException Signals that an I/O exception has occurred. 
* @throws AuthenticationException the authentication exception */ public String getString( String url ) throws ClientProtocolException, IOException, AuthenticationException { HttpResponse response = get( url ); BufferedReader rd = new BufferedReader( new InputStreamReader( response.getEntity().getContent() ) ); StringBuffer result = new StringBuffer(); String line = ""; while ( ( line = rd.readLine() ) != null ) { result.append( line ); } return result.toString(); } /** * Gets the string. * * @param url the url * @param creds the creds * @return the string * @throws ClientProtocolException the client protocol exception * @throws IOException Signals that an I/O exception has occurred. * @throws AuthenticationException the authentication exception */ public String getString( String url, UsernamePasswordCredentials creds ) throws ClientProtocolException, IOException, AuthenticationException { HttpResponse response = get( url, creds ); BufferedReader rd = new BufferedReader( new InputStreamReader( response.getEntity().getContent() ) ); StringBuffer result = new StringBuffer(); String line = ""; while ( ( line = rd.readLine() ) != null ) { result.append( line ); } return result.toString(); } /** * Post exec wait. * * @param request the request * @return the http response */ protected HttpResponse postExecWait( HttpPost request ) { HttpResponse response = null; boolean notReady = true; while ( notReady ) { try { response = client.execute( request ); notReady = false; int status = response.getStatusLine().getStatusCode(); if ( status < 200 || status >= 400 ) { notReady = true; } } catch ( Exception e ) { notReady = true; } } return response; } /** * Process form. 
* * @param request the request * @param form the form * @return the http post * @throws UnsupportedEncodingException the unsupported encoding exception */ protected HttpPost processForm( HttpPost request, Element form ) throws UnsupportedEncodingException { List< NameValuePair > urlParameters = new ArrayList< NameValuePair >(); boolean loginProcessed = false; for ( Element tag : form.getElementsByTag( "input" ) ) { switch ( tag.attr( "name" ) ) { case "RelayState": case "SAMLRequest": case "SAMLResponse": case "opentoken": urlParameters.add( new BasicNameValuePair( tag.attr( "name" ), tag.attr( "value" ) ) ); break; case "pf.username": case "pf.pass": if ( !loginProcessed ) { if ( credentials != null ) { urlParameters.add( new BasicNameValuePair( "pf.username", credentials.getUserName() ) ); urlParameters.add( new BasicNameValuePair( "pf.pass", credentials.getPassword() ) ); urlParameters.add( new BasicNameValuePair( "pf.username", credentials.getUserName() ) ); urlParameters.add( new BasicNameValuePair( "pf.ok", "clicked" ) ); loginProcessed = true; } } break; default: break; } } request.setEntity( new UrlEncodedFormEntity( urlParameters ) ); return request; } /** * Response string. * * @param response the response * @return the string * @throws IllegalStateException the illegal state exception * @throws IOException Signals that an I/O exception has occurred. */ protected String responseString( HttpResponse response ) throws IllegalStateException, IOException { BufferedReader rd = new BufferedReader( new InputStreamReader( response.getEntity().getContent() ) ); StringBuffer result = new StringBuffer(); String line = ""; while ( ( line = rd.readLine() ) != null ) { result.append( line ); } return result.toString(); } /** * Sets the context. * * @param context the new context */ public void setContext( final HttpContext context ) { this.context = context; } }
/* * Copyright (C) 2015 Archie L. Cobbs. All rights reserved. */ package org.jsimpledb.parse; import com.google.common.base.Preconditions; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashSet; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import org.jsimpledb.JSimpleDB; import org.jsimpledb.Session; import org.jsimpledb.core.Database; import org.jsimpledb.kv.KVDatabase; import org.jsimpledb.parse.expr.Value; import org.jsimpledb.parse.func.AbstractFunction; import org.jsimpledb.parse.func.AllFunction; import org.jsimpledb.parse.func.ConcatFunction; import org.jsimpledb.parse.func.CountFunction; import org.jsimpledb.parse.func.CreateFunction; import org.jsimpledb.parse.func.FilterFunction; import org.jsimpledb.parse.func.ForEachFunction; import org.jsimpledb.parse.func.Function; import org.jsimpledb.parse.func.InvertFunction; import org.jsimpledb.parse.func.LimitFunction; import org.jsimpledb.parse.func.ListFunction; import org.jsimpledb.parse.func.QueryCompositeIndexFunction; import org.jsimpledb.parse.func.QueryIndexFunction; import org.jsimpledb.parse.func.QueryListElementIndexFunction; import org.jsimpledb.parse.func.QueryMapValueIndexFunction; import org.jsimpledb.parse.func.QueryVersionFunction; import org.jsimpledb.parse.func.TransformFunction; import org.jsimpledb.parse.func.UpgradeFunction; import org.jsimpledb.parse.func.VersionFunction; /** * A {@link Session} with support for parsing Java expressions. */ public class ParseSession extends Session { private final LinkedHashSet<String> imports = new LinkedHashSet<>(); private final TreeMap<String, AbstractFunction> functions = new TreeMap<>(); private final TreeMap<String, Value> variables = new TreeMap<>(); // Constructors /** * Constructor for {@link org.jsimpledb.SessionMode#KEY_VALUE} mode. 
* * @param kvdb key/value database * @throws IllegalArgumentException if {@code kvdb} is null */ public ParseSession(KVDatabase kvdb) { super(kvdb); this.imports.add("java.lang.*"); } /** * Constructor for {@link org.jsimpledb.SessionMode#CORE_API} mode. * * @param db core database * @throws IllegalArgumentException if {@code db} is null */ public ParseSession(Database db) { super(db); this.imports.add("java.lang.*"); } /** * Constructor for {@link org.jsimpledb.SessionMode#JSIMPLEDB} mode. * * @param jdb database * @throws IllegalArgumentException if {@code jdb} is null */ public ParseSession(JSimpleDB jdb) { super(jdb); this.imports.add("java.lang.*"); } // Accessors /** * Get currently configured Java imports. * * <p> * Each entry should of the form {@code foo.bar.Name} or {@code foo.bar.*}. * </p> * * @return configured imports */ public Set<String> getImports() { return this.imports; } /** * Get the {@link AbstractFunction}s registered with this instance. * * @return registered functions indexed by name */ public SortedMap<String, AbstractFunction> getFunctions() { return this.functions; } /** * Get all variables set on this instance. * * @return variables indexed by name */ public SortedMap<String, Value> getVars() { return this.variables; } // Function registration /** * Register the standard built-in functions such as {@code all()}, {@code foreach()}, etc. 
*/ public void registerStandardFunctions() { // We don't use AnnotatedClassScanner here to avoid having a dependency on the spring classes final Class<?>[] functionClasses = new Class<?>[] { AllFunction.class, ConcatFunction.class, CountFunction.class, CreateFunction.class, FilterFunction.class, ForEachFunction.class, InvertFunction.class, LimitFunction.class, ListFunction.class, QueryCompositeIndexFunction.class, QueryIndexFunction.class, QueryListElementIndexFunction.class, QueryMapValueIndexFunction.class, QueryVersionFunction.class, TransformFunction.class, UpgradeFunction.class, VersionFunction.class, }; for (Class<?> cl : functionClasses) { final Function annotation = cl.getAnnotation(Function.class); if (annotation != null && Arrays.asList(annotation.modes()).contains(this.getMode())) this.registerFunction(cl); } } /** * Create an instance of the specified class and register it as an {@link AbstractFunction}. * as appropriate. The class must have a public constructor taking either a single {@link ParseSession} parameter * or no parameters; they will be tried in that order. * * @param cl function class * @throws IllegalArgumentException if {@code cl} has no suitable constructor * @throws IllegalArgumentException if {@code cl} instantiation fails * @throws IllegalArgumentException if {@code cl} does not subclass {@link AbstractFunction} */ public void registerFunction(Class<?> cl) { if (!AbstractFunction.class.isAssignableFrom(cl)) throw new IllegalArgumentException(cl + " does not subclass " + AbstractFunction.class.getName()); final AbstractFunction function = this.instantiate(cl.asSubclass(AbstractFunction.class)); this.functions.put(function.getName(), function); } /** * Instantiate an instance of the given class. * The class must have a public constructor taking either a single {@link ParseSession} parameter * or no parameters; they will be tried in that order. 
*/ private <T> T instantiate(Class<T> cl) { Throwable failure; try { return cl.getConstructor(ParseSession.class).newInstance(this); } catch (NoSuchMethodException e) { try { return cl.getConstructor().newInstance(); } catch (NoSuchMethodException e2) { throw new IllegalArgumentException("no suitable constructor found in class " + cl.getName()); } catch (Exception e2) { failure = e2; } } catch (Exception e) { failure = e; } if (failure instanceof InvocationTargetException) failure = failure.getCause(); throw new IllegalArgumentException("unable to instantiate class " + cl.getName() + ": " + failure, failure); } // Class name resolution /** * Resolve a class name against this instance's currently configured class imports. * * @param name class name * @return resolved class, or null if not found */ public Class<?> resolveClass(final String name) { final int firstDot = name.indexOf('.'); final String firstPart = firstDot != -1 ? name.substring(0, firstDot - 1) : name; final ArrayList<String> packages = new ArrayList<>(this.imports.size() + 1); packages.add(null); packages.addAll(this.imports); for (String pkg : packages) { // Get absolute class name String className; if (pkg == null) className = name; else if (pkg.endsWith(".*")) className = pkg.substring(0, pkg.length() - 1) + name; else { if (!firstPart.equals(pkg.substring(pkg.lastIndexOf('.') + 1, pkg.length() - 2))) continue; className = pkg.substring(0, pkg.length() - 2 - firstPart.length()) + name; } // Try package vs. nested classes while (true) { try { return Class.forName(className, false, Thread.currentThread().getContextClassLoader()); } catch (ClassNotFoundException e) { // not found } final int lastDot = className.lastIndexOf('.'); if (lastDot == -1) break; className = className.substring(0, lastDot) + "$" + className.substring(lastDot + 1); } } return null; } /** * Relativize the given class's name, so that it is as short as possible given the configured imports. 
* For example, for class {@link String} this will return {@code String}, but for class {@link ArrayList} * this will return {@code java.util.ArrayList} unless {@code java.util.*} has been imported. * * @param klass class whose name to relativize * @return relativized class name * @throws IllegalArgumentException if {@code klass} is null */ public String relativizeClassName(Class<?> klass) { Preconditions.checkArgument(klass != null, "null klass"); final String name = klass.getName(); for (int pos = name.lastIndexOf('.'); pos > 0; pos = name.lastIndexOf('.', pos - 1)) { final String shortName = name.substring(pos + 1); if (this.resolveClass(shortName) == klass) return shortName; } return klass.getName(); } // Action /** * Perform the given action within a new transaction associated with this instance. * * <p> * If {@code action} throws an {@link Exception}, it will be caught and handled by {@link #reportException reportException()} * and then false returned. * * @param action action to perform * @return true if {@code action} completed successfully, false if the transaction could not be created * or {@code action} threw an exception * @throws IllegalArgumentException if {@code action} is null * @throws IllegalStateException if there is already an open transaction associated with this instance */ public boolean performParseSessionAction(final Action action) { return this.performSessionAction(new Session.Action() { @Override public void run(Session session) throws Exception { action.run((ParseSession)session); } }); } /** * Associate the current {@link org.jsimpledb.JTransaction} with this instance while performing the given action. * * <p> * If {@code action} throws an {@link Exception}, it will be caught and handled by {@link #reportException reportException()} * and then false returned. * * <p> * There must be a {@link org.jsimpledb.JTransaction} open and * {@linkplain org.jsimpledb.JTransaction#getCurrent associated with the current thread}. 
* It will be left open when this method returns. * * <p> * This method safely handles re-entrant invocation. * * @param action action to perform * @return true if {@code action} completed successfully, false if {@code action} threw an exception * @throws IllegalArgumentException if {@code action} is null * @throws IllegalStateException if there is already an open transaction associated with this instance * @throws IllegalStateException if this instance is not in mode {@link org.jsimpledb.SessionMode#JSIMPLEDB} */ public boolean performParseSessionActionWithCurrentTransaction(final Action action) { return this.performSessionActionWithCurrentTransaction(new Session.Action() { @Override public void run(Session session) throws Exception { action.run((ParseSession)session); } }); } /** * Callback interface used by {@link ParseSession#performParseSessionAction ParseSession.performParseSessionAction()} * and {@link ParseSession#performParseSessionActionWithCurrentTransaction * ParseSession.performParseSessionActionWithCurrentTransaction()}. */ public interface Action { /** * Perform some action using the given {@link ParseSession} while a transaction is open. * * @param session session with open transaction * @throws Exception if an error occurs */ void run(ParseSession session) throws Exception; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.scale;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.S3ATestUtils;
import org.apache.hadoop.fs.s3a.Statistic;
import org.apache.hadoop.fs.s3a.WriteOperationHelper;
import org.apache.hadoop.fs.s3a.api.RequestFactory;
import org.apache.hadoop.fs.statistics.IOStatistics;
import org.apache.hadoop.fs.store.audit.AuditSpan;
import org.apache.hadoop.util.functional.RemoteIterators;

import org.junit.Test;
import org.assertj.core.api.Assertions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;

import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY;
import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY_KEEP;
import static org.apache.hadoop.fs.s3a.Constants.S3_METADATA_STORE_IMPL;
import static org.apache.hadoop.fs.s3a.Statistic.*;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
import static org.apache.hadoop.fs.s3a.impl.CallableSupplier.submit;
import static org.apache.hadoop.fs.s3a.impl.CallableSupplier.waitForCompletion;
import static org.apache.hadoop.fs.statistics.IOStatisticAssertions.lookupCounterStatistic;
import static org.apache.hadoop.fs.statistics.IOStatisticAssertions.verifyStatisticCounterValue;
import static org.apache.hadoop.fs.statistics.IOStatisticsLogging.ioStatisticsToPrettyString;
import static org.apache.hadoop.fs.statistics.IOStatisticsSupport.retrieveIOStatistics;
import static org.apache.hadoop.fs.statistics.StoreStatisticNames.OBJECT_CONTINUE_LIST_REQUEST;
import static org.apache.hadoop.fs.statistics.StoreStatisticNames.OBJECT_LIST_REQUEST;

/**
 * Test the performance of listing files/directories.
 */
public class ITestS3ADirectoryPerformance extends S3AScaleTestBase {

  private static final Logger LOG = LoggerFactory.getLogger(
      ITestS3ADirectoryPerformance.class);

  /**
   * Create a deep/wide directory tree (plus a set of empty directories),
   * then time the different recursive listing mechanisms against it:
   * an explicit treewalk (the baseline) and {@code listFiles(recursive=true)}.
   * Metric diffs of object-store requests are printed after each phase.
   *
   * @throws Throwable on any failure.
   */
  @Test
  public void testListOperations() throws Throwable {
    describe("Test recursive list operations");
    final Path scaleTestDir = path("testListOperations");
    final Path listDir = new Path(scaleTestDir, "lists");
    S3AFileSystem fs = getFileSystem();

    // scale factor drives depth, width and per-directory file count alike.
    int scale = getConf().getInt(KEY_DIRECTORY_COUNT, DEFAULT_DIRECTORY_COUNT);
    int width = scale;
    int depth = scale;
    int files = scale;
    MetricDiff metadataRequests = new MetricDiff(fs, OBJECT_METADATA_REQUESTS);
    MetricDiff listRequests = new MetricDiff(fs, Statistic.OBJECT_LIST_REQUEST);
    MetricDiff listContinueRequests =
        new MetricDiff(fs, OBJECT_CONTINUE_LIST_REQUESTS);
    MetricDiff listStatusCalls = new MetricDiff(fs, INVOCATION_LIST_FILES);
    MetricDiff getFileStatusCalls =
        new MetricDiff(fs, INVOCATION_GET_FILE_STATUS);
    NanoTimer createTimer = new NanoTimer();
    TreeScanResults created =
        createSubdirs(fs, listDir, depth, width, files, 0);
    // add some empty directories
    int emptyDepth = 1 * scale;
    int emptyWidth = 3 * scale;
    created.add(createSubdirs(fs, listDir, emptyDepth, emptyWidth, 0,
        0, "empty", "f-", ""));
    createTimer.end("Time to create %s", created);
    LOG.info("Time per operation: {}",
        toHuman(createTimer.nanosPerOperation(created.totalCount())));
    printThenReset(LOG,
        metadataRequests,
        listRequests,
        listContinueRequests,
        listStatusCalls,
        getFileStatusCalls);

    describe("Listing files via treewalk");
    try {
      // Scan the directory via an explicit tree walk.
      // This is the baseline for any listing speedups.
      NanoTimer treeWalkTimer = new NanoTimer();
      TreeScanResults treewalkResults = treeWalk(fs, listDir);
      treeWalkTimer.end("List status via treewalk of %s", created);

      printThenReset(LOG,
          metadataRequests,
          listRequests,
          listContinueRequests,
          listStatusCalls,
          getFileStatusCalls);
      // Fixed message: this assertion checks the treewalk scan, not
      // listFiles(recursive=true); the previous text was a copy-paste of the
      // later assertion and would misattribute any failure.
      assertEquals("Files found in treewalk "
              + " created=" + created + " listed=" + treewalkResults,
          created.getFileCount(), treewalkResults.getFileCount());

      describe("Listing files via listFiles(recursive=true)");
      // listFiles() does the recursion internally
      NanoTimer listFilesRecursiveTimer = new NanoTimer();

      TreeScanResults listFilesResults = new TreeScanResults(
          fs.listFiles(listDir, true));

      listFilesRecursiveTimer.end("listFiles(recursive=true) of %s", created);
      assertEquals("Files found in listFiles(recursive=true) "
              + " created=" + created + " listed=" + listFilesResults,
          created.getFileCount(), listFilesResults.getFileCount());

      // only two list operations should have taken place
      print(LOG,
          metadataRequests,
          listRequests,
          listContinueRequests,
          listStatusCalls,
          getFileStatusCalls);
      if (!fs.hasMetadataStore()) {
        assertEquals(listRequests.toString(), 1, listRequests.diff());
      }
      reset(metadataRequests,
          listRequests,
          listContinueRequests,
          listStatusCalls,
          getFileStatusCalls);
    } finally {
      describe("deletion");
      // deletion at the end of the run
      NanoTimer deleteTimer = new NanoTimer();
      fs.delete(listDir, true);
      deleteTimer.end("Deleting directory tree");
      printThenReset(LOG,
          metadataRequests,
          listRequests,
          listContinueRequests,
          listStatusCalls,
          getFileStatusCalls);
    }
  }

  /**
   * This is quite a big test; it PUTs up a number of
   * files and then lists them in a filesystem set to ask for a small number
   * of files on each listing.
   * The standard listing API calls are all used, and then
   * delete() is invoked to verify that paged deletion works correctly too.
   *
   * @throws Throwable on any failure.
   */
  @Test
  public void testMultiPagesListingPerformanceAndCorrectness()
      throws Throwable {
    describe("Check performance and correctness for multi page listing "
        + "using different listing api");
    final Path dir = methodPath();
    final int batchSize = 10;
    final int numOfPutRequests = 1000;
    final int eachFileProcessingTime = 10;
    final int numOfPutThreads = 50;
    // paging arithmetic below (continuation count) requires an exact multiple.
    Assertions.assertThat(numOfPutRequests % batchSize)
        .describedAs("Files put %d must be a multiple of list batch size %d",
            numOfPutRequests, batchSize)
        .isEqualTo(0);
    final Configuration conf =
        getConfigurationWithConfiguredBatchSize(batchSize);

    removeBaseAndBucketOverrides(conf,
        S3_METADATA_STORE_IMPL,
        DIRECTORY_MARKER_POLICY);
    // force directory markers = keep to save delete requests on every
    // file created.
    conf.set(DIRECTORY_MARKER_POLICY, DIRECTORY_MARKER_POLICY_KEEP);
    S3AFileSystem fs = (S3AFileSystem) FileSystem.get(dir.toUri(), conf);

    final List<String> originalListOfFiles = new ArrayList<>();
    ExecutorService executorService = Executors
        .newFixedThreadPool(numOfPutThreads);
    NanoTimer uploadTimer = new NanoTimer();
    try {
      assume("Test is only for raw fs", !fs.hasMetadataStore());
      // NOTE(review): the stream returned by create() is not closed here;
      // matches upstream behavior but looks like a latent leak — confirm.
      fs.create(dir);

      // create a span for the write operations
      final AuditSpan span = fs.getAuditSpanSource()
          .createSpan(OBJECT_PUT_REQUESTS.getSymbol(), dir.toString(), null);
      final WriteOperationHelper writeOperationHelper
          = fs.getWriteOperationHelper();
      final RequestFactory requestFactory
          = writeOperationHelper.getRequestFactory();
      List<CompletableFuture<PutObjectResult>> futures =
          new ArrayList<>(numOfPutRequests);

      for (int i = 0; i < numOfPutRequests; i++) {
        Path file = new Path(dir, String.format("file-%03d", i));
        originalListOfFiles.add(file.toString());
        ObjectMetadata om = fs.newObjectMetadata(0L);
        PutObjectRequest put = requestFactory
            .newPutObjectRequest(fs.pathToKey(file), om,
                new FailingInputStream());
        futures.add(submit(executorService,
            () -> writeOperationHelper.putObject(put)));
      }
      LOG.info("Waiting for PUTs to complete");
      waitForCompletion(futures);
      uploadTimer.end("uploading %d files with a parallelism of %d",
          numOfPutRequests, numOfPutThreads);

      // listing, first via listFiles(recursive=true)
      RemoteIterator<LocatedFileStatus> resIterator = fs.listFiles(dir, true);
      List<String> listUsingListFiles = new ArrayList<>();
      NanoTimer timeUsingListFiles = new NanoTimer();
      RemoteIterators.foreach(resIterator, st -> {
        listUsingListFiles.add(st.getPath().toString());
        sleep(eachFileProcessingTime);
      });
      LOG.info("Listing Statistics: {}", ioStatisticsToPrettyString(
          retrieveIOStatistics(resIterator)));

      timeUsingListFiles.end("listing %d files using listFiles() api with "
              + "batch size of %d including %dms of processing time"
              + " for each file",
          numOfPutRequests, batchSize, eachFileProcessingTime);

      Assertions.assertThat(listUsingListFiles)
          .describedAs("Listing results using listFiles() must"
              + " match with original list of files")
          .hasSameElementsAs(originalListOfFiles)
          .hasSize(numOfPutRequests);

      // then via listStatus()
      List<String> listUsingListStatus = new ArrayList<>();
      NanoTimer timeUsingListStatus = new NanoTimer();
      FileStatus[] fileStatuses = fs.listStatus(dir);
      for (FileStatus fileStatus : fileStatuses) {
        listUsingListStatus.add(fileStatus.getPath().toString());
        sleep(eachFileProcessingTime);
      }
      timeUsingListStatus.end("listing %d files using listStatus() api with "
              + "batch size of %d including %dms of processing time"
              + " for each file",
          numOfPutRequests, batchSize, eachFileProcessingTime);
      Assertions.assertThat(listUsingListStatus)
          .describedAs("Listing results using listStatus() must"
              + "match with original list of files")
          .hasSameElementsAs(originalListOfFiles)
          .hasSize(numOfPutRequests);

      // Validate listing using listStatusIterator().
      NanoTimer timeUsingListStatusItr = new NanoTimer();
      List<String> listUsingListStatusItr = new ArrayList<>();
      RemoteIterator<FileStatus> lsItr = fs.listStatusIterator(dir);
      RemoteIterators.foreach(lsItr, st -> {
        listUsingListStatusItr.add(st.getPath().toString());
        sleep(eachFileProcessingTime);
      });
      timeUsingListStatusItr.end("listing %d files using "
              + "listStatusIterator() api with batch size of %d "
              + "including %dms of processing time for each file",
          numOfPutRequests, batchSize, eachFileProcessingTime);
      Assertions.assertThat(listUsingListStatusItr)
          .describedAs("Listing results using listStatusIterator() must"
              + "match with original list of files")
          .hasSameElementsAs(originalListOfFiles)
          .hasSize(numOfPutRequests);

      // now validate the statistics returned by the listing
      // to be non-null and containing list and continue counters.
      IOStatistics lsStats = retrieveIOStatistics(lsItr);
      String statsReport = ioStatisticsToPrettyString(lsStats);
      LOG.info("Listing Statistics: {}", statsReport);
      verifyStatisticCounterValue(lsStats, OBJECT_LIST_REQUEST, 1);
      long continuations = lookupCounterStatistic(lsStats,
          OBJECT_CONTINUE_LIST_REQUEST);
      // calculate expected #of continuations
      int expectedContinuations = numOfPutRequests / batchSize - 1;
      Assertions.assertThat(continuations)
          .describedAs("%s in %s", OBJECT_CONTINUE_LIST_REQUEST, statsReport)
          .isEqualTo(expectedContinuations);

      // and finally listLocatedStatus()
      List<String> listUsingListLocatedStatus = new ArrayList<>();
      RemoteIterator<LocatedFileStatus> it = fs.listLocatedStatus(dir);
      RemoteIterators.foreach(it, st -> {
        listUsingListLocatedStatus.add(st.getPath().toString());
        sleep(eachFileProcessingTime);
      });
      final IOStatistics llsStats = retrieveIOStatistics(it);
      LOG.info("Listing Statistics: {}", ioStatisticsToPrettyString(
          llsStats));
      verifyStatisticCounterValue(llsStats, OBJECT_CONTINUE_LIST_REQUEST,
          expectedContinuations);
      Assertions.assertThat(listUsingListLocatedStatus)
          .describedAs("Listing results using listLocatedStatus() must"
              + "match with original list of files")
          .hasSameElementsAs(originalListOfFiles);
      // delete in this FS so S3Guard is left out of it.
      // and so that the incremental listing is tested through
      // the delete operation.
      fs.delete(dir, true);
    } finally {
      executorService.shutdown();
      // in case the previous delete was not reached.
      fs.delete(dir, true);
      LOG.info("FS statistics {}",
          ioStatisticsToPrettyString(fs.getIOStatistics()));
      fs.close();
    }
  }

  /**
   * Input stream which always returns -1.
   */
  private static final class FailingInputStream extends InputStream {
    @Override
    public int read() throws IOException {
      return -1;
    }
  }

  /**
   * Sleep briefly.
   * If interrupted, the thread's interrupt status is restored so that
   * callers (e.g. executor shutdown) can observe the interruption.
   * @param eachFileProcessingTime time to sleep.
   */
  private void sleep(final int eachFileProcessingTime) {
    try {
      Thread.sleep(eachFileProcessingTime);
    } catch (InterruptedException e) {
      // re-assert the interrupt rather than silently swallowing it.
      Thread.currentThread().interrupt();
    }
  }

  /**
   * Build a configuration with filesystem caching disabled and the
   * listing page size set to the given batch size.
   * @param batchSize maximum keys per list request.
   * @return the new configuration.
   */
  private Configuration getConfigurationWithConfiguredBatchSize(
      int batchSize) {
    Configuration conf = new Configuration(getFileSystem().getConf());
    S3ATestUtils.disableFilesystemCaching(conf);
    conf.setInt(Constants.MAX_PAGING_KEYS, batchSize);
    return conf;
  }

  @Test
  public void testTimeToStatEmptyDirectory() throws Throwable {
    describe("Time to stat an empty directory");
    Path path = path("empty");
    getFileSystem().mkdirs(path);
    timeToStatPath(path);
  }

  @Test
  public void testTimeToStatNonEmptyDirectory() throws Throwable {
    describe("Time to stat a non-empty directory");
    Path path = path("dir");
    S3AFileSystem fs = getFileSystem();
    fs.mkdirs(path);
    touch(fs, new Path(path, "file"));
    timeToStatPath(path);
  }

  @Test
  public void testTimeToStatFile() throws Throwable {
    describe("Time to stat a simple file");
    Path path = path("file");
    touch(getFileSystem(), path);
    timeToStatPath(path);
  }

  @Test
  public void testTimeToStatRoot() throws Throwable {
    describe("Time to stat the root path");
    timeToStatPath(new Path("/"));
  }

  /**
   * Time repeated getFileStatus() calls against a path and log the
   * per-call cost plus the metadata/list request counts incurred.
   * @param path path to stat.
   * @throws IOException on a filesystem failure.
   */
  private void timeToStatPath(Path path) throws IOException {
    describe("Timing getFileStatus(\"%s\")", path);
    S3AFileSystem fs = getFileSystem();
    MetricDiff metadataRequests =
        new MetricDiff(fs, Statistic.OBJECT_METADATA_REQUESTS);
    MetricDiff listRequests =
        new MetricDiff(fs, Statistic.OBJECT_LIST_REQUEST);
    long attempts = getOperationCount();
    NanoTimer timer = new NanoTimer();
    for (long l = 0; l < attempts; l++) {
      fs.getFileStatus(path);
    }
    timer.end("Time to execute %d getFileStatusCalls", attempts);
    LOG.info("Time per call: {}",
        toHuman(timer.nanosPerOperation(attempts)));
    LOG.info("metadata: {}", metadataRequests);
    LOG.info("metadata per operation {}", metadataRequests.diff() / attempts);
    LOG.info("listObjects: {}", listRequests);
    LOG.info("listObjects: per operation {}",
        listRequests.diff() / attempts);
  }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.saml.processing.core.saml.v2.writers;

import org.keycloak.dom.saml.v2.assertion.AttributeType;
import org.keycloak.dom.saml.v2.assertion.NameIDType;
import org.keycloak.dom.saml.v2.assertion.SubjectType;
import org.keycloak.dom.saml.v2.protocol.ArtifactResolveType;
import org.keycloak.dom.saml.v2.protocol.AttributeQueryType;
import org.keycloak.dom.saml.v2.protocol.AuthnContextComparisonType;
import org.keycloak.dom.saml.v2.protocol.AuthnRequestType;
import org.keycloak.dom.saml.v2.protocol.LogoutRequestType;
import org.keycloak.dom.saml.v2.protocol.NameIDPolicyType;
import org.keycloak.dom.saml.v2.protocol.RequestedAuthnContextType;
import org.keycloak.saml.common.constants.JBossSAMLConstants;
import org.keycloak.saml.common.exceptions.ProcessingException;
import org.keycloak.saml.common.util.StaxUtil;
import org.keycloak.saml.common.util.StringUtil;
import org.w3c.dom.Element;

import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamWriter;
import java.net.URI;
import java.util.List;
import org.keycloak.dom.saml.v2.protocol.ExtensionsType;

import static org.keycloak.saml.common.constants.JBossSAMLURIConstants.ASSERTION_NSURI;
import static org.keycloak.saml.common.constants.JBossSAMLURIConstants.NAMEID_FORMAT_TRANSIENT;
import static org.keycloak.saml.common.constants.JBossSAMLURIConstants.PROTOCOL_NSURI;

/**
 * Writes a SAML2 Request Type to Stream
 *
 * @author Anil.Saldhana@redhat.com
 * @since Nov 2, 2010
 */
public class SAMLRequestWriter extends BaseWriter {

    public SAMLRequestWriter(XMLStreamWriter writer) {
        super(writer);
    }

    /**
     * Write a {@code AuthnRequestType } to stream
     *
     * @param request
     *
     * @throws org.keycloak.saml.common.exceptions.ProcessingException
     */
    public void write(AuthnRequestType request) throws ProcessingException {
        // Root element: samlp:AuthnRequest with both protocol and assertion
        // namespaces declared up front; assertion is the default namespace.
        StaxUtil.writeStartElement(writer, PROTOCOL_PREFIX, JBossSAMLConstants.AUTHN_REQUEST.get(), PROTOCOL_NSURI.get());
        StaxUtil.writeNameSpace(writer, PROTOCOL_PREFIX, PROTOCOL_NSURI.get());
        StaxUtil.writeNameSpace(writer, ASSERTION_PREFIX, ASSERTION_NSURI.get());
        StaxUtil.writeDefaultNameSpace(writer, ASSERTION_NSURI.get());

        // Attributes
        // ID, Version and IssueInstant are written unconditionally; all
        // remaining attributes are optional and only emitted when present.
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.ID.get(), request.getID());
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.VERSION.get(), request.getVersion());
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.ISSUE_INSTANT.get(), request.getIssueInstant().toString());

        URI destination = request.getDestination();
        if (destination != null)
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.DESTINATION.get(), destination.toASCIIString());

        String consent = request.getConsent();
        if (StringUtil.isNotNull(consent))
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.CONSENT.get(), consent);

        URI assertionURL = request.getAssertionConsumerServiceURL();
        if (assertionURL != null)
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.ASSERTION_CONSUMER_SERVICE_URL.get(),
                    assertionURL.toASCIIString());

        Boolean forceAuthn = request.isForceAuthn();
        if (forceAuthn != null) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.FORCE_AUTHN.get(), forceAuthn.toString());
        }

        Boolean isPassive = request.isIsPassive();
        // The AuthnRequest IsPassive attribute is optional and if omitted its default value is false.
        // Some IdPs refuse requests if the IsPassive attribute is present and set to false, so to
        // maximize compatibility we emit it only if it is set to true
        if (isPassive != null && isPassive == true) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.IS_PASSIVE.get(), isPassive.toString());
        }

        URI protocolBinding = request.getProtocolBinding();
        if (protocolBinding != null) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.PROTOCOL_BINDING.get(), protocolBinding.toString());
        }

        Integer assertionIndex = request.getAssertionConsumerServiceIndex();
        if (assertionIndex != null) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.ASSERTION_CONSUMER_SERVICE_INDEX.get(), assertionIndex.toString());
        }

        Integer attrIndex = request.getAttributeConsumingServiceIndex();
        if (attrIndex != null) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.ATTRIBUTE_CONSUMING_SERVICE_INDEX.get(), attrIndex.toString());
        }
        String providerName = request.getProviderName();
        if (StringUtil.isNotNull(providerName)) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.PROVIDER_NAME.get(), providerName);
        }

        // Child elements, in SAML 2.0 schema order:
        // Issuer, Subject, Signature, Extensions, NameIDPolicy,
        // RequestedAuthnContext.
        NameIDType issuer = request.getIssuer();
        if (issuer != null) {
            write(issuer, new QName(ASSERTION_NSURI.get(), JBossSAMLConstants.ISSUER.get(), ASSERTION_PREFIX), false);
        }

        SubjectType subject = request.getSubject();
        if (subject != null) {
            write(subject);
        }

        Element sig = request.getSignature();
        if (sig != null) {
            StaxUtil.writeDOMElement(writer, sig);
        }
        ExtensionsType extensions = request.getExtensions();
        if (extensions != null && ! extensions.getAny().isEmpty()) {
            write(extensions);
        }

        NameIDPolicyType nameIDPolicy = request.getNameIDPolicy();
        if (nameIDPolicy != null) {
            write(nameIDPolicy);
        }

        RequestedAuthnContextType requestedAuthnContext = request.getRequestedAuthnContext();
        if (requestedAuthnContext != null) {
            write(requestedAuthnContext);
        }

        StaxUtil.writeEndElement(writer);
        StaxUtil.flush(writer);
    }

    /**
     * Write a {@code LogoutRequestType} to stream
     *
     * @param logOutRequest
     *
     * @throws ProcessingException
     */
    public void write(LogoutRequestType logOutRequest) throws ProcessingException {
        StaxUtil.writeStartElement(writer, PROTOCOL_PREFIX, JBossSAMLConstants.LOGOUT_REQUEST.get(), PROTOCOL_NSURI.get());

        StaxUtil.writeNameSpace(writer, PROTOCOL_PREFIX, PROTOCOL_NSURI.get());
        StaxUtil.writeNameSpace(writer, ASSERTION_PREFIX, ASSERTION_NSURI.get());
        StaxUtil.writeDefaultNameSpace(writer, ASSERTION_NSURI.get());

        // Attributes
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.ID.get(), logOutRequest.getID());
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.VERSION.get(), logOutRequest.getVersion());
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.ISSUE_INSTANT.get(), logOutRequest.getIssueInstant().toString());

        URI destination = logOutRequest.getDestination();
        if (destination != null) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.DESTINATION.get(), destination.toASCIIString());
        }

        String consent = logOutRequest.getConsent();
        if (StringUtil.isNotNull(consent))
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.CONSENT.get(), consent);

        // NOTE(review): unlike the AuthnRequest path, the issuer is written
        // without a null check here — a null issuer would NPE; confirm callers
        // always set it.
        NameIDType issuer = logOutRequest.getIssuer();
        write(issuer, new QName(ASSERTION_NSURI.get(), JBossSAMLConstants.ISSUER.get(), ASSERTION_PREFIX));

        Element signature = logOutRequest.getSignature();
        if (signature != null) {
            StaxUtil.writeDOMElement(writer, signature);
        }

        ExtensionsType extensions = logOutRequest.getExtensions();
        if (extensions != null && ! extensions.getAny().isEmpty()) {
            write(extensions);
        }

        NameIDType nameID = logOutRequest.getNameID();
        if (nameID != null) {
            write(nameID, new QName(ASSERTION_NSURI.get(), JBossSAMLConstants.NAMEID.get(), ASSERTION_PREFIX));
        }

        // One samlp:SessionIndex element per session index value.
        List<String> sessionIndexes = logOutRequest.getSessionIndex();

        for (String sessionIndex : sessionIndexes) {
            StaxUtil.writeStartElement(writer, PROTOCOL_PREFIX, JBossSAMLConstants.SESSION_INDEX.get(), PROTOCOL_NSURI.get());

            StaxUtil.writeCharacters(writer, sessionIndex);

            StaxUtil.writeEndElement(writer);

            StaxUtil.flush(writer);
        }

        StaxUtil.writeEndElement(writer);
        StaxUtil.flush(writer);
    }

    /**
     * Write a {@code NameIDPolicyType} to stream
     *
     * @param nameIDPolicy
     *
     * @throws ProcessingException
     */
    public void write(NameIDPolicyType nameIDPolicy) throws ProcessingException {
        StaxUtil.writeStartElement(writer, PROTOCOL_PREFIX, JBossSAMLConstants.NAMEID_POLICY.get(), PROTOCOL_NSURI.get());

        URI format = nameIDPolicy.getFormat();
        if (format != null) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.FORMAT.get(), format.toASCIIString());
        }

        String spNameQualifier = nameIDPolicy.getSPNameQualifier();
        if (StringUtil.isNotNull(spNameQualifier)) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.SP_NAME_QUALIFIER.get(), spNameQualifier);
        }

        Boolean allowCreate = nameIDPolicy.isAllowCreate();
        // The NameID AllowCreate attribute must not be used when using the transient NameID format.
        if (allowCreate != null && (format == null || !NAMEID_FORMAT_TRANSIENT.get().equals(format.toASCIIString()))) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.ALLOW_CREATE.get(), allowCreate.toString());
        }

        StaxUtil.writeEndElement(writer);
        StaxUtil.flush(writer);
    }

    /**
     * Write a {@code RequestedAuthnContextType} to stream
     *
     * @param requestedAuthnContextType
     *
     * @throws ProcessingException
     */
    public void write(RequestedAuthnContextType requestedAuthnContextType) throws ProcessingException {
        StaxUtil.writeStartElement(writer, PROTOCOL_PREFIX, JBossSAMLConstants.REQUESTED_AUTHN_CONTEXT.get(), PROTOCOL_NSURI.get());

        AuthnContextComparisonType comparison = requestedAuthnContextType.getComparison();

        if (comparison != null) {
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.COMPARISON.get(), comparison.value());
        }

        // Each class reference becomes its own saml:AuthnContextClassRef
        // element carrying the assertion namespace.
        List<String> authnContextClassRef = requestedAuthnContextType.getAuthnContextClassRef();

        if (authnContextClassRef != null && !authnContextClassRef.isEmpty()) {
            for (String classRef : authnContextClassRef) {
                StaxUtil.writeStartElement(writer, ASSERTION_PREFIX, JBossSAMLConstants.AUTHN_CONTEXT_CLASS_REF.get(), ASSERTION_NSURI.get());
                StaxUtil.writeNameSpace(writer, ASSERTION_PREFIX, ASSERTION_NSURI.get());
                StaxUtil.writeCharacters(writer, classRef);
                StaxUtil.writeEndElement(writer);
            }
        }

        // Same pattern for declaration references.
        List<String> authnContextDeclRef = requestedAuthnContextType.getAuthnContextDeclRef();

        if (authnContextDeclRef != null && !authnContextDeclRef.isEmpty()) {
            for (String declRef : authnContextDeclRef) {
                StaxUtil.writeStartElement(writer, ASSERTION_PREFIX, JBossSAMLConstants.AUTHN_CONTEXT_DECL_REF.get(), ASSERTION_NSURI.get());
                StaxUtil.writeNameSpace(writer, ASSERTION_PREFIX, ASSERTION_NSURI.get());
                StaxUtil.writeCharacters(writer, declRef);
                StaxUtil.writeEndElement(writer);
            }
        }

        StaxUtil.writeEndElement(writer);
        StaxUtil.flush(writer);
    }

    /**
     * Write an {@code ArtifactResolveType} (samlp:ArtifactResolve) to stream.
     *
     * @param request the artifact resolve request to serialize
     *
     * @throws ProcessingException
     */
    public void write(ArtifactResolveType request) throws ProcessingException {
        StaxUtil.writeStartElement(writer, PROTOCOL_PREFIX, JBossSAMLConstants.ARTIFACT_RESOLVE.get(), PROTOCOL_NSURI.get());
        StaxUtil.writeNameSpace(writer, PROTOCOL_PREFIX, PROTOCOL_NSURI.get());
        StaxUtil.writeNameSpace(writer, ASSERTION_PREFIX, ASSERTION_NSURI.get());
        StaxUtil.writeDefaultNameSpace(writer, ASSERTION_NSURI.get());

        // Attributes
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.ID.get(), request.getID());
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.VERSION.get(), request.getVersion());
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.ISSUE_INSTANT.get(), request.getIssueInstant().toString());

        URI destination = request.getDestination();
        if (destination != null)
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.DESTINATION.get(), destination.toASCIIString());

        String consent = request.getConsent();
        if (StringUtil.isNotNull(consent))
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.CONSENT.get(), consent);

        NameIDType issuer = request.getIssuer();
        if (issuer != null) {
            write(issuer, new QName(ASSERTION_NSURI.get(), JBossSAMLConstants.ISSUER.get(), ASSERTION_PREFIX));
        }
        Element sig = request.getSignature();
        if (sig != null) {
            StaxUtil.writeDOMElement(writer, sig);
        }
        ExtensionsType extensions = request.getExtensions();
        if (extensions != null && ! extensions.getAny().isEmpty()) {
            write(extensions);
        }

        // The artifact value itself, as a samlp:Artifact child element.
        String artifact = request.getArtifact();
        if (StringUtil.isNotNull(artifact)) {
            StaxUtil.writeStartElement(writer, PROTOCOL_PREFIX, JBossSAMLConstants.ARTIFACT.get(), PROTOCOL_NSURI.get());
            StaxUtil.writeCharacters(writer, artifact);
            StaxUtil.writeEndElement(writer);
        }

        StaxUtil.writeEndElement(writer);
        StaxUtil.flush(writer);
    }

    /**
     * Write an {@code AttributeQueryType} (samlp:AttributeQuery) to stream.
     *
     * @param request the attribute query to serialize
     *
     * @throws ProcessingException
     */
    public void write(AttributeQueryType request) throws ProcessingException {
        StaxUtil.writeStartElement(writer, PROTOCOL_PREFIX, JBossSAMLConstants.ATTRIBUTE_QUERY.get(), PROTOCOL_NSURI.get());
        StaxUtil.writeNameSpace(writer, PROTOCOL_PREFIX, PROTOCOL_NSURI.get());
        StaxUtil.writeNameSpace(writer, ASSERTION_PREFIX, ASSERTION_NSURI.get());
        StaxUtil.writeDefaultNameSpace(writer, ASSERTION_NSURI.get());

        // Attributes
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.ID.get(), request.getID());
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.VERSION.get(), request.getVersion());
        StaxUtil.writeAttribute(writer, JBossSAMLConstants.ISSUE_INSTANT.get(), request.getIssueInstant().toString());

        URI destination = request.getDestination();
        if (destination != null)
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.DESTINATION.get(), destination.toASCIIString());

        String consent = request.getConsent();
        if (StringUtil.isNotNull(consent))
            StaxUtil.writeAttribute(writer, JBossSAMLConstants.CONSENT.get(), consent);

        NameIDType issuer = request.getIssuer();
        if (issuer != null) {
            write(issuer, new QName(ASSERTION_NSURI.get(), JBossSAMLConstants.ISSUER.get(), ASSERTION_PREFIX));
        }
        Element sig = request.getSignature();
        if (sig != null) {
            StaxUtil.writeDOMElement(writer, sig);
        }
        ExtensionsType extensions = request.getExtensions();
        if (extensions != null && ! extensions.getAny().isEmpty()) {
            write(extensions);
        }
        SubjectType subject = request.getSubject();
        if (subject != null) {
            write(subject);
        }
        // One saml:Attribute element per requested attribute.
        List<AttributeType> attributes = request.getAttribute();
        for (AttributeType attr : attributes) {
            write(attr);
        }

        StaxUtil.writeEndElement(writer);
        StaxUtil.flush(writer);
    }
}
package com.conlini.es.tmdb.river.core;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.river.AbstractRiverComponent;
import org.elasticsearch.river.River;
import org.elasticsearch.river.RiverName;
import org.elasticsearch.river.RiverSettings;
import org.springframework.web.client.RestTemplate;

import com.conlini.es.tmdb.river.core.ControlFlowManager.PHASE;
import com.conlini.es.tmdb.river.core.ControlFlowManager.PHASE_STAGE;
import com.conlini.es.tmdb.river.core.ControlFlowManager.PhaseStageListener;
import com.conlini.es.tmdb.river.pojo.DiscoverResponse;
import com.conlini.es.tmdb.river.pojo.DiscoverResult;
import com.conlini.es.tmdb.river.pojo.Movie;
import com.conlini.es.tmdb.river.pojo.SourceProvider;
import com.conlini.es.tmdb.river.pojo.TV;

/**
 * Elasticsearch river that scrapes content from the TMDB "discover" API and
 * indexes it. Configuration (API key, discovery type, page limits, filters,
 * optional year range) is read from the river settings in the constructor;
 * {@link #start()} kicks off the scrape and {@link #onPhase} drives the
 * year-by-year iteration and final cleanup.
 */
public class TMDBRiver extends AbstractRiverComponent implements River,
        PhaseStageListener {

    private Client client;

    // TMDB API key; the river refuses to start without it.
    private String apiKey;

    // Optional cap on pages to fetch; when null the API's total page count is used.
    private Integer maxPages;

    private boolean lastPageFetched = false;

    // Optional user-supplied ES mapping for the content type.
    private Map<String, Object> mapping;

    private Integer bulkAPIThreshold = 100000;

    /**
     * Discovery endpoints supported by the river. Each constant carries the
     * TMDB path, the content path segment, the ES type name, and the POJO
     * class used to deserialize results.
     */
    public static enum DISCOVERY_TYPE {
        MOVIE("/discover/movie", "movie", Constants.TYPE, Movie.class), TV(
                "/discover/tv", "tv", Constants.TYPE, TV.class), ALL(null,
                null, null, null);

        public final String path;
        public final String contentPath;
        public final String esType;
        public final Class<? extends SourceProvider> sourceClass;

        private DISCOVERY_TYPE(String path, String contentPath, String esType,
                Class<? extends SourceProvider> sourceClass) {
            this.path = path;
            this.esType = esType;
            this.contentPath = contentPath;
            this.sourceClass = sourceClass;
        }

        public String getPath() {
            return this.path;
        }

        public String getEsType() {
            return this.esType;
        }

        public String getContentPath() {
            return this.contentPath;
        }
    }

    private DISCOVERY_TYPE discoveryType = DISCOVERY_TYPE.MOVIE;

    private ControlFlowManager controlFlowManager;

    // Single-slot hand-off queue for discover results.
    private BlockingQueue<List<DiscoverResult>> queues = new ArrayBlockingQueue<List<DiscoverResult>>(
            1);

    // Extra query-string filters passed straight to the discover API.
    private Map<String, String> filters;

    // Year-range scraping state: when both bounds are set, the river scrapes
    // one year at a time and only terminates once the range is exhausted.
    private int upperYearBound = -1;

    private int lowerYearBound = -1;

    private boolean canTerminate = true;

    /**
     * Builds the river from its settings and registers this instance as a
     * listener for content-scrape completion.
     *
     * @param riverName river name
     * @param settings  river settings (api_key, discovery_type, max_pages,
     *                  content_mapping, bulk_api_threshold, filters,
     *                  year_range "lower~upper")
     * @param client    ES client
     */
    @Inject
    protected TMDBRiver(RiverName riverName, RiverSettings settings,
            Client client) {
        super(riverName, settings);
        this.controlFlowManager = new ControlFlowManager(riverName.getName());
        this.controlFlowManager.registerPhaseStageListener(
                PHASE.CONTENT_SCRAPE, PHASE_STAGE.COMPLETE, this);
        this.client = client;
        Map<String, Object> settingMap = settings.settings();
        if (settingMap.containsKey("api_key")) {
            this.apiKey = (String) settingMap.get("api_key");
        }
        if (settingMap.containsKey("discovery_type")) {
            String discovery_type = (String) settingMap.get("discovery_type");
            if (discovery_type.equals("tv")) {
                discoveryType = DISCOVERY_TYPE.TV;
            } else if (discovery_type.equals("movie")) {
                discoveryType = DISCOVERY_TYPE.MOVIE;
            }
        }
        if (settingMap.containsKey("max_pages")) {
            maxPages = (Integer) settingMap.get("max_pages");
        }
        if (settingMap.containsKey("content_mapping")) {
            logger.info("Found user defined mapping");
            Map<String, Object> map = (Map<String, Object>) settingMap
                    .get("content_mapping");
            this.mapping = new HashMap<String, Object>();
            this.mapping.put(Constants.TYPE, map);
        }
        if (settingMap.containsKey("bulk_api_threshold")) {
            bulkAPIThreshold = (Integer) settingMap.get("bulk_api_threshold");
        }
        if (settingMap.containsKey("filters")) {
            this.filters = (Map<String, String>) settingMap.get("filters");
        }
        if (settingMap.containsKey("year_range")) {
            // Format: "<lower>~<upper>", e.g. "2000~2010".
            String[] range = ((String) settingMap.get("year_range")).split("~");
            this.lowerYearBound = Integer.parseInt(range[0]);
            this.upperYearBound = Integer.parseInt(range[1]);
        }
        // print all the settings that have been extracted. Assert that we
        // Received the api key. Don;t print it out for security reasons.
        logger.info(String.format("Recieved apiKey --> %s",
                (null != apiKey && !apiKey.equals(""))));
        logger.info(String.format("Discovery Type --> %s", discoveryType));
        logger.info("String max_pages --> " + maxPages);
        logger.info("mapping --> " + mapping);
        logger.info("bulk_api_threshold --> " + bulkAPIThreshold);
        logger.info("Filters -- > " + filters);
        logger.info("Lower/Upper year bounds --> " + this.lowerYearBound + "/"
                + this.upperYearBound);
    }

    public RiverName riverName() {
        return this.riverName;
    }

    /**
     * Starts the scrape: ensures the index (and optional mapping) exists,
     * applies the first year-range filter when configured, computes the page
     * count, and launches the content- and page-scrape phases.
     * Does nothing but log an error when no API key was configured.
     */
    public void start() {
        logger.info(String.format("Starting %s river", riverName));
        // check if the apiKey has been signalled. There is no point of
        // proceeding if that is not there
        if (null != apiKey && !apiKey.equals("")) {
            // intitalize the index
            if (!client.admin().indices().prepareExists(Constants.INDEX_NAME)
                    .get().isExists()) {
                client.admin().indices().prepareCreate(Constants.INDEX_NAME)
                        .get();
            }
            // if a user defined mapping has been sent, update the mapping
            if (this.mapping != null) {
                client.admin().indices()
                        .preparePutMapping(Constants.INDEX_NAME)
                        .setType(Constants.TYPE).setSource(mapping).execute()
                        .actionGet();
            }
            RestTemplate template = APIUtil.initTemplate();
            if (this.lowerYearBound != -1 && this.upperYearBound != -1) {
                // Year-range mode: keep the river alive until every year in
                // the range has been scraped (see onPhase).
                this.canTerminate = false;
                addYearRange();
            }
            String fetchUrl = buildFetchURL();
            controlFlowManager.startContentScrape(apiKey, discoveryType,
                    client, bulkAPIThreshold);
            computeMaxPage(template, fetchUrl);
            this.controlFlowManager.startPageScrape(apiKey, fetchUrl, maxPages);
        } else {
            logger.error("No API Key found. Nothing being pulled");
        }
    }

    /**
     * Builds the discover URL template (with api_key/page_no placeholders),
     * appending any configured filters.
     *
     * @return the URL template used for page fetches.
     */
    private String buildFetchURL() {
        String fetchUrl = Constants.basePath + discoveryType.getPath()
                + "?api_key={api_key}&page={page_no}";
        if (filters != null && !filters.isEmpty()) {
            fetchUrl = APIUtil.addFilters(fetchUrl, filters);
        }
        logger.info("Fetch URL for discovery --> " + fetchUrl);
        return fetchUrl;
    }

    /**
     * Fetches page 1 to learn the API's total page count and sets
     * {@link #maxPages} to the smaller of the configured limit and that total.
     *
     * @param template REST template to query with.
     * @param fetchUrl URL template from {@link #buildFetchURL()}.
     */
    private void computeMaxPage(RestTemplate template, String fetchUrl) {
        DiscoverResponse response = template.getForObject(fetchUrl,
                DiscoverResponse.class, APIUtil.getVariableVals(apiKey, "1"));
        logger.info(String.format(
                "Received response for %d content. Fetching %d pages ",
                response.getTotalResults(), response.getTotalPages()));
        maxPages = this.maxPages == null ? response.getTotalPages()
                : (this.maxPages < response.getTotalPages() ? this.maxPages
                        : response.getTotalPages());
        logger.info("Max page computed --> " + maxPages);
    }

    /**
     * Installs release/first-air date filters covering exactly one calendar
     * year (the current {@code lowerYearBound}) and advances the bound, so
     * successive calls walk the configured year range one year at a time.
     */
    private void addYearRange() {
        if (this.filters == null) {
            this.filters = new HashMap<String, String>();
        }
        String lte = this.lowerYearBound + "-12-31";
        String gte = this.lowerYearBound + "-01-01";
        this.lowerYearBound++;
        if (discoveryType.equals(DISCOVERY_TYPE.MOVIE)) {
            filters.put("release_date.lte", lte);
            filters.put("release_date.gte", gte);
        } else if (discoveryType.equals(DISCOVERY_TYPE.TV)) {
            filters.put("first_air_date.lte", lte);
            filters.put("first_air_date.gte", gte);
        }
    }

    public void close() {
        logger.info("close called");
        controlFlowManager.close();
    }

    public Client getClient() {
        return client;
    }

    public void setClient(Client client) {
        this.client = client;
    }

    /**
     * Phase-transition callback. On content-scrape completion (when allowed
     * to terminate) the river's own mapping is deleted; on page-scrape
     * completion in year-range mode, either the next year is scheduled or the
     * river is marked terminable.
     *
     * @param phase the phase that changed.
     * @param stage the stage it reached.
     */
    @Override
    public void onPhase(PHASE phase, PHASE_STAGE stage) {
        if (phase.equals(PHASE.CONTENT_SCRAPE)
                && stage.equals(PHASE_STAGE.COMPLETE) && canTerminate) {
            logger.debug("Done scrapping. Deleting mapping");
            // delete the mapping. We are done with the scrape
            client.admin().indices().prepareDeleteMapping("_river")
                    .setType(riverName.name()).execute();
        } else if (phase.equals(PHASE.PAGE_SCRAPE)
                && stage.equals(PHASE_STAGE.COMPLETE)) {
            if (lowerYearBound > upperYearBound) {
                canTerminate = true;
                logger.info("Fetched complete year range. Can terminate");
            } else {
                // More years left: shift the filter window and scrape again.
                addYearRange();
                String fetchUrl = buildFetchURL();
                computeMaxPage(APIUtil.initTemplate(), fetchUrl);
                this.controlFlowManager.startPageScrape(apiKey, fetchUrl,
                        maxPages);
            }
        }
    }
}
/*
 * Copyright 2014 Unicon, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.unicon.cas.support.wsfederation;

import net.unicon.cas.support.wsfederation.authentication.principal.WsFederationCredential;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.opensaml.DefaultBootstrap;
import org.opensaml.saml1.core.Assertion;
import org.opensaml.saml1.core.Attribute;
import org.opensaml.saml1.core.Conditions;
import org.opensaml.saml1.core.impl.AssertionImpl;
import org.opensaml.ws.wsfed.RequestedSecurityToken;
import org.opensaml.ws.wsfed.impl.RequestSecurityTokenResponseImpl;
import org.opensaml.xml.Configuration;
import org.opensaml.xml.ConfigurationException;
import org.opensaml.xml.io.Unmarshaller;
import org.opensaml.xml.io.UnmarshallerFactory;
import org.opensaml.xml.parse.BasicParserPool;
import org.opensaml.xml.schema.XSAny;
import org.opensaml.xml.security.x509.BasicX509Credential;
import org.opensaml.xml.security.x509.X509Credential;
import org.opensaml.xml.signature.Signature;
import org.opensaml.xml.signature.SignatureValidator;
import org.opensaml.xml.validation.ValidationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.Resource;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.security.KeyFactory;
import java.security.PublicKey;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.security.spec.X509EncodedKeySpec;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

/**
 * Helper class that does the heavy lifting with the openSaml library.
 *
 * @author John Gasper
 * @since 3.5.2
 */
public final class WsFederationUtils {

    private static final Logger LOGGER = LoggerFactory.getLogger(WsFederationUtils.class);

    /**
     * Initializes the openSaml library. Failure here leaves the library
     * unusable, so the full stack trace is logged (not just the message).
     */
    static {
        try {
            DefaultBootstrap.bootstrap();
        } catch (final ConfigurationException ex) {
            LOGGER.error("Unable to bootstrap the OpenSAML library", ex);
        }
    }

    /**
     * Private constructor: utility class, never instantiated.
     */
    private WsFederationUtils() {
    }

    /**
     * Converts a SAML 1.1 assertion to a WsFederationCredential.
     *
     * @param assertion the provided assertion
     * @return an equivalent credential
     */
    public static WsFederationCredential createCredentialFromToken(final Assertion assertion) {
        final DateTime retrievedOn = new DateTime().withZone(DateTimeZone.UTC);
        LOGGER.debug("createCredentialFromToken: retrieved on {}", retrievedOn);

        final WsFederationCredential credential = new WsFederationCredential();
        credential.setRetrievedOn(retrievedOn);
        credential.setId(assertion.getID());
        credential.setIssuer(assertion.getIssuer());
        credential.setIssuedOn(assertion.getIssueInstant());

        final Conditions conditions = assertion.getConditions();
        if (conditions != null) {
            credential.setNotBefore(conditions.getNotBefore());
            credential.setNotOnOrAfter(conditions.getNotOnOrAfter());
            // First audience of the first audience-restriction condition.
            credential.setAudience(conditions.getAudienceRestrictionConditions().get(0).getAudiences().get(0).getUri());
        }

        if (assertion.getAuthenticationStatements() != null && !assertion.getAuthenticationStatements().isEmpty()) {
            credential.setAuthenticationMethod(assertion.getAuthenticationStatements().get(0).getAuthenticationMethod());
        }

        // Retrieve the attributes from the assertion. Single-valued attributes
        // map to a String; multi-valued attributes map to a List<String>.
        final HashMap<String, Object> attributes = new HashMap<String, Object>();
        for (final Attribute item : assertion.getAttributeStatements().get(0).getAttributes()) {
            LOGGER.debug("createCredentialFromToken: processed attribute: {}", item.getAttributeName());

            if (item.getAttributeValues().size() == 1) {
                attributes.put(item.getAttributeName(), ((XSAny) item.getAttributeValues().get(0)).getTextContent());
            } else {
                final List<String> itemList = new ArrayList<String>();
                for (int i = 0; i < item.getAttributeValues().size(); i++) {
                    itemList.add(((XSAny) item.getAttributeValues().get(i)).getTextContent());
                }
                if (!itemList.isEmpty()) {
                    attributes.put(item.getAttributeName(), itemList);
                }
            }
        }
        credential.setAttributes(attributes);
        LOGGER.debug("createCredentialFromToken: {}", credential);
        return credential;
    }

    /**
     * Loads up an X509Credential from a signing certificate file.
     *
     * @param resource the signing certificate file
     * @return an X509 credential, or {@code null} if the certificate could not be read
     */
    public static X509Credential getSigningCredential(final Resource resource) {
        try (final InputStream inputStream = resource.getInputStream()) {
            // Grab the certificate file.
            final CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
            final X509Certificate certificate = (X509Certificate) certificateFactory.generateCertificate(inputStream);

            // Get the public key from the certificate and rebuild it through
            // an RSA KeyFactory so it can be used to validate signatures.
            final X509EncodedKeySpec publicKeySpec = new X509EncodedKeySpec(certificate.getPublicKey().getEncoded());
            final KeyFactory keyFactory = KeyFactory.getInstance("RSA");
            final PublicKey publicKey = keyFactory.generatePublic(publicKeySpec);

            final BasicX509Credential publicCredential = new BasicX509Credential();
            publicCredential.setPublicKey(publicKey);
            LOGGER.debug("getSigningCredential: key retrieved.");
            return publicCredential;
        } catch (final Exception ex) {
            // Pass the throwable itself so the full stack trace is logged.
            LOGGER.error("I/O error retrieving the signing cert", ex);
            return null;
        }
    }

    /**
     * Converts a raw wresult and extracts it into an assertion.
     *
     * @param wresult the raw token returned by the IdP
     * @return an assertion, or {@code null} if the token could not be parsed
     */
    public static Assertion parseTokenFromString(final String wresult) {
        try (final InputStream in = new ByteArrayInputStream(wresult.getBytes(StandardCharsets.UTF_8))) {
            final BasicParserPool parserPool = new BasicParserPool();
            parserPool.setNamespaceAware(true);

            final Document document = parserPool.parse(in);
            final Element metadataRoot = document.getDocumentElement();
            final UnmarshallerFactory unmarshallerFactory = Configuration.getUnmarshallerFactory();
            final Unmarshaller unmarshaller = unmarshallerFactory.getUnmarshaller(metadataRoot);
            final RequestSecurityTokenResponseImpl rsToken =
                    (RequestSecurityTokenResponseImpl) unmarshaller.unmarshall(metadataRoot);

            // Get our SAML token out of the RequestedSecurityToken wrapper.
            final List<RequestedSecurityToken> rst = rsToken.getRequestedSecurityToken();
            final AssertionImpl assertion = (AssertionImpl) rst.get(0).getSecurityTokens().get(0);

            if (assertion == null) {
                LOGGER.debug("parseTokenFromString: assertion null");
            } else {
                LOGGER.debug("parseTokenFromString: {}", assertion);
            }
            return assertion;
        } catch (final Exception ex) {
            LOGGER.warn("parseTokenFromString: unable to parse token", ex);
            return null;
        }
    }

    /**
     * Checks to see if the signature on an assertion is valid against any of
     * the provided signing credentials.
     *
     * @param assertion a provided assertion
     * @param x509Creds list of x509certs to check
     * @return true if the assertion's signature is valid, otherwise false
     */
    public static boolean validateSignature(final Assertion assertion, final List<X509Credential> x509Creds) {
        SignatureValidator signatureValidator;

        for (final X509Credential cred : x509Creds) {
            try {
                signatureValidator = new SignatureValidator(cred);
            } catch (final Exception ex) {
                // A bad credential must not abort the whole check: skip it and
                // try the remaining credentials (the original 'break' here
                // silently ignored every credential after the first failure).
                LOGGER.warn("validateSignature: unable to build validator for a credential", ex);
                continue;
            }

            // Get the signature to validate from the response object.
            final Signature signature = assertion.getSignature();

            try {
                signatureValidator.validate(signature);
                LOGGER.debug("validateSignature: Signature is valid.");
                return true;
            } catch (final ValidationException ex) {
                LOGGER.warn("validateSignature: Signature is NOT valid.", ex);
            }
        }
        LOGGER.warn("validateSignature: Signature doesn't match any signing credential.");
        return false;
    }
}
package com.rolfje.anonimatron.jdbc;

import com.rolfje.anonimatron.anonymizer.AnonymizerService;
import com.rolfje.anonimatron.anonymizer.ToLowerAnonymizer;
import com.rolfje.anonimatron.configuration.Column;
import com.rolfje.anonimatron.configuration.Configuration;
import com.rolfje.anonimatron.configuration.Discriminator;
import com.rolfje.anonimatron.configuration.Table;
import org.apache.log4j.Logger;

import java.io.File;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.assertNotEquals;

/**
 * Integration tests for {@code JdbcAnonymizerService}, run against an
 * in-memory HSQLDB database provided by the base class.
 */
public class JdbcAnonymizerServiceTest extends AbstractInMemoryHsqlDbTest {
    private static final Logger LOG = Logger.getLogger(JdbcAnonymizerServiceTest.class);

    /**
     * Anonymizes 100 plain varchar rows and verifies that every row was
     * rewritten and no rows were lost.
     */
    public void testSimpleStrings() throws Exception {
        // Create a table with some easy testdata. Try-with-resources makes
        // sure the statement is closed even if an insert fails.
        executeSql("create table TABLE1 (COL1 VARCHAR(200), ID IDENTITY)");
        try (PreparedStatement p = connection
                .prepareStatement("insert into TABLE1 (COL1) values (?)")) {
            for (int i = 0; i < 100; i++) {
                p.setString(1, "varcharstring-" + i);
                p.execute();
            }
        }

        // See if the data got inserted
        assertEquals(
                "Data was not inserted.",
                100,
                getIntResult("select count(*) from TABLE1 where COL1 like 'varcharstring%'"));

        // Create anonimatron configuration
        Configuration config = super.createConfiguration();
        super.addToConfig(config, "TABLE1", "COL1", null);

        AnonymizerService anonymizerService = anonymize(config, 100);

        File f = File.createTempFile("test", ".xml");
        f.deleteOnExit();
        anonymizerService.getSynonymCache().toFile(f);
        LOG.debug("Synonyms written to " + f.getAbsolutePath());

        // See if the data got anonymized
        assertEquals(
                "Data was not anonymized completely.",
                0,
                getIntResult("select count(*) from TABLE1 where COL1 like 'varcharstring%'"));
        assertEquals(
                "Rows dissapeared from the data set.",
                100,
                getIntResult("select count(*) from TABLE1 where COL1 not like 'varcharstring%'"));
    }

    /**
     * A generated UUID cannot fit in a VARCHAR(1) column; the service is
     * expected to fail rather than silently truncate.
     */
    public void testTooShortUUID() throws Exception {
        // Create a table with some easy testdata
        executeSql("create table TABLE1 (COL1 VARCHAR(1), ID IDENTITY)");
        executeSql("insert into TABLE1 (COL1) values ('a')");

        // Create anonimatron configuration
        Column col = new Column();
        col.setName("COL1");
        col.setType("UUID");
        Configuration config = super.createConfiguration("TABLE1", col);

        try {
            // Anonymize the data.
            JdbcAnonymizerService serv = new JdbcAnonymizerService(config, new AnonymizerService());
            serv.anonymize();
            fail("Should not be able to store UUID in a 1 character varchar");
        } catch (Exception e) {
            // Test passed: the oversized value was rejected.
        }
    }

    /**
     * A dry run must leave the data untouched; a subsequent real run must
     * change it.
     */
    public void testDryRun() throws Exception {
        executeSql("create table TABLE1 (COL1 VARCHAR(16), ID IDENTITY)");
        executeSql("insert into TABLE1 (COL1) values ('abcdefghijklmnop')");

        // Create anonimatron configuration
        Configuration config = super.createConfiguration();
        super.addToConfig(config, "TABLE1", "COL1", null);

        // Make this a dry run
        config.setDryrun(true);
        anonymize(config, 1);

        // Check the outcome, nothing should be changed
        Statement statement = connection.createStatement();
        statement.execute("select * from TABLE1 order by ID");
        ResultSet resultset = statement.getResultSet();
        resultset.next();
        assertEquals("abcdefghijklmnop", resultset.getString("COL1"));
        resultset.close();
        statement.close();

        // Now do a real (non-dry) run
        config.setDryrun(false);
        anonymize(config, 1);

        // Check the outcome, data should be changed
        statement = connection.createStatement();
        statement.execute("select * from TABLE1 order by ID");
        resultset = statement.getResultSet();
        resultset.next();
        assertNotEquals("abcdefghijklmnop", resultset.getString("COL1"));
        resultset.close();
        statement.close();
    }

    /**
     * A table configured with discriminators only (no default columns) must
     * anonymize exclusively the rows matching the discriminator value.
     */
    public void testDiscriminatorOnly() throws Exception {
        executeSql("create table TABLE1 (id IDENTITY, value1 VARCHAR(100), value2 VARCHAR(100), key VARCHAR(100))");
        executeSql("insert into TABLE1 (value1,value2,key) values ('A','X','NONE')");
        executeSql("insert into TABLE1 (value1,value2,key) values ('B','X','EMAIL')");

        // Create configuration for the table without any column configuration
        Configuration config = super.createConfiguration();
        Table table = new Table();
        table.setName("TABLE1");
        config.getTables().add(table);

        // Add discriminator for the key column to anonymize value1 with an email address if key is EMAIL
        Discriminator discriminator = new Discriminator();
        discriminator.setColumnName("key");
        discriminator.setValue("EMAIL");

        Column emailcol = new Column();
        emailcol.setName("value1");
        emailcol.setType("EMAIL_ADDRESS");
        List<Column> emailcols = new ArrayList<>();
        emailcols.add(emailcol);
        discriminator.setColumns(emailcols);

        List<Discriminator> discriminators = new ArrayList<>();
        discriminators.add(discriminator);
        config.getTables().get(0).setDiscriminators(discriminators);

        anonymize(config, 2);

        Statement statement = connection.createStatement();
        statement.execute("select * from TABLE1 order by ID");
        ResultSet resultset = statement.getResultSet();

        // Row with key 'NONE' must be untouched.
        resultset.next();
        assertEquals("A", resultset.getString("value1"));
        assertEquals("X", resultset.getString("value2"));
        assertEquals("NONE", resultset.getString("key"));

        // Row with key 'EMAIL' must have value1 replaced by an email address.
        resultset.next();
        String value1 = resultset.getString("value1");
        assertTrue("Did not contain example.com: " + value1, value1.contains("@example.com"));
        assertEquals("X", resultset.getString("value2"));
        assertEquals("EMAIL", resultset.getString("key"));

        resultset.close();
        statement.close();
    }

    /**
     * Discriminators (including a null-valued one) must override the default
     * per-column anonymizer configuration.
     */
    public void testDiscriminators() throws Exception {
        executeSql("create table TABLE1 (id IDENTITY, value1 VARCHAR(100), value2 VARCHAR(100), key VARCHAR(100))");
        executeSql("insert into TABLE1 (value1,value2,key) values ('A','X','NONE')");
        executeSql("insert into TABLE1 (value1,value2,key) values ('B','Y','EMAIL')");
        executeSql("insert into TABLE1 (value1,value2,key) values ('C','Z',null)");

        // Create default column configuration
        Configuration config = super.createConfiguration();
        super.addToConfig(config, "TABLE1", "value1", "TO_LOWER_CASE");
        super.addToConfig(config, "TABLE1", "value2", "TO_LOWER_CASE");
        super.addToConfig(config, "TABLE1", "key", "TO_LOWER_CASE");

        ArrayList<String> anonymizerclasses = new ArrayList<>();
        anonymizerclasses.add(ToLowerAnonymizer.class.getName());
        config.setAnonymizerClasses(anonymizerclasses);

        // Add discriminator based on key: key = 'EMAIL' -> email address.
        Discriminator discriminator = new Discriminator();
        discriminator.setColumnName("key");
        discriminator.setValue("EMAIL");

        Column emailcol = new Column();
        emailcol.setName("value1");
        emailcol.setType("EMAIL_ADDRESS");
        List<Column> emailcols = new ArrayList<>();
        emailcols.add(emailcol);
        discriminator.setColumns(emailcols);

        // Second discriminator: key IS NULL -> UUID.
        Discriminator discriminator2 = new Discriminator();
        discriminator2.setColumnName("key");
        discriminator2.setValue(null);

        Column uuidcol = new Column();
        uuidcol.setName("value1");
        uuidcol.setType("UUID");
        List<Column> uuidcolscols = new ArrayList<>();
        uuidcolscols.add(uuidcol);
        discriminator2.setColumns(uuidcolscols);

        List<Discriminator> discriminators = new ArrayList<>();
        discriminators.add(discriminator);
        discriminators.add(discriminator2);
        config.getTables().get(0).setDiscriminators(discriminators);

        anonymize(config, 3);

        LOG.debug("Table contents for TABLE1: \n" + resultSetAsString("select * from TABLE1 order by ID"));

        // Check the outcome
        Statement statement = connection.createStatement();
        statement.execute("select * from TABLE1 order by ID");
        ResultSet resultset = statement.getResultSet();

        resultset.next();
        assertEquals("a", resultset.getString("value1"));
        assertEquals("x", resultset.getString("value2"));
        assertEquals("none", resultset.getString("key"));

        resultset.next();
        assertTrue(resultset.getString("value1").contains("@example.com"));
        assertEquals("y", resultset.getString("value2"));
        assertEquals("email", resultset.getString("key"));

        resultset.next();
        assertTrue(resultset.getString("value1").contains("-"));
        assertEquals("z", resultset.getString("value2"));
        assertNull(resultset.getString("key"));

        resultset.close();
        statement.close();
    }

    /**
     * Progress must be the sum of the rows in every configured table.
     */
    public void testProgressForMultipleTables() throws Exception {
        // Identical setup for three tables: extracted into a helper rather
        // than repeating the create/fill code three times.
        createAndFillVarcharTable("TABLE1", 1251);
        createAndFillVarcharTable("TABLE2", 1251);
        createAndFillVarcharTable("TABLE3", 1251);

        // Create anonimatron configuration
        Configuration config = super.createConfiguration();
        addToConfig(config, "TABLE1", "COL1", null);
        addToConfig(config, "TABLE2", "COL1", null);
        addToConfig(config, "TABLE3", "COL1", null);

        anonymize(config, 3753);
    }

    /**
     * Creates a table with a VARCHAR(200) COL1 / IDENTITY ID layout and fills
     * it with {@code rows} generated varchar values.
     */
    private void createAndFillVarcharTable(String tableName, int rows) throws Exception {
        executeSql("create table " + tableName + " (COL1 VARCHAR(200), ID IDENTITY)");
        try (PreparedStatement p = connection
                .prepareStatement("insert into " + tableName + " (COL1) values (?)")) {
            for (int i = 0; i < rows; i++) {
                p.setString(1, "varcharstring-" + i);
                p.execute();
            }
        }
    }

    /**
     * Smoke-test anonymization of a non-varchar (DATE) column.
     */
    public void testDataTypes() throws Exception {
        executeSql("create table TABLE1 (COL1 DATE, ID IDENTITY)");
        try (PreparedStatement p = connection.prepareStatement("insert into TABLE1 (COL1) values (?)")) {
            for (int i = 0; i < 2; i++) {
                p.setDate(1, new Date(Math.round(System.currentTimeMillis() * Math.random())));
                p.execute();
            }
        }

        Configuration config = super.createConfiguration();
        super.addToConfig(config, "TABLE1", "COL1", null);
        anonymize(config, 2);
    }

    /**
     * Runs the anonymizer for the given configuration and asserts that the
     * reported progress matches the expected number of records.
     */
    private AnonymizerService anonymize(Configuration config, int numberOfRecords) throws Exception {
        // Anonymize the data.
        AnonymizerService anonymizerService = new AnonymizerService();
        anonymizerService.registerAnonymizers(config.getAnonymizerClasses());
        JdbcAnonymizerService serv = new JdbcAnonymizerService(config, anonymizerService);
        serv.anonymize();

        assertEquals(numberOfRecords, serv.getProgress().getTotalitemstodo());
        assertEquals(numberOfRecords, serv.getProgress().getTotalitemscompleted());
        return anonymizerService;
    }

    /**
     * Renders the full result set of the given select as a semicolon-separated
     * string, one row per line, with a header row of column names.
     */
    private String resultSetAsString(String select) throws SQLException {
        try (
                Statement statement = getStatementForSelect(select);
                ResultSet resultset = statement.getResultSet()
        ) {
            ResultSetMetaData rsmd = resultset.getMetaData();
            int numCols = rsmd.getColumnCount();

            StringBuilder sbuilder = new StringBuilder();
            for (int i = 1; i <= numCols; i++) {
                sbuilder.append(rsmd.getColumnName(i));
                sbuilder.append(";");
            }
            sbuilder.append("\n");

            while (resultset.next()) {
                for (int i = 1; i <= numCols; i++) {
                    sbuilder.append(resultset.getObject(i));
                    sbuilder.append(";");
                }
                sbuilder.append("\n");
            }
            return sbuilder.toString();
        }
    }

    /**
     * Returns the first column of the first row of the given query as an int.
     */
    private int getIntResult(String sql) throws Exception {
        try (Statement statement = getStatementForSelect(sql);
             ResultSet resultset = statement.getResultSet()) {
            resultset.next();
            return resultset.getInt(1);
        }
    }

    /**
     * Executes the given select and returns the statement; the caller is
     * responsible for closing it (which also closes its result set).
     */
    private Statement getStatementForSelect(String select) throws SQLException {
        Statement statement = connection.createStatement();
        statement.execute(select);
        return statement;
    }
}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.glacier.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * Provides options for downloading output of an Amazon S3 Glacier job. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class GetJobOutputRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The <code>AccountId</code> value is the AWS account ID of the account that owns the vault. You can either specify * an AWS account ID or optionally a single '<code>-</code>' (hyphen), in which case Amazon S3 Glacier uses the AWS * account ID associated with the credentials used to sign the request. If you use an account ID, do not include any * hyphens ('-') in the ID. * </p> */ private String accountId; /** * <p> * The name of the vault. * </p> */ private String vaultName; /** * <p> * The job ID whose data is downloaded. * </p> */ private String jobId; /** * <p> * The range of bytes to retrieve from the output. For example, if you want to download the first 1,048,576 bytes, * specify the range as <code>bytes=0-1048575</code>. By default, this operation downloads the entire output. * </p> * <p> * If the job output is large, then you can use a range to retrieve a portion of the output. This allows you to * download the entire output in smaller chunks of bytes. 
For example, suppose you have 1 GB of job output you want * to download and you decide to download 128 MB chunks of data at a time, which is a total of eight Get Job Output * requests. You use the following process to download the job output: * </p> * <ol> * <li> * <p> * Download a 128 MB chunk of output by specifying the appropriate byte range. Verify that all 128 MB of data was * received. * </p> * </li> * <li> * <p> * Along with the data, the response includes a SHA256 tree hash of the payload. You compute the checksum of the * payload on the client and compare it with the checksum you received in the response to ensure you received all * the expected data. * </p> * </li> * <li> * <p> * Repeat steps 1 and 2 for all the eight 128 MB chunks of output data, each time specifying the appropriate byte * range. * </p> * </li> * <li> * <p> * After downloading all the parts of the job output, you have a list of eight checksum values. Compute the tree * hash of these values to find the checksum of the entire output. Using the <a>DescribeJob</a> API, obtain job * information of the job that provided you the output. The response includes the checksum of the entire archive * stored in Amazon S3 Glacier. You compare this value with the checksum you computed to ensure you have downloaded * the entire archive content with no errors. * </p> * <p/></li> * </ol> */ private String range; /** * Default constructor for GetJobOutputRequest object. Callers should use the setter or fluent setter (with...) * methods to initialize the object after creating it. */ public GetJobOutputRequest() { } /** * Constructs a new GetJobOutputRequest object. Callers should use the setter or fluent setter (with...) methods to * initialize any additional object members. * * @param vaultName * The name of the vault. * @param jobId * The job ID whose data is downloaded. * @param range * The range of bytes to retrieve from the output. 
For example, if you want to download the first 1,048,576 * bytes, specify the range as <code>bytes=0-1048575</code>. By default, this operation downloads the entire * output.</p> * <p> * If the job output is large, then you can use a range to retrieve a portion of the output. This allows you * to download the entire output in smaller chunks of bytes. For example, suppose you have 1 GB of job output * you want to download and you decide to download 128 MB chunks of data at a time, which is a total of eight * Get Job Output requests. You use the following process to download the job output: * </p> * <ol> * <li> * <p> * Download a 128 MB chunk of output by specifying the appropriate byte range. Verify that all 128 MB of data * was received. * </p> * </li> * <li> * <p> * Along with the data, the response includes a SHA256 tree hash of the payload. You compute the checksum of * the payload on the client and compare it with the checksum you received in the response to ensure you * received all the expected data. * </p> * </li> * <li> * <p> * Repeat steps 1 and 2 for all the eight 128 MB chunks of output data, each time specifying the appropriate * byte range. * </p> * </li> * <li> * <p> * After downloading all the parts of the job output, you have a list of eight checksum values. Compute the * tree hash of these values to find the checksum of the entire output. Using the <a>DescribeJob</a> API, * obtain job information of the job that provided you the output. The response includes the checksum of the * entire archive stored in Amazon S3 Glacier. You compare this value with the checksum you computed to * ensure you have downloaded the entire archive content with no errors. * </p> * <p/></li> */ public GetJobOutputRequest(String vaultName, String jobId, String range) { setVaultName(vaultName); setJobId(jobId); setRange(range); } /** * Constructs a new GetJobOutputRequest object. Callers should use the setter or fluent setter (with...) 
methods to * initialize any additional object members. * * @param accountId * The <code>AccountId</code> value is the AWS account ID of the account that owns the vault. You can either * specify an AWS account ID or optionally a single '<code>-</code>' (hyphen), in which case Amazon S3 * Glacier uses the AWS account ID associated with the credentials used to sign the request. If you use an * account ID, do not include any hyphens ('-') in the ID. * @param vaultName * The name of the vault. * @param jobId * The job ID whose data is downloaded. * @param range * The range of bytes to retrieve from the output. For example, if you want to download the first 1,048,576 * bytes, specify the range as <code>bytes=0-1048575</code>. By default, this operation downloads the entire * output.</p> * <p> * If the job output is large, then you can use a range to retrieve a portion of the output. This allows you * to download the entire output in smaller chunks of bytes. For example, suppose you have 1 GB of job output * you want to download and you decide to download 128 MB chunks of data at a time, which is a total of eight * Get Job Output requests. You use the following process to download the job output: * </p> * <ol> * <li> * <p> * Download a 128 MB chunk of output by specifying the appropriate byte range. Verify that all 128 MB of data * was received. * </p> * </li> * <li> * <p> * Along with the data, the response includes a SHA256 tree hash of the payload. You compute the checksum of * the payload on the client and compare it with the checksum you received in the response to ensure you * received all the expected data. * </p> * </li> * <li> * <p> * Repeat steps 1 and 2 for all the eight 128 MB chunks of output data, each time specifying the appropriate * byte range. * </p> * </li> * <li> * <p> * After downloading all the parts of the job output, you have a list of eight checksum values. Compute the * tree hash of these values to find the checksum of the entire output. 
Using the <a>DescribeJob</a> API, * obtain job information of the job that provided you the output. The response includes the checksum of the * entire archive stored in Amazon S3 Glacier. You compare this value with the checksum you computed to * ensure you have downloaded the entire archive content with no errors. * </p> * <p/></li> */ public GetJobOutputRequest(String accountId, String vaultName, String jobId, String range) { setAccountId(accountId); setVaultName(vaultName); setJobId(jobId); setRange(range); } /** * <p> * The <code>AccountId</code> value is the AWS account ID of the account that owns the vault. You can either specify * an AWS account ID or optionally a single '<code>-</code>' (hyphen), in which case Amazon S3 Glacier uses the AWS * account ID associated with the credentials used to sign the request. If you use an account ID, do not include any * hyphens ('-') in the ID. * </p> * * @param accountId * The <code>AccountId</code> value is the AWS account ID of the account that owns the vault. You can either * specify an AWS account ID or optionally a single '<code>-</code>' (hyphen), in which case Amazon S3 * Glacier uses the AWS account ID associated with the credentials used to sign the request. If you use an * account ID, do not include any hyphens ('-') in the ID. */ public void setAccountId(String accountId) { this.accountId = accountId; } /** * <p> * The <code>AccountId</code> value is the AWS account ID of the account that owns the vault. You can either specify * an AWS account ID or optionally a single '<code>-</code>' (hyphen), in which case Amazon S3 Glacier uses the AWS * account ID associated with the credentials used to sign the request. If you use an account ID, do not include any * hyphens ('-') in the ID. * </p> * * @return The <code>AccountId</code> value is the AWS account ID of the account that owns the vault. 
You can either specify an AWS account ID or optionally a single
 * '<code>-</code>' (hyphen), in which case Amazon S3 Glacier uses the AWS account ID associated
 * with the credentials used to sign the request. If you use an account ID, do not include any
 * hyphens ('-') in the ID.
 */
public String getAccountId() {
    return accountId;
}

/**
 * Sets the <code>AccountId</code> and returns this request so calls can be chained.
 *
 * @param accountId
 *        The AWS account ID of the account that owns the vault, or a single '<code>-</code>'
 *        (hyphen) to use the account associated with the signing credentials. An explicit
 *        account ID must not contain hyphens.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public GetJobOutputRequest withAccountId(String accountId) {
    setAccountId(accountId);
    return this;
}

/**
 * Sets the name of the vault.
 *
 * @param vaultName
 *        The name of the vault.
 */
public void setVaultName(String vaultName) {
    this.vaultName = vaultName;
}

/**
 * Returns the name of the vault.
 *
 * @return The name of the vault.
 */
public String getVaultName() {
    return vaultName;
}

/**
 * Sets the name of the vault and returns this request so calls can be chained.
 *
 * @param vaultName
 *        The name of the vault.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public GetJobOutputRequest withVaultName(String vaultName) {
    setVaultName(vaultName);
    return this;
}

/**
 * Sets the ID of the job whose data is downloaded.
 *
 * @param jobId
 *        The job ID whose data is downloaded.
 */
public void setJobId(String jobId) {
    this.jobId = jobId;
}

/**
 * Returns the ID of the job whose data is downloaded.
 *
 * @return The job ID whose data is downloaded.
 */
public String getJobId() {
    return jobId;
}

/**
 * Sets the job ID and returns this request so calls can be chained.
 *
 * @param jobId
 *        The job ID whose data is downloaded.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public GetJobOutputRequest withJobId(String jobId) {
    setJobId(jobId);
    return this;
}

/**
 * Sets the range of bytes to retrieve from the job output, e.g.
 * <code>bytes=0-1048575</code> for the first 1,048,576 bytes. By default the entire output
 * is downloaded.
 * <p>
 * Large outputs can be fetched in chunks: download each chunk with an appropriate byte
 * range, verify its SHA256 tree hash against the checksum returned in the response, then
 * combine the per-chunk hashes into a tree hash for the whole output and compare it with
 * the archive checksum reported by the <a>DescribeJob</a> API to confirm the download is
 * complete and error-free.
 *
 * @param range
 *        The byte range to retrieve, or {@code null} for the entire output.
 */
public void setRange(String range) {
    this.range = range;
}

/**
 * Returns the byte range to retrieve from the job output (see {@link #setRange(String)}),
 * or {@code null} if the entire output is to be downloaded.
 *
 * @return The range of bytes to retrieve from the output.
 */
public String getRange() {
    return range;
}

/**
 * Sets the byte range (see {@link #setRange(String)}) and returns this request so calls can
 * be chained.
 *
 * @param range
 *        The byte range to retrieve, or {@code null} for the entire output.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public GetJobOutputRequest withRange(String range) {
    setRange(range);
    return this;
}

/**
 * Returns a string representation of this object. This is useful for testing and debugging.
 * Sensitive data will be redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder out = new StringBuilder("{");
    if (getAccountId() != null)
        out.append("AccountId: ").append(getAccountId()).append(",");
    if (getVaultName() != null)
        out.append("VaultName: ").append(getVaultName()).append(",");
    if (getJobId() != null)
        out.append("JobId: ").append(getJobId()).append(",");
    if (getRange() != null)
        out.append("Range: ").append(getRange());
    out.append("}");
    return out.toString();
}

// Null-safe field comparison: two nulls are equal, null never equals non-null.
private static boolean fieldEquals(Object a, Object b) {
    return (a == null) ? (b == null) : a.equals(b);
}

@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (!(obj instanceof GetJobOutputRequest))
        return false;
    GetJobOutputRequest other = (GetJobOutputRequest) obj;
    return fieldEquals(getAccountId(), other.getAccountId())
            && fieldEquals(getVaultName(), other.getVaultName())
            && fieldEquals(getJobId(), other.getJobId())
            && fieldEquals(getRange(), other.getRange());
}

@Override
public int hashCode() {
    // Same accumulation as Objects.hash: 31 * h + fieldHash, null fields contribute 0.
    int hashCode = 1;
    for (Object field : new Object[] { getAccountId(), getVaultName(), getJobId(), getRange() }) {
        hashCode = 31 * hashCode + ((field == null) ? 0 : field.hashCode());
    }
    return hashCode;
}

@Override
public GetJobOutputRequest clone() {
    return (GetJobOutputRequest) super.clone();
}
}
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.content; import android.app.SearchManager; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.net.Uri; import android.text.TextUtils; import android.util.Log; /** * This superclass can be used to create a simple search suggestions provider for your application. * It creates suggestions (as the user types) based on recent queries and/or recent views. * * <p>In order to use this class, you must do the following. * * <ul> * <li>Implement and test query search, as described in {@link android.app.SearchManager}. (This * provider will send any suggested queries via the standard * {@link android.content.Intent#ACTION_SEARCH ACTION_SEARCH} Intent, which you'll already * support once you have implemented and tested basic searchability.)</li> * <li>Create a Content Provider within your application by extending * {@link android.content.SearchRecentSuggestionsProvider}. The class you create will be * very simple - typically, it will have only a constructor. But the constructor has a very * important responsibility: When it calls {@link #setupSuggestions(String, int)}, it * <i>configures</i> the provider to match the requirements of your searchable activity.</li> * <li>Create a manifest entry describing your provider. 
Typically this would be as simple * as adding the following lines: * <pre class="prettyprint"> * &lt;!-- Content provider for search suggestions --&gt; * &lt;provider android:name="YourSuggestionProviderClass" * android:authorities="your.suggestion.authority" /&gt;</pre> * </li> * <li>Please note that you <i>do not</i> instantiate this content provider directly from within * your code. This is done automatically by the system Content Resolver, when the search dialog * looks for suggestions.</li> * <li>In order for the Content Resolver to do this, you must update your searchable activity's * XML configuration file with information about your content provider. The following additions * are usually sufficient: * <pre class="prettyprint"> * android:searchSuggestAuthority="your.suggestion.authority" * android:searchSuggestSelection=" ? "</pre> * </li> * <li>In your searchable activities, capture any user-generated queries and record them * for future searches by calling {@link android.provider.SearchRecentSuggestions#saveRecentQuery * SearchRecentSuggestions.saveRecentQuery()}.</li> * </ul> * * <div class="special reference"> * <h3>Developer Guides</h3> * <p>For information about using search suggestions in your application, read the * <a href="{@docRoot}guide/topics/search/index.html">Search</a> developer guide.</p> * </div> * * @see android.provider.SearchRecentSuggestions */ public class SearchRecentSuggestionsProvider extends ContentProvider { // debugging support private static final String TAG = "SuggestionsProvider"; // client-provided configuration values private String mAuthority; private int mMode; private boolean mTwoLineDisplay; // general database configuration and tables private SQLiteOpenHelper mOpenHelper; private static final String sDatabaseName = "suggestions.db"; private static final String sSuggestions = "suggestions"; private static final String ORDER_BY = "date DESC"; private static final String NULL_COLUMN = "query"; // Table of database 
versions. Don't forget to update! // NOTE: These version values are shifted left 8 bits (x 256) in order to create space for // a small set of mode bitflags in the version int. // // 1 original implementation with queries, and 1 or 2 display columns // 1->2 added UNIQUE constraint to display1 column private static final int DATABASE_VERSION = 2 * 256; /** * This mode bit configures the database to record recent queries. <i>required</i> * * @see #setupSuggestions(String, int) */ public static final int DATABASE_MODE_QUERIES = 1; /** * This mode bit configures the database to include a 2nd annotation line with each entry. * <i>optional</i> * * @see #setupSuggestions(String, int) */ public static final int DATABASE_MODE_2LINES = 2; // Uri and query support private static final int URI_MATCH_SUGGEST = 1; private Uri mSuggestionsUri; private UriMatcher mUriMatcher; private String mSuggestSuggestionClause; private String[] mSuggestionProjection; /** * Builds the database. This version has extra support for using the version field * as a mode flags field, and configures the database columns depending on the mode bits * (features) requested by the extending class. 
* * @hide */ private static class DatabaseHelper extends SQLiteOpenHelper { private int mNewVersion; public DatabaseHelper(Context context, int newVersion) { super(context, sDatabaseName, null, newVersion); mNewVersion = newVersion; } @Override public void onCreate(SQLiteDatabase db) { StringBuilder builder = new StringBuilder(); builder.append("CREATE TABLE suggestions (" + "_id INTEGER PRIMARY KEY" + ",display1 TEXT UNIQUE ON CONFLICT REPLACE"); if (0 != (mNewVersion & DATABASE_MODE_2LINES)) { builder.append(",display2 TEXT"); } builder.append(",query TEXT" + ",date LONG" + ");"); db.execSQL(builder.toString()); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { Log.w(TAG, "Upgrading database from version " + oldVersion + " to " + newVersion + ", which will destroy all old data"); db.execSQL("DROP TABLE IF EXISTS suggestions"); onCreate(db); } } /** * In order to use this class, you must extend it, and call this setup function from your * constructor. In your application or activities, you must provide the same values when * you create the {@link android.provider.SearchRecentSuggestions} helper. * * @param authority This must match the authority that you've declared in your manifest. * @param mode You can use mode flags here to determine certain functional aspects of your * database. Note, this value should not change from run to run, because when it does change, * your suggestions database may be wiped. 
* * @see #DATABASE_MODE_QUERIES * @see #DATABASE_MODE_2LINES */ protected void setupSuggestions(String authority, int mode) { if (TextUtils.isEmpty(authority) || ((mode & DATABASE_MODE_QUERIES) == 0)) { throw new IllegalArgumentException(); } // unpack mode flags mTwoLineDisplay = (0 != (mode & DATABASE_MODE_2LINES)); // saved values mAuthority = new String(authority); mMode = mode; // derived values mSuggestionsUri = Uri.parse("content://" + mAuthority + "/suggestions"); mUriMatcher = new UriMatcher(UriMatcher.NO_MATCH); mUriMatcher.addURI(mAuthority, SearchManager.SUGGEST_URI_PATH_QUERY, URI_MATCH_SUGGEST); if (mTwoLineDisplay) { mSuggestSuggestionClause = "display1 LIKE ? OR display2 LIKE ?"; mSuggestionProjection = new String [] { "0 AS " + SearchManager.SUGGEST_COLUMN_FORMAT, "'android.resource://system/" + com.android.internal.R.drawable.ic_menu_recent_history + "' AS " + SearchManager.SUGGEST_COLUMN_ICON_1, "display1 AS " + SearchManager.SUGGEST_COLUMN_TEXT_1, "display2 AS " + SearchManager.SUGGEST_COLUMN_TEXT_2, "query AS " + SearchManager.SUGGEST_COLUMN_QUERY, "_id" }; } else { mSuggestSuggestionClause = "display1 LIKE ?"; mSuggestionProjection = new String [] { "0 AS " + SearchManager.SUGGEST_COLUMN_FORMAT, "'android.resource://system/" + com.android.internal.R.drawable.ic_menu_recent_history + "' AS " + SearchManager.SUGGEST_COLUMN_ICON_1, "display1 AS " + SearchManager.SUGGEST_COLUMN_TEXT_1, "query AS " + SearchManager.SUGGEST_COLUMN_QUERY, "_id" }; } } /** * This method is provided for use by the ContentResolver. Do not override, or directly * call from your own code. 
*/ @Override public int delete(Uri uri, String selection, String[] selectionArgs) { SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int length = uri.getPathSegments().size(); if (length != 1) { throw new IllegalArgumentException("Unknown Uri"); } final String base = uri.getPathSegments().get(0); int count = 0; if (base.equals(sSuggestions)) { count = db.delete(sSuggestions, selection, selectionArgs); } else { throw new IllegalArgumentException("Unknown Uri"); } getContext().getContentResolver().notifyChange(uri, null); return count; } /** * This method is provided for use by the ContentResolver. Do not override, or directly * call from your own code. */ @Override public String getType(Uri uri) { if (mUriMatcher.match(uri) == URI_MATCH_SUGGEST) { return SearchManager.SUGGEST_MIME_TYPE; } int length = uri.getPathSegments().size(); if (length >= 1) { String base = uri.getPathSegments().get(0); if (base.equals(sSuggestions)) { if (length == 1) { return "vnd.android.cursor.dir/suggestion"; } else if (length == 2) { return "vnd.android.cursor.item/suggestion"; } } } throw new IllegalArgumentException("Unknown Uri"); } /** * This method is provided for use by the ContentResolver. Do not override, or directly * call from your own code. 
*/ @Override public Uri insert(Uri uri, ContentValues values) { SQLiteDatabase db = mOpenHelper.getWritableDatabase(); int length = uri.getPathSegments().size(); if (length < 1) { throw new IllegalArgumentException("Unknown Uri"); } // Note: This table has on-conflict-replace semantics, so insert() may actually replace() long rowID = -1; String base = uri.getPathSegments().get(0); Uri newUri = null; if (base.equals(sSuggestions)) { if (length == 1) { rowID = db.insert(sSuggestions, NULL_COLUMN, values); if (rowID > 0) { newUri = Uri.withAppendedPath(mSuggestionsUri, String.valueOf(rowID)); } } } if (rowID < 0) { throw new IllegalArgumentException("Unknown Uri"); } getContext().getContentResolver().notifyChange(newUri, null); return newUri; } /** * This method is provided for use by the ContentResolver. Do not override, or directly * call from your own code. */ @Override public boolean onCreate() { if (mAuthority == null || mMode == 0) { throw new IllegalArgumentException("Provider not configured"); } int mWorkingDbVersion = DATABASE_VERSION + mMode; mOpenHelper = new DatabaseHelper(getContext(), mWorkingDbVersion); return true; } /** * This method is provided for use by the ContentResolver. Do not override, or directly * call from your own code. */ // TODO: Confirm no injection attacks here, or rewrite. 
@Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { SQLiteDatabase db = mOpenHelper.getReadableDatabase(); // special case for actual suggestions (from search manager) if (mUriMatcher.match(uri) == URI_MATCH_SUGGEST) { String suggestSelection; String[] myArgs; if (TextUtils.isEmpty(selectionArgs[0])) { suggestSelection = null; myArgs = null; } else { String like = "%" + selectionArgs[0] + "%"; if (mTwoLineDisplay) { myArgs = new String [] { like, like }; } else { myArgs = new String [] { like }; } suggestSelection = mSuggestSuggestionClause; } // Suggestions are always performed with the default sort order Cursor c = db.query(sSuggestions, mSuggestionProjection, suggestSelection, myArgs, null, null, ORDER_BY, null); c.setNotificationUri(getContext().getContentResolver(), uri); return c; } // otherwise process arguments and perform a standard query int length = uri.getPathSegments().size(); if (length != 1 && length != 2) { throw new IllegalArgumentException("Unknown Uri"); } String base = uri.getPathSegments().get(0); if (!base.equals(sSuggestions)) { throw new IllegalArgumentException("Unknown Uri"); } String[] useProjection = null; if (projection != null && projection.length > 0) { useProjection = new String[projection.length + 1]; System.arraycopy(projection, 0, useProjection, 0, projection.length); useProjection[projection.length] = "_id AS _id"; } StringBuilder whereClause = new StringBuilder(256); if (length == 2) { whereClause.append("(_id = ").append(uri.getPathSegments().get(1)).append(")"); } // Tack on the user's selection, if present if (selection != null && selection.length() > 0) { if (whereClause.length() > 0) { whereClause.append(" AND "); } whereClause.append('('); whereClause.append(selection); whereClause.append(')'); } // And perform the generic query as requested Cursor c = db.query(base, useProjection, whereClause.toString(), selectionArgs, null, null, sortOrder, null); 
c.setNotificationUri(getContext().getContentResolver(), uri); return c; } /** * This method is provided for use by the ContentResolver. Do not override, or directly * call from your own code. */ @Override public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) { throw new UnsupportedOperationException("Not implemented"); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.service.persistent; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import org.apache.bookkeeper.mledger.AsyncCallbacks; import org.apache.bookkeeper.mledger.AsyncCallbacks.ClearBacklogCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.DeleteCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.ReadEntriesCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.ReadEntryCallback; import org.apache.bookkeeper.mledger.Entry; import org.apache.bookkeeper.mledger.ManagedCursor; import org.apache.bookkeeper.mledger.ManagedCursor.IndividualDeletedEntries; import org.apache.bookkeeper.mledger.ManagedLedgerException; import org.apache.bookkeeper.mledger.ManagedLedgerException.CursorAlreadyClosedException; import org.apache.bookkeeper.mledger.ManagedLedgerException.TooManyRequestsException; import org.apache.bookkeeper.mledger.Position; import org.apache.bookkeeper.mledger.util.Rate; import org.apache.pulsar.broker.service.AbstractReplicator; import org.apache.pulsar.broker.service.BrokerService; import 
org.apache.pulsar.broker.service.BrokerServiceException.NamingException;
import org.apache.pulsar.broker.service.Replicator;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.impl.Backoff;
import org.apache.pulsar.client.impl.MessageImpl;
import org.apache.pulsar.client.impl.ProducerImpl;
import org.apache.pulsar.client.impl.SendCallback;
import org.apache.pulsar.common.policies.data.ReplicatorStats;
import org.apache.pulsar.common.util.Codec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.netty.buffer.ByteBuf;
import io.netty.util.Recycler;
import io.netty.util.Recycler.Handle;

/**
 * Replicates messages from a local persistent topic to a remote cluster: entries are read
 * through a {@link ManagedCursor} on the local topic and re-published via a client producer
 * to the remote cluster, with the cursor acknowledging each entry once the remote broker has
 * persisted it.
 */
public class PersistentReplicator extends AbstractReplicator implements Replicator, ReadEntriesCallback, DeleteCallback {

    // Local topic being replicated, and the cursor that tracks replication progress on it.
    private final PersistentTopic topic;
    private final ManagedCursor cursor;

    // Hard cap on how many entries a single read request may return.
    private static final int MaxReadBatchSize = 100;
    // Current read batch size; starts at min(producerQueueSize, MaxReadBatchSize) and is
    // doubled on successful reads up to MaxReadBatchSize (see readEntriesComplete).
    private int readBatchSize;

    // Reads resume once pending messages fall below this (90% of the producer queue size).
    private final int producerQueueThreshold;

    private static final AtomicIntegerFieldUpdater<PersistentReplicator> PENDING_MESSAGES_UPDATER = AtomicIntegerFieldUpdater
            .newUpdater(PersistentReplicator.class, "pendingMessages");
    // Number of messages handed to the remote producer and not yet acknowledged
    // (incremented before sendAsync, decremented in the send callback).
    private volatile int pendingMessages = 0;

    // Integer flag values used with HAVE_PENDING_READ_UPDATER (int CAS, not boolean).
    private static final int FALSE = 0;
    private static final int TRUE = 1;

    private static final AtomicIntegerFieldUpdater<PersistentReplicator> HAVE_PENDING_READ_UPDATER = AtomicIntegerFieldUpdater
            .newUpdater(PersistentReplicator.class, "havePendingRead");
    // TRUE while a cursor read is outstanding; guards against scheduling concurrent reads.
    private volatile int havePendingRead = FALSE;

    // Throughput counters: bytes replicated out, and messages dropped as expired.
    private final Rate msgOut = new Rate();
    private final Rate msgExpired = new Rate();

    // TTL applied in readEntriesComplete via msg.isExpired(); 0 here — presumably updated
    // elsewhere from topic policies (not visible in this chunk).
    private int messageTTLInSeconds = 0;

    // Backoff applied between failed read attempts: 1s initial, capped at 1 minute.
    private final Backoff readFailureBackoff = new Backoff(1, TimeUnit.SECONDS,
            1, TimeUnit.MINUTES, 0, TimeUnit.MILLISECONDS);

    // Expiry monitor bound to this replicator's cursor (created in the constructor).
    private PersistentMessageExpiryMonitor expiryMonitor;
    // for connected subscriptions, message expiry will be checked if the backlog is greater than this threshold
    private static final int MINIMUM_BACKLOG_FOR_EXPIRY_CHECK = 1000;

    // Stats container for this replicator; population happens outside this chunk.
    private final ReplicatorStats stats = new
ReplicatorStats();

    /**
     * Creates a replicator that reads entries from {@code cursor} on the local {@code topic}
     * and re-publishes them to {@code remoteCluster}.  Kicks off producer creation immediately
     * via {@code startProducer()}.
     */
    public PersistentReplicator(PersistentTopic topic, ManagedCursor cursor, String localCluster, String remoteCluster,
            BrokerService brokerService) throws NamingException {
        super(topic.getName(), topic.replicatorPrefix, localCluster, remoteCluster, brokerService);
        this.topic = topic;
        this.cursor = cursor;
        this.expiryMonitor = new PersistentMessageExpiryMonitor(topicName, Codec.decode(cursor.getName()), cursor);
        HAVE_PENDING_READ_UPDATER.set(this, FALSE);
        PENDING_MESSAGES_UPDATER.set(this, 0);
        // Never read more per batch than the producer queue can absorb.
        readBatchSize = Math.min(producerQueueSize, MaxReadBatchSize);
        producerQueueThreshold = (int) (producerQueueSize * 0.9);
        startProducer();
    }

    /**
     * Invoked once the remote producer has been created; rewinds the cursor and, if the
     * replicator successfully transitions Starting -> Started, begins the read loop.
     */
    @Override
    protected void readEntries(org.apache.pulsar.client.api.Producer<byte[]> producer) {
        // Rewind the cursor to be sure to read again all non-acked messages sent while restarting
        cursor.rewind();

        cursor.cancelPendingReadRequest();
        HAVE_PENDING_READ_UPDATER.set(this, FALSE);
        this.producer = (ProducerImpl) producer;

        if (STATE_UPDATER.compareAndSet(this, State.Starting, State.Started)) {
            log.info("[{}][{} -> {}] Created replicator producer", topicName, localCluster, remoteCluster);
            backOff.reset();
            // activate cursor: so, entries can be cached
            this.cursor.setActive();
            // read entries
            readMoreEntries();
        } else {
            // Replicator was concurrently stopped; tear the new producer back down.
            log.info(
                    "[{}][{} -> {}] Replicator was stopped while creating the producer. Closing it. Replicator state: {}",
                    topicName, localCluster, remoteCluster, STATE_UPDATER.get(this));
            STATE_UPDATER.set(this, State.Stopping);
            closeProducerAsync();
        }
    }

    /** Replication restarts from the cursor's mark-delete (last fully acked) position. */
    @Override
    protected Position getReplicatorReadPosition() {
        return cursor.getMarkDeletedPosition();
    }

    @Override
    protected long getNumberOfEntriesInBacklog() {
        return cursor.getNumberOfEntriesInBacklog();
    }

    @Override
    protected void disableReplicatorRead() {
        // deactivate cursor after successfully close the producer
        this.cursor.setInactive();
    }

    /**
     * Schedules the next cursor read, sized by the free space in the producer queue and the
     * current (adaptive) batch size.  No-op if the queue is full or a read is already pending.
     */
    protected void readMoreEntries() {
        int availablePermits = producerQueueSize - PENDING_MESSAGES_UPDATER.get(this);

        if (availablePermits > 0) {
            int messagesToRead = Math.min(availablePermits, readBatchSize);
            if (!isWritable()) {
                if (log.isDebugEnabled()) {
                    log.debug("[{}][{} -> {}] Throttling replication traffic because producer is not writable",
                            topicName, localCluster, remoteCluster);
                }
                // Minimize the read size if the producer is disconnected or the window is already full
                messagesToRead = 1;
            }

            // Schedule read
            if (HAVE_PENDING_READ_UPDATER.compareAndSet(this, FALSE, TRUE)) {
                if (log.isDebugEnabled()) {
                    log.debug("[{}][{} -> {}] Schedule read of {} messages", topicName, localCluster, remoteCluster,
                            messagesToRead);
                }
                cursor.asyncReadEntriesOrWait(messagesToRead, this, null);
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("[{}][{} -> {}] Not scheduling read due to pending read. Messages To Read {}", topicName,
                            localCluster, remoteCluster, messagesToRead);
                }
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("[{}][{} -> {}] Producer queue is full, pause reading", topicName, localCluster,
                        remoteCluster);
            }
        }
    }

    /**
     * Cursor read callback: filters each entry (already-replicated, wrong replicateTo, expired,
     * producer not ready) and forwards the rest to the remote producer, then schedules the next
     * read unless the producer needs to drain first.
     */
    @Override
    public void readEntriesComplete(List<Entry> entries, Object ctx) {
        if (log.isDebugEnabled()) {
            log.debug("[{}][{} -> {}] Read entries complete of {} messages", topicName, localCluster, remoteCluster,
                    entries.size());
        }

        // A successful read doubles the batch size, up to the cap.
        if (readBatchSize < MaxReadBatchSize) {
            int newReadBatchSize = Math.min(readBatchSize * 2, MaxReadBatchSize);

            if (log.isDebugEnabled()) {
                log.debug("[{}][{} -> {}] Increasing read batch size from {} to {}", topicName, localCluster,
                        remoteCluster, readBatchSize, newReadBatchSize);
            }

            readBatchSize = newReadBatchSize;
        }

        readFailureBackoff.reduceToHalf();

        boolean atLeastOneMessageSentForReplication = false;

        try {
            // This flag is set to true when we skip at least one local message,
            // in order to skip remaining local messages.
            boolean isLocalMessageSkippedOnce = false;
            for (int i = 0; i < entries.size(); i++) {
                Entry entry = entries.get(i);
                int length = entry.getLength();
                ByteBuf headersAndPayload = entry.getDataBuffer();
                MessageImpl msg;
                try {
                    msg = MessageImpl.deserialize(headersAndPayload);
                } catch (Throwable t) {
                    // Unparseable entry: ack it so it is not re-read forever, and move on.
                    log.error("[{}][{} -> {}] Failed to deserialize message at {} (buffer size: {}): {}", topicName,
                            localCluster, remoteCluster, entry.getPosition(), length, t.getMessage(), t);
                    cursor.asyncDelete(entry.getPosition(), this, entry.getPosition());
                    entry.release();
                    continue;
                }

                if (msg.isReplicated()) {
                    // Discard messages that were already replicated into this region
                    cursor.asyncDelete(entry.getPosition(), this, entry.getPosition());
                    entry.release();
                    msg.recycle();
                    continue;
                }

                if (msg.hasReplicateTo() && !msg.getReplicateTo().contains(remoteCluster)) {
                    // Message explicitly excludes this remote cluster: ack and skip.
                    if (log.isDebugEnabled()) {
                        log.debug("[{}][{} -> {}] Skipping message at position {}, replicateTo {}", topicName,
                                localCluster, remoteCluster, entry.getPosition(), msg.getReplicateTo());
                    }
                    cursor.asyncDelete(entry.getPosition(), this, entry.getPosition());
                    entry.release();
                    msg.recycle();
                    continue;
                }

                if (msg.isExpired(messageTTLInSeconds)) {
                    msgExpired.recordEvent(0 /* no value stat */);
                    if (log.isDebugEnabled()) {
                        log.debug("[{}][{} -> {}] Discarding expired message at position {}, replicateTo {}",
                                topicName, localCluster, remoteCluster, entry.getPosition(), msg.getReplicateTo());
                    }
                    cursor.asyncDelete(entry.getPosition(), this, entry.getPosition());
                    entry.release();
                    msg.recycle();
                    continue;
                }

                if (STATE_UPDATER.get(this) != State.Started || isLocalMessageSkippedOnce) {
                    // The producer is not ready yet after having stopped/restarted. Drop the message because it will
                    // be recovered when the producer is ready
                    if (log.isDebugEnabled()) {
                        log.debug("[{}][{} -> {}] Dropping read message at {} because producer is not ready", topicName,
                                localCluster, remoteCluster, entry.getPosition());
                    }
                    isLocalMessageSkippedOnce = true;
                    entry.release();
                    msg.recycle();
                    continue;
                }

                // Increment pending messages for messages produced locally
                PENDING_MESSAGES_UPDATER.incrementAndGet(this);

                msgOut.recordEvent(headersAndPayload.readableBytes());

                msg.setReplicatedFrom(localCluster);

                headersAndPayload.retain();

                producer.sendAsync(msg, ProducerSendCallback.create(this, entry, msg));
                atLeastOneMessageSentForReplication = true;
            }
        } catch (Exception e) {
            log.error("[{}][{} -> {}] Unexpected exception: {}", topicName, localCluster, remoteCluster, e.getMessage(),
                    e);
        }

        HAVE_PENDING_READ_UPDATER.set(this, FALSE);

        if (atLeastOneMessageSentForReplication && !isWritable()) {
            // Don't read any more entries until the current pending entries are persisted
            if (log.isDebugEnabled()) {
                log.debug("[{}][{} -> {}] Pausing replication traffic. at-least-one: {} is-writable: {}", topicName,
                        localCluster, remoteCluster, atLeastOneMessageSentForReplication, isWritable());
            }
        } else {
            readMoreEntries();
        }
    }

    /** Marks the cursor active only while the remote producer is connected. */
    public void updateCursorState() {
        if (producer != null && producer.isConnected()) {
            this.cursor.setActive();
        } else {
            this.cursor.setInactive();
        }
    }

    /**
     * Send callback for messages handed to the remote producer: acks the entry on success,
     * rewinds the cursor on failure, and decides whether to resume reading.
     */
    private static final class ProducerSendCallback implements SendCallback {
        private PersistentReplicator replicator;
        private Entry entry;
        private MessageImpl msg;

        @Override
        public void sendComplete(Exception exception) {
            if (exception != null) {
                log.error("[{}][{} -> {}] Error producing on remote broker", replicator.topicName,
                        replicator.localCluster, replicator.remoteCluster, exception);
                // cursor should be rewound since it was incremented when readMoreEntries
                replicator.cursor.rewind();
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("[{}][{} -> {}] Message persisted on remote broker", replicator.topicName,
                            replicator.localCluster, replicator.remoteCluster);
                }
                replicator.cursor.asyncDelete(entry.getPosition(), replicator, entry.getPosition());
            }
            entry.release();

            int pending = PENDING_MESSAGES_UPDATER.decrementAndGet(replicator);

            // In general, we schedule a new batch read operation when the occupied queue size gets smaller than half
            // the max size, unless another read operation is already in progress.
            // If the producer is not currently writable (disconnected or TCP window full), we want to defer the reads
            // until we have emptied the whole queue, and at that point we will read a batch of 1 single message if the
            // producer is still not "writable".
            if (pending < replicator.producerQueueThreshold
                    && HAVE_PENDING_READ_UPDATER.get(replicator) == FALSE) {
                if (pending == 0 || replicator.producer.isWritable()) {
                    replicator.readMoreEntries();
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("[{}][{} -> {}] Not resuming reads. 
pending: {} is-writable: {}", replicator.topicName, replicator.localCluster, replicator.remoteCluster, pending, replicator.producer.isWritable()); } } } recycle(); } private final Handle<ProducerSendCallback> recyclerHandle; private ProducerSendCallback(Handle<ProducerSendCallback> recyclerHandle) { this.recyclerHandle = recyclerHandle; } static ProducerSendCallback create(PersistentReplicator replicator, Entry entry, MessageImpl msg) { ProducerSendCallback sendCallback = RECYCLER.get(); sendCallback.replicator = replicator; sendCallback.entry = entry; sendCallback.msg = msg; return sendCallback; } private void recycle() { replicator = null; entry = null; //already released and recycled on sendComplete if (msg != null) { msg.recycle(); msg = null; } recyclerHandle.recycle(this); } private static final Recycler<ProducerSendCallback> RECYCLER = new Recycler<ProducerSendCallback>() { @Override protected ProducerSendCallback newObject(Handle<ProducerSendCallback> handle) { return new ProducerSendCallback(handle); } }; @Override public void addCallback(MessageImpl<?> msg, SendCallback scb) { // noop } @Override public SendCallback getNextSendCallback() { return null; } @Override public MessageImpl<?> getNextMessage() { return null; } @Override public CompletableFuture<MessageId> getFuture() { return null; } } @Override public void readEntriesFailed(ManagedLedgerException exception, Object ctx) { if (STATE_UPDATER.get(this) != State.Started) { log.info("[{}][{} -> {}] Replicator was stopped while reading entries. Stop reading. 
Replicator state: {}", topic, localCluster, remoteCluster, STATE_UPDATER.get(this)); return; } // Reduce read batch size to avoid flooding bookies with retries readBatchSize = 1; long waitTimeMillis = readFailureBackoff.next(); if(exception instanceof CursorAlreadyClosedException) { log.error("[{}][{} -> {}] Error reading entries because replicator is already deleted and cursor is already closed {}, ({})", topic, localCluster, remoteCluster, ctx, exception.getMessage(), exception); // replicator is already deleted and cursor is already closed so, producer should also be stopped closeProducerAsync(); return; }else if (!(exception instanceof TooManyRequestsException)) { log.error("[{}][{} -> {}] Error reading entries at {}. Retrying to read in {}s. ({})", topic, localCluster, remoteCluster, ctx, waitTimeMillis / 1000.0, exception.getMessage(), exception); } else { if (log.isDebugEnabled()) { log.debug("[{}][{} -> {}] Throttled by bookies while reading at {}. Retrying to read in {}s. ({})", topicName, localCluster, remoteCluster, ctx, waitTimeMillis / 1000.0, exception.getMessage(), exception); } } HAVE_PENDING_READ_UPDATER.set(this, FALSE); brokerService.executor().schedule(this::readMoreEntries, waitTimeMillis, TimeUnit.MILLISECONDS); } public CompletableFuture<Void> clearBacklog() { CompletableFuture<Void> future = new CompletableFuture<>(); if (log.isDebugEnabled()) { log.debug("[{}][{} -> {}] Backlog size before clearing: {}", topicName, localCluster, remoteCluster, cursor.getNumberOfEntriesInBacklog()); } cursor.asyncClearBacklog(new ClearBacklogCallback() { @Override public void clearBacklogComplete(Object ctx) { if (log.isDebugEnabled()) { log.debug("[{}][{} -> {}] Backlog size after clearing: {}", topicName, localCluster, remoteCluster, cursor.getNumberOfEntriesInBacklog()); } future.complete(null); } @Override public void clearBacklogFailed(ManagedLedgerException exception, Object ctx) { log.error("[{}][{} -> {}] Failed to clear backlog", topicName, 
localCluster, remoteCluster, exception); future.completeExceptionally(exception); } }, null); return future; } public CompletableFuture<Void> skipMessages(int numMessagesToSkip) { CompletableFuture<Void> future = new CompletableFuture<>(); if (log.isDebugEnabled()) { log.debug("[{}][{} -> {}] Skipping {} messages, current backlog {}", topicName, localCluster, remoteCluster, numMessagesToSkip, cursor.getNumberOfEntriesInBacklog()); } cursor.asyncSkipEntries(numMessagesToSkip, IndividualDeletedEntries.Exclude, new AsyncCallbacks.SkipEntriesCallback() { @Override public void skipEntriesComplete(Object ctx) { if (log.isDebugEnabled()) { log.debug("[{}][{} -> {}] Skipped {} messages, new backlog {}", topicName, localCluster, remoteCluster, numMessagesToSkip, cursor.getNumberOfEntriesInBacklog()); } future.complete(null); } @Override public void skipEntriesFailed(ManagedLedgerException exception, Object ctx) { log.error("[{}][{} -> {}] Failed to skip {} messages", topicName, localCluster, remoteCluster, numMessagesToSkip, exception); future.completeExceptionally(exception); } }, null); return future; } public CompletableFuture<Entry> peekNthMessage(int messagePosition) { CompletableFuture<Entry> future = new CompletableFuture<>(); if (log.isDebugEnabled()) { log.debug("[{}][{} -> {}] Getting message at position {}", topicName, localCluster, remoteCluster, messagePosition); } cursor.asyncGetNthEntry(messagePosition, IndividualDeletedEntries.Exclude, new ReadEntryCallback() { @Override public void readEntryFailed(ManagedLedgerException exception, Object ctx) { future.completeExceptionally(exception); } @Override public void readEntryComplete(Entry entry, Object ctx) { future.complete(entry); } }, null); return future; } @Override public void deleteComplete(Object ctx) { if (log.isDebugEnabled()) { log.debug("[{}][{} -> {}] Deleted message at {}", topicName, localCluster, remoteCluster, ctx); } } @Override public void deleteFailed(ManagedLedgerException exception, Object 
ctx) { log.error("[{}][{} -> {}] Failed to delete message at {}: {}", topicName, localCluster, remoteCluster, ctx, exception.getMessage(), exception); } public void updateRates() { msgOut.calculateRate(); msgExpired.calculateRate(); stats.msgRateOut = msgOut.getRate(); stats.msgThroughputOut = msgOut.getValueRate(); stats.msgRateExpired = msgExpired.getRate() + expiryMonitor.getMessageExpiryRate(); } public ReplicatorStats getStats() { stats.replicationBacklog = cursor.getNumberOfEntriesInBacklog(); stats.connected = producer != null && producer.isConnected(); stats.replicationDelayInSeconds = getReplicationDelayInSeconds(); ProducerImpl producer = this.producer; if (producer != null) { stats.outboundConnection = producer.getConnectionId(); stats.outboundConnectedSince = producer.getConnectedSince(); } else { stats.outboundConnection = null; stats.outboundConnectedSince = null; } return stats; } public void updateMessageTTL(int messageTTLInSeconds) { this.messageTTLInSeconds = messageTTLInSeconds; } private long getReplicationDelayInSeconds() { if (producer != null) { return TimeUnit.MILLISECONDS.toSeconds(producer.getDelayInMillis()); } return 0L; } public void expireMessages(int messageTTLInSeconds) { if ((cursor.getNumberOfEntriesInBacklog() == 0) || (cursor.getNumberOfEntriesInBacklog() < MINIMUM_BACKLOG_FOR_EXPIRY_CHECK && !topic.isOldestMessageExpired(cursor, messageTTLInSeconds))) { // don't do anything for almost caught-up connected subscriptions return; } expiryMonitor.expireMessages(messageTTLInSeconds); } private static final Logger log = LoggerFactory.getLogger(PersistentReplicator.class); }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.wink.itest.standard;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Random;

import junit.framework.TestCase;

import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.InputStreamRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.PutMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.wink.test.integration.ServerEnvironmentInfo;

/**
 * Integration tests for resource methods that take a {@link java.io.Reader}
 * as the entity parameter.
 */
public class JAXRSReaderTest extends TestCase {

    public String getBaseURI() {
        if (ServerEnvironmentInfo.isRestFilterUsed()) {
            return ServerEnvironmentInfo.getBaseURI();
        }
        return ServerEnvironmentInfo.getBaseURI() + "/standard";
    }

    /**
     * Reads the entire response body as characters, growing the buffer as
     * needed, asserts the underlying stream is fully drained, and returns the
     * content as a String.
     * <p>
     * Extracted helper: this loop used to be copy-pasted in three tests.
     *
     * @param is the response body stream
     * @return the response content
     * @throws IOException if reading fails
     */
    private String readEntireResponse(InputStream is) throws IOException {
        InputStreamReader isr = new InputStreamReader(is);
        char[] buffer = new char[1];
        int read = 0;
        int offset = 0;
        while ((read = isr.read(buffer, offset, buffer.length - offset)) != -1) {
            offset += read;
            if (offset >= buffer.length) {
                // Buffer full: double it and keep reading
                buffer = ArrayUtils.copyOf(buffer, buffer.length * 2);
            }
        }
        char[] carr = ArrayUtils.copyOf(buffer, offset);

        // The reader must have drained the underlying stream completely
        int checkEOF = is.read();
        assertEquals(-1, checkEOF);

        return new String(carr);
    }

    /**
     * Tests posting to a Reader parameter.
     *
     * @throws HttpException
     * @throws IOException
     */
    public void testPostReader() throws HttpException, IOException {
        HttpClient client = new HttpClient();

        PostMethod postMethod = new PostMethod(getBaseURI() + "/providers/standard/reader");
        postMethod.setRequestEntity(new StringRequestEntity("abcd", "text/plain", "UTF-8"));
        postMethod.addRequestHeader("Accept", "text/plain");
        try {
            client.executeMethod(postMethod);
            assertEquals(200, postMethod.getStatusCode());

            String str = readEntireResponse(postMethod.getResponseBodyAsStream());
            assertEquals("abcd", str);

            assertEquals("text/plain", postMethod.getResponseHeader("Content-Type").getValue());
            Header contentLengthHeader = postMethod.getResponseHeader("Content-Length");
            assertNull(contentLengthHeader == null ? "null" : contentLengthHeader.getValue(),
                       contentLengthHeader);
        } finally {
            postMethod.releaseConnection();
        }
    }

    /**
     * Tests putting and then getting a Reader.
     *
     * @throws HttpException
     * @throws IOException
     */
    public void testPutReader() throws HttpException, IOException {
        HttpClient client = new HttpClient();

        PutMethod putMethod = new PutMethod(getBaseURI() + "/providers/standard/reader");
        putMethod.setRequestEntity(new StringRequestEntity("wxyz", "char/array", "UTF-8"));
        try {
            client.executeMethod(putMethod);
            assertEquals(204, putMethod.getStatusCode());
        } finally {
            putMethod.releaseConnection();
        }

        GetMethod getMethod = new GetMethod(getBaseURI() + "/providers/standard/reader");
        try {
            client.executeMethod(getMethod);
            assertEquals(200, getMethod.getStatusCode());

            String str = readEntireResponse(getMethod.getResponseBodyAsStream());
            assertEquals("wxyz", str);

            String contentType =
                (getMethod.getResponseHeader("Content-Type") == null) ? null : getMethod
                    .getResponseHeader("Content-Type").getValue();
            assertNotNull(contentType, contentType);
            Header contentLengthHeader = getMethod.getResponseHeader("Content-Length");
            assertNull(contentLengthHeader == null ? "null" : contentLengthHeader.getValue(),
                       contentLengthHeader);
        } finally {
            getMethod.releaseConnection();
        }
    }

    /**
     * Verifies that the response Content-Type echoes the request Accept header.
     *
     * @throws HttpException
     * @throws IOException
     */
    public void testWithRequestAcceptHeaderWillReturnRequestedContentType() throws HttpException,
        IOException {
        HttpClient client = new HttpClient();

        PutMethod putMethod = new PutMethod(getBaseURI() + "/providers/standard/reader");
        putMethod.setRequestEntity(new StringRequestEntity("wxyz", "char/array", "UTF-8"));
        try {
            client.executeMethod(putMethod);
            assertEquals(204, putMethod.getStatusCode());
        } finally {
            putMethod.releaseConnection();
        }

        GetMethod getMethod = new GetMethod(getBaseURI() + "/providers/standard/reader");
        getMethod.addRequestHeader("Accept", "mytype/subtype");
        try {
            client.executeMethod(getMethod);
            assertEquals(200, getMethod.getStatusCode());

            String str = readEntireResponse(getMethod.getResponseBodyAsStream());
            assertEquals("wxyz", str);

            assertEquals("mytype/subtype", getMethod.getResponseHeader("Content-Type").getValue());
            Header contentLengthHeader = getMethod.getResponseHeader("Content-Length");
            assertNull(contentLengthHeader == null ? "null" : contentLengthHeader.getValue(),
                       contentLengthHeader);
        } finally {
            getMethod.releaseConnection();
        }
    }

    /**
     * Tests a resource method invoked with a BufferedReader as a parameter.
     * This should fail with a 415 since the reader has no way to necessarily
     * wrap it to the type.
     *
     * @throws HttpException
     * @throws IOException
     */
    public void testInputStreamImplementation() throws HttpException, IOException {
        HttpClient client = new HttpClient();

        PostMethod postMethod =
            new PostMethod(getBaseURI() + "/providers/standard/reader/subclasses/shouldfail");
        byte[] barr = new byte[1000];
        Random r = new Random();
        r.nextBytes(barr);
        postMethod.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(barr),
                                                                 "any/type"));
        try {
            client.executeMethod(postMethod);
            assertEquals(415, postMethod.getStatusCode());
        } finally {
            postMethod.releaseConnection();
        }
    }

    /**
     * Tests sending in no request entity to a Reader entity parameter.
     *
     * @throws HttpException
     * @throws IOException
     */
    public void testSendingNoRequestEntityReader() throws HttpException, IOException {
        HttpClient client = new HttpClient();

        PostMethod postMethod = new PostMethod(getBaseURI() + "/providers/standard/reader/empty");
        try {
            client.executeMethod(postMethod);
            assertEquals(200, postMethod.getStatusCode());
            assertEquals("expected", postMethod.getResponseBodyAsString());
        } finally {
            postMethod.releaseConnection();
        }
    }
}
package com.capitalone.dashboard.rest;

import com.capitalone.dashboard.config.TestConfig;
import com.capitalone.dashboard.config.WebMVCConfig;
import com.capitalone.dashboard.misc.HygieiaException;
import com.capitalone.dashboard.model.CodeQuality;
import com.capitalone.dashboard.model.CodeQualityMetric;
import com.capitalone.dashboard.model.CodeQualityMetricStatus;
import com.capitalone.dashboard.model.CodeQualityType;
import com.capitalone.dashboard.model.DataResponse;
import com.capitalone.dashboard.request.CodeQualityCreateRequest;
import com.capitalone.dashboard.request.CodeQualityRequest;
import com.capitalone.dashboard.service.CodeQualityService;
import com.capitalone.dashboard.util.TestUtil;
import org.bson.types.ObjectId;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.MethodSorters;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import java.util.Arrays;

import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

/**
 * MockMvc tests for the code quality REST endpoints
 * (/quality/static-analysis and /quality/security-analysis).
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = { TestConfig.class, WebMVCConfig.class })
@WebAppConfiguration
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class CodeQualityControllerTest {

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext wac;
    @Autowired
    private CodeQualityService codeQualityService;

    @Before
    public void before() {
        mockMvc = MockMvcBuilders.webAppContextSetup(wac).build();
    }

    /** GET static-analysis results are serialized with all expected fields. */
    @Test
    public void staticQualities() throws Exception {
        CodeQuality quality = makeCodeQualityStatic();
        Iterable<CodeQuality> qualities = Arrays.asList(quality);
        DataResponse<Iterable<CodeQuality>> response = new DataResponse<>(
                qualities, 1);
        CodeQualityMetric metric = makeMetric();

        when(codeQualityService.search(Mockito.any(CodeQualityRequest.class)))
                .thenReturn(response);

        mockMvc.perform(
                get("/quality/static-analysis?componentId=" + ObjectId.get()
                        + "&max=1"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$result", hasSize(1)))
                .andExpect(
                        jsonPath("$result[0].id", is(quality.getId().toString())))
                .andExpect(
                        jsonPath("$result[0].collectorItemId", is(quality
                                .getCollectorItemId().toString())))
                .andExpect(
                        jsonPath("$result[0].timestamp",
                                is(intVal(quality.getTimestamp()))))
                .andExpect(jsonPath("$result[0].name", is(quality.getName().toString())))
                .andExpect(jsonPath("$result[0].url", is(quality.getUrl())))
                .andExpect(jsonPath("$result[0].type", is(quality.getType().toString())))
                .andExpect(
                        jsonPath("$result[0].version", is(quality.getVersion())))
                .andExpect(
                        jsonPath("$result[0].metrics[0].name", is(metric.getName().toString())))
                .andExpect(
                        jsonPath("$result[0].metrics[0].formattedValue",
                                is(metric.getFormattedValue())))
                .andExpect(
                        jsonPath("$result[0].metrics[0].status",
                                is(metric.getStatus().toString())));
    }

    /** GET security-analysis results are serialized with all expected fields. */
    @Test
    public void securityQualities() throws Exception {
        CodeQuality quality = makeSecurityAnalysis();
        Iterable<CodeQuality> qualities = Arrays.asList(quality);
        DataResponse<Iterable<CodeQuality>> response = new DataResponse<>(
                qualities, 1);
        CodeQualityMetric metric = makeMetric();
        when(codeQualityService.search(Mockito.any(CodeQualityRequest.class)))
                .thenReturn(response);
        mockMvc.perform(
                get("/quality/security-analysis?componentId=" + ObjectId.get()
                        + "&max=1"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$result", hasSize(1)))
                .andExpect(
                        jsonPath("$result[0].id", is(quality.getId().toString())))
                .andExpect(
                        jsonPath("$result[0].collectorItemId", is(quality
                                .getCollectorItemId().toString())))
                .andExpect(
                        jsonPath("$result[0].timestamp",
                                is(intVal(quality.getTimestamp()))))
                .andExpect(jsonPath("$result[0].name", is(quality.getName())))
                .andExpect(jsonPath("$result[0].url", is(quality.getUrl())))
                .andExpect(jsonPath("$result[0].type", is(quality.getType().toString())))
                .andExpect(
                        jsonPath("$result[0].version", is(quality.getVersion())))
                .andExpect(
                        jsonPath("$result[0].metrics[0].name", is(metric.getName())))
                .andExpect(
                        jsonPath("$result[0].metrics[0].formattedValue",
                                is(metric.getFormattedValue())))
                .andExpect(
                        jsonPath("$result[0].metrics[0].status",
                                is(metric.getStatus().toString())));
    }

    /** Missing componentId must yield 400 Bad Request. */
    @Test
    public void builds_noComponentId_badRequest() throws Exception {
        mockMvc.perform(get("/quality")).andExpect(status().isBadRequest());
    }

    /** Valid create request returns 201 Created. */
    @Test
    public void insertStaticAnalysisTest1() throws Exception {
        CodeQualityCreateRequest request = makeCodeQualityRequest();

        when(codeQualityService.create(Matchers.any(CodeQualityCreateRequest.class))).thenReturn("123456");
        mockMvc.perform(post("/quality/static-analysis")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(request)))
                .andExpect(status().isCreated());
    }

    /** Create request with a null project name fails validation with 400. */
    @Test
    public void insertStaticAnalysisTest2() throws Exception {
        CodeQualityCreateRequest request = makeCodeQualityRequest();
        request.setProjectName(null);

        when(codeQualityService.create(Matchers.any(CodeQualityCreateRequest.class))).thenReturn("1234");
        mockMvc.perform(post("/quality/static-analysis")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(request)))
                .andExpect(status().isBadRequest());
    }

    /** A service-layer HygieiaException surfaces as 500 Internal Server Error. */
    @Test
    public void insertStaticAnalysisTest3() throws Exception {
        CodeQualityCreateRequest request = makeCodeQualityRequest();

        when(codeQualityService.create(Matchers.any(CodeQualityCreateRequest.class))).thenThrow(new HygieiaException("This is bad", HygieiaException.COLLECTOR_CREATE_ERROR));
        mockMvc.perform(post("/quality/static-analysis")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(request)))
                .andExpect(status().isInternalServerError());
    }

    // Builds a fully populated create request fixture.
    private CodeQualityCreateRequest makeCodeQualityRequest() {
        CodeQualityCreateRequest quality = new CodeQualityCreateRequest();
        quality.setHygieiaId("2345");
        quality.setProjectId("1234");
        quality.setTimestamp(1);
        quality.setProjectName("MyTest");
        quality.setType(CodeQualityType.StaticAnalysis);
        quality.setProjectUrl("http://mycompany.sonar.com/MyTest");
        quality.setServerUrl("http://mycompany.sonar.com");
        quality.setProjectVersion("1.0.0.1");
        quality.getMetrics().add(makeMetric());
        return quality;
    }

    // Builds a static-analysis CodeQuality fixture.
    private CodeQuality makeCodeQualityStatic() {
        CodeQuality quality = new CodeQuality();
        quality.setId(ObjectId.get());
        quality.setCollectorItemId(ObjectId.get());
        quality.setTimestamp(1);
        quality.setName("MyTest");
        quality.setType(CodeQualityType.StaticAnalysis);
        quality.setUrl("http://mycompany.sonar.com/MyTest");
        quality.setVersion("1.0.0.1");
        quality.getMetrics().add(makeMetric());
        return quality;
    }

    // Builds a security-analysis CodeQuality fixture.
    private CodeQuality makeSecurityAnalysis() {
        CodeQuality quality = new CodeQuality();
        quality.setId(ObjectId.get());
        quality.setCollectorItemId(ObjectId.get());
        quality.setTimestamp(1);
        quality.setName("MyFortify");
        quality.setType(CodeQualityType.SecurityAnalysis);
        quality.setUrl("http://mycompany.fortify.ssc.com/MyFortify");
        quality.setVersion("dev");
        quality.getMetrics().add(makeMetric());
        return quality;
    }

    // Builds a single metric fixture shared by the other factory methods.
    private CodeQualityMetric makeMetric() {
        CodeQualityMetric metric = new CodeQualityMetric("critical");
        metric.setFormattedValue("10");
        metric.setStatus(CodeQualityMetricStatus.Ok);
        metric.setStatusMessage("Ok");
        // Integer.valueOf instead of the deprecated Integer(int) constructor
        metric.setValue(Integer.valueOf(0));
        return metric;
    }

    // JSON numbers deserialize as int in jsonPath comparisons; narrow the long.
    private int intVal(long value) {
        return Long.valueOf(value).intValue();
    }
}
/**
 * Copyright (C) 2014-2016 LinkedIn Corp. (pinot-core@linkedin.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.linkedin.pinot.core.util;

import com.linkedin.pinot.common.utils.MmapUtils;
import com.linkedin.pinot.core.indexsegment.utils.BitUtils;
import java.io.Closeable;
import java.nio.ByteBuffer;


/**
 *
 * Util class to store bit set, provides additional utility over java bit set by
 * allowing reading int from start bit to end bit.
 * <p>
 * Bits are stored MSB-first within each byte (bit 0 is the highest bit of byte 0).
 * All positional reads/writes use absolute ByteBuffer accessors, so the buffer's
 * position is never advanced by normal operations.
 */
public final class CustomBitSet implements Closeable {
  private final int nrBytes;
  private ByteBuffer buf;
  // Popcount lookup table for a single unsigned byte value.
  private final static int[] bitCountArray = new int[256];
  // A byte promoted to int has 24 leading zero bits that must be discounted.
  private final static int IGNORED_ZEROS_COUNT = Integer.SIZE - Byte.SIZE;
  // True when this instance allocated buf itself and must free it on close().
  private final boolean ownsByteBuffer;

  static {
    for (int i = 0; i < 256; i++) {
      bitCountArray[i] = Integer.bitCount(i);
    }
  }

  private CustomBitSet(final int nrBytes) {
    if (nrBytes < 1) {
      throw new IllegalArgumentException("CustomBitSet requires at least one byte of storage, asked for " + nrBytes);
    }
    this.nrBytes = nrBytes;
    buf = MmapUtils.allocateDirectByteBuffer(nrBytes, null, this.getClass().getSimpleName() + " buf");
    ownsByteBuffer = true;
  }

  private CustomBitSet(final int nrBytes, final ByteBuffer buffer) {
    if (nrBytes < 1) {
      throw new IllegalArgumentException("CustomBitSet requires at least one byte of storage, asked for " + nrBytes);
    }
    // Use long arithmetic: nrBytes * 8 in int silently overflows for nrBytes > Integer.MAX_VALUE / 8.
    if ((long) nrBytes * 8 >= Integer.MAX_VALUE) {
      throw new IllegalArgumentException("Requested bit set capacity is " + nrBytes
          + " bytes, but number of bits exceeds the Integer.MAX_VALUE and since we use int data type to return position, we will overflow int.");
    }
    if (buffer.capacity() < nrBytes) {
      throw new IllegalArgumentException("Requested bit set capacity is " + nrBytes
          + " bytes, but the underlying byte buffer has a capacity of " + buffer.capacity()
          + ", which is less than requested");
    }
    this.nrBytes = nrBytes;
    this.buf = buffer;
    ownsByteBuffer = false;
  }

  /** Wraps an existing buffer; the caller retains ownership of the buffer. */
  public static CustomBitSet withByteBuffer(final int numBytes, ByteBuffer byteBuffer) {
    return new CustomBitSet(numBytes, byteBuffer);
  }

  /** Allocates a new direct buffer of {@code nrBytes} bytes, owned by this instance. */
  public static CustomBitSet withByteLength(final int nrBytes) {
    return new CustomBitSet(nrBytes);
  }

  /** Allocates a new direct buffer large enough to hold {@code nrBits} bits. */
  public static CustomBitSet withBitLength(final int nrBits) {
    return new CustomBitSet((nrBits - 1) / 8 + 1);
  }

  /**
   * Sets the bit at the given offset (MSB-first within each byte).
   *
   * @throws IllegalArgumentException if the offset is negative or out of range
   */
  public void setBit(final long bitOffset) {
    if (bitOffset < 0) {
      throw new IllegalArgumentException("Negative bitOffset value " + bitOffset);
    }
    final int byteToSet = (int) (bitOffset / 8);
    // Valid byte indexes are 0..nrBytes-1, so >= (not >) is the correct bound.
    if (byteToSet >= nrBytes) {
      throw new IllegalArgumentException("bitOffset value " + bitOffset + " (byte offset " + byteToSet
          + ") exceeds buffer capacity of " + nrBytes + " bytes");
    }
    byte b = buf.get(byteToSet);
    byte posBit = (byte) (1 << (7 - (bitOffset % 8)));
    b |= posBit;
    buf.put(byteToSet, b);
  }

  /**
   * Clears the bit at the given offset (MSB-first within each byte).
   *
   * @throws IllegalArgumentException if the offset is negative or out of range
   */
  public void unsetBit(final long bitOffset) {
    if (bitOffset < 0) {
      throw new IllegalArgumentException("Negative bitOffset value " + bitOffset);
    }
    final int byteToSet = (int) (bitOffset / 8);
    // Valid byte indexes are 0..nrBytes-1, so >= (not >) is the correct bound.
    if (byteToSet >= nrBytes) {
      throw new IllegalArgumentException("bitOffset value " + bitOffset + " (byte offset " + byteToSet
          + ") exceeds buffer capacity of " + nrBytes + " bytes");
    }
    final int offset = (int) (bitOffset % 8);
    byte b = buf.get(byteToSet);
    b &= ~(1 << (7 - offset));
    buf.put(byteToSet, b);
  }

  /**
   * Reads the bits between startBitIndex (inclusive) and endBitIndex (exclusive)
   * as an unsigned int.
   *
   * @return the int value encoded in the requested bit range
   */
  public int readInt(long startBitIndex, long endBitIndex) {
    int bitLength = (int) (endBitIndex - startBitIndex);

    // Fast path: the value fits entirely in one aligned 32-bit read.
    if (bitLength < 16 && endBitIndex + 32 < nrBytes * 8L) {
      int bytePosition = (int) (startBitIndex / 8);
      int bitOffset = (int) (startBitIndex % 8);
      int shiftOffset = 32 - (bitOffset + bitLength);
      int intValue = buf.getInt(bytePosition);
      int bitMask = (1 << bitLength) - 1;
      return (intValue >> shiftOffset) & bitMask;
    } else {
      // Slow path: accumulate byte by byte, then shift out the trailing bits.
      int bytePosition = (int) (startBitIndex >>> 3);
      int startBitOffset = (int) (startBitIndex & 7);
      int sum = startBitOffset + bitLength;
      int endBitOffset = (8 - (sum & 7)) & 7;

      int numberOfBytesUsed = (sum + 7) >>> 3;
      int i = -1;

      long number = 0;
      while (true) {
        number |= (buf.get(bytePosition)) & 0xFF;
        i++;
        bytePosition++;
        if (i == numberOfBytesUsed - 1) {
          break;
        }
        number <<= 8;
      }
      number >>= endBitOffset;
      number &= (0xFFFFFFFF >>> (32 - bitLength));
      return (int) number;
    }
  }

  /**
   * Writes the lowest {@code bitLength} bits of {@code value} starting at
   * {@code startBitIndex}.
   */
  public void writeInt(long startBitIndex, int bitLength, int value) {
    // Fast path mirrors readInt: single aligned 32-bit read-modify-write.
    if (bitLength < 16 && startBitIndex + bitLength + 32 < nrBytes * 8L) {
      int bytePosition = (int) (startBitIndex / 8);
      int bitOffset = (int) (startBitIndex % 8);
      int shiftOffset = 32 - (bitOffset + bitLength);
      int intValue = buf.getInt(bytePosition);
      int bitMask = ((1 << bitLength) - 1) << shiftOffset;
      int updatedIntValue = (intValue & ~bitMask) | ((value << shiftOffset) & bitMask);
      buf.putInt(bytePosition, updatedIntValue);
    } else {
      // Slow path: set each 1-bit individually (assumes the target bits start cleared).
      for (int bitPos = bitLength - 1; bitPos >= 0; bitPos--) {
        if ((value & (1 << bitPos)) != 0) {
          setBit(startBitIndex + (bitLength - bitPos - 1));
        }
      }
    }
  }

  /**
   * Copies the backing buffer into a new byte array.
   * Reads through a duplicate so the shared buffer's position is untouched and
   * repeated calls do not throw BufferUnderflowException.
   */
  public byte[] toByteArray() {
    byte[] dst = new byte[buf.capacity()];
    buf.duplicate().get(dst);
    return dst;
  }

  @Override
  public String toString() {
    byte[] array = toByteArray();
    StringBuilder sb = new StringBuilder();
    sb.append("[");
    for (byte b : array) {
      sb.append(Integer.toBinaryString((b & 0xFF) + 0x100).substring(1));
    }
    sb.append("]");
    return sb.toString();
  }

  /**
   * Finds the index of the Nth bit set after the startBitIndex, the bit at startBitIndex is excluded.
   *
   * @param startBitIndex the index from which to start the search, exclusive
   * @param n the ordinal (1-based) of the set bit to find
   * @return the bit index found, or -1 if there are fewer than n set bits after startBitIndex
   */
  public int findNthBitSetAfter(int startBitIndex, int n) {
    int searchStartBitIndex = startBitIndex + 1;

    int bytePosition = (searchStartBitIndex / 8);
    int bitPosition = (searchStartBitIndex % 8);
    if (bytePosition >= nrBytes) {
      return -1;
    }
    int currentByte = (buf.get(bytePosition) << bitPosition) & 0xFF;
    int numberOfBitsOnInCurrentByte = bitCountArray[currentByte];
    int numberOfBitsToSkip = n - 1;

    // Is the bit we're looking for in the current byte?
    if (n <= numberOfBitsOnInCurrentByte) {
      currentByte = BitUtils.turnOffNthLeftmostSetBits(currentByte, numberOfBitsToSkip);
      return Integer.numberOfLeadingZeros(currentByte) - IGNORED_ZEROS_COUNT + startBitIndex + 1;
    }

    // Skip whole bytes until the bit we're looking for is in the current byte
    while (numberOfBitsOnInCurrentByte <= numberOfBitsToSkip) {
      numberOfBitsToSkip -= numberOfBitsOnInCurrentByte;
      bytePosition++;
      if (bytePosition >= nrBytes) {
        return -1;
      }
      currentByte = buf.get(bytePosition) & 0xFF;
      numberOfBitsOnInCurrentByte = bitCountArray[currentByte];
    }

    // Walk the remaining set bits one at a time within the final byte
    int currentBitPosition = nextSetBit(bytePosition * 8);
    while (0 < numberOfBitsToSkip && currentBitPosition != -1) {
      currentBitPosition = nextSetBit(currentBitPosition + 1);
      numberOfBitsToSkip--;
    }

    return currentBitPosition;
  }

  /**
   * Obtains the index of the first bit set at the current index position or after.
   *
   * @param index Index of the bit to search from, inclusive.
   * @return The index of the first bit set at or after the given index, or -1 if there are no bits set after the
   *         search index.
   */
  public int nextSetBit(int index) {
    int bytePosition = (index / 8);
    int bitPosition = (index % 8);
    if (bytePosition >= nrBytes) {
      return -1;
    }

    // Shift out the bits before 'index' within the byte, then count leading zeros.
    int currentByte = (buf.get(bytePosition) << bitPosition) & 0xFF;
    if (currentByte != 0) {
      return Integer.numberOfLeadingZeros(currentByte) - IGNORED_ZEROS_COUNT + index;
    }

    int bytesSkipped = 0;

    // Skip whole zero bytes
    while (currentByte == 0) {
      bytesSkipped++;
      if (bytePosition + bytesSkipped >= nrBytes) {
        return -1;
      }
      currentByte = buf.get(bytePosition + bytesSkipped) & 0xFF;
    }

    int zerosCount = Integer.numberOfLeadingZeros(currentByte) - IGNORED_ZEROS_COUNT;
    return zerosCount + (bytePosition + bytesSkipped) * 8;
  }

  /**
   * Obtains the index of the first bit set after the current index position.
   *
   * @param index Index of the bit to search from, exclusive.
   * @return The index of the first bit set after the given index, or -1 if there are no bits set after the search
   *         index.
   */
  public int nextSetBitAfter(int index) {
    return nextSetBit(index + 1);
  }

  /** Returns true if the bit at the given index is set (MSB-first within each byte). */
  public boolean isBitSet(int index) {
    final int byteToCheck = (int) (index >>> 3);
    assert (byteToCheck < nrBytes);
    byte b = buf.get(byteToCheck);
    final int offset = (int) (7 - index % 8);
    return ((b & (1 << offset)) != 0);
  }

  @Override
  public void close() {
    // Only release buffers this instance allocated; wrapped buffers belong to the caller.
    if (ownsByteBuffer) {
      MmapUtils.unloadByteBuffer(buf);
      buf = null;
    }
  }
}
/*
 * XmlDataFileParser.java
 *
 * This file is part of SQL Workbench/J, http://www.sql-workbench.net
 *
 * Copyright 2002-2015, Thomas Kellerer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * To contact the author please send an email to: support@sql-workbench.net
 *
 */
package workbench.db.importer;

import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import workbench.interfaces.ImportFileParser;
import workbench.interfaces.JobErrorHandler;
import workbench.log.LogMgr;
import workbench.resource.ResourceMgr;

import workbench.db.ColumnIdentifier;
import workbench.db.TableIdentifier;
import workbench.db.exporter.BlobMode;
import workbench.db.exporter.XmlRowDataConverter;

import workbench.util.ExceptionUtil;
import workbench.util.FileUtil;
import workbench.util.MessageBuffer;
import workbench.util.SqlUtil;
import workbench.util.StringUtil;
import workbench.util.WbStringTokenizer;

import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;

/**
 * A SAX-based parser that reads data rows from Workbench's XML export format
 * and hands them to a row receiver for import.
 *
 * Parsing state (current row, column index, character buffer, null/longValue
 * flags) is kept in mutable fields shared between the SAX callbacks of the
 * inner {@link SaxHandler}; the class is therefore NOT thread-safe.
 *
 * @author Thomas Kellerer
 */
public class XmlDataFileParser
    extends AbstractImportFileParser
    implements ImportFileParser
{
    // Table name read from the XML file's metadata section (fallback when no
    // explicit target table was given).
    private String tableNameFromFile;

    private int currentRowNumber = 1;
    // Number of columns defined in the XML file.
    private int colCount;
    // Number of columns actually imported (after filtering through importColumns).
    private int realColCount;

    private ColumnIdentifier[] columns;
    private Object[] currentRow;
    private boolean[] warningAdded;
    private boolean verboseFormat = true;
    private boolean formatKnown = false;
    private String missingColumn;
    private boolean ignoreCurrentRow;

    // Parsing state shared between the SAX callbacks:
    private int currentColIndex = 0;
    private int realColIndex = 0;
    private long columnLongValue = 0;
    private String columnDataFile = null;
    private boolean isNull = false;
    private StringBuilder chars;

    private String rowTag = XmlRowDataConverter.LONG_ROW_TAG;
    private String columnTag = XmlRowDataConverter.LONG_COLUMN_TAG;

    private SAXParser saxParser;
    private DefaultHandler handler = new SaxHandler();

    public XmlDataFileParser() {
        super();
        SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setValidating(false);
        try {
            saxParser = factory.newSAXParser();
        } catch (Exception e) {
            // should not happen!
            LogMgr.logError("XmlDataFileParser.<init>", "Error creating XML parser", e);
        }
    }

    public XmlDataFileParser(File file) {
        this();
        this.inputFile = file;
    }

    /** Returns the configured import columns as a comma-separated list. */
    @Override
    public String getColumns() {
        return StringUtil.listToString(this.importColumns, ',', false);
    }

    @Override
    public void addColumnFilter(String colname, String regex) {
        // not yet supported
    }

    @Override
    public String getLastRecord() {
        // Not available for XML input; the raw record text is not retained.
        return null;
    }

    @Override
    public Map<Integer, Object> getInputColumnValues(Collection<Integer> inputFileIndexes) {
        // Not supported for XML input.
        return null;
    }

    @Override
    protected TablenameResolver getTableNameResolver() {
        return new XmlTableNameResolver(getEncoding());
    }

    /** Returns true if either the base class or any per-column warning flag was set. */
    @Override
    public boolean hasWarnings() {
        if (super.hasWarnings()) return true;
        if (this.warningAdded == null) return false;
        for (boolean b : warningAdded) {
            if (b) return true;
        }
        return false;
    }

    /**
     * Defines the columns to be imported from a comma-separated list.
     * A blank list resets the selection (all file columns are imported).
     *
     * @throws SQLException if a listed column is not present in the input file
     *         (see checkImportColumns())
     */
    public void setColumns(String columnList) throws SQLException {
        if (StringUtil.isNonBlank(columnList)) {
            WbStringTokenizer tok = new WbStringTokenizer(columnList, ",");
            importColumns = ImportFileColumn.createList();
            while (tok.hasMoreTokens()) {
                String col = tok.nextToken();
                if (col == null) continue;
                col = col.trim();
                if (col.length() == 0) continue;
                ColumnIdentifier ci = new ColumnIdentifier(col);
                importColumns.add(new ImportFileColumn(ci));
            }
        } else {
            importColumns = null;
        }
        checkImportColumns();
    }

    /**
     * Define the columns to be imported.
     * Columns named with the skip indicator are left out of the selection.
     */
    @Override
    public void setColumns(List<ColumnIdentifier> cols) throws SQLException {
        if (cols != null && cols.size() > 0) {
            importColumns = ImportFileColumn.createList();
            Iterator<ColumnIdentifier> itr = cols.iterator();
            while (itr.hasNext()) {
                ColumnIdentifier id = itr.next();
                if (!id.getColumnName().equals(RowDataProducer.SKIP_INDICATOR)) {
                    importColumns.add(new ImportFileColumn(id));
                }
            }
        } else {
            this.importColumns = null;
        }
        checkImportColumns();
    }

    /**
     * Check if all columns defined for the import (through the table definition
     * as part of the XML file, or passed by the user on the command line) are
     * actually available in the target table.
     * For this all columns of the target table are retrieved from the database,
     * and each column that has been defined through setColumns() is checked
     * whether it exists there. Columns that are not found are dropped from
     * the list of import columns.
     * If continueOnError == true, a warning is added to the messages. Otherwise
     * an Exception is thrown.
     */
    public void checkTargetColumns(TableIdentifier tbl) throws SQLException {
        if (this.connection == null) return;
        if (this.columns == null) return;
        if (tbl == null) return;

        if (!this.connection.getMetadata().tableExists(tbl)) {
            if (this.receiver.getCreateTarget()) {
                LogMgr.logDebug("XmlDataFileParser.checkTargetColumns()", "Table " + tbl.getTableName() + " not found, but receiver will create it. Skipping column check...");
                return;
            } else {
                String msg = ResourceMgr.getFormattedString("ErrTargetTableNotFound", tbl.getTableName());
                this.messages.append(msg);
                this.messages.appendNewLine();
                throw new SQLException("Table '" + tbl.getTableName() + "' not found!");
            }
        }

        List<ColumnIdentifier> tableCols = this.connection.getMetadata().getTableColumns(tbl);
        List<ImportFileColumn> validCols = ImportFileColumn.createList();

        for (int colIndex = 0; colIndex < this.columns.length; colIndex++) {
            int i = tableCols.indexOf(this.columns[colIndex]);
            if (i != -1) {
                // Use the column definition retrieved from the database
                // to make sure we are using the correct data types when converting the input (String) values
                // this is also important to get quoting of column names
                // with special characters correctly (as this is handled by DbMetadata already
                // but the columns retrieved from the XML file are not quoted correctly)
                ColumnIdentifier tc = tableCols.get(i);
                this.columns[colIndex] = tc;
                validCols.add(new ImportFileColumn(tc));
            } else {
                String errorColumn = (this.columns[colIndex] != null ? this.columns[colIndex].getColumnName() : "n/a");
                String msg = ResourceMgr.getFormattedString("ErrImportColumnNotFound", errorColumn, tbl.getTableExpression());
                this.messages.append(msg);
                this.messages.appendNewLine();
                if (this.abortOnError) {
                    this.hasErrors = true;
                    throw new SQLException("Column " + errorColumn + " not found in target table");
                } else {
                    this.hasWarnings = true;
                    LogMgr.logWarning("XmlDataFileParser.checkTargetColumns()", msg);
                }
            }
        }

        // Make sure we are using the columns collected during the check
        if (validCols.size() != columns.length) {
            this.importColumns = validCols;
            this.realColCount = this.importColumns.size();
        }
    }

    /**
     * Verifies that every user-selected import column exists in the input file.
     * Missing columns are either dropped with a warning (ignoreMissingColumns
     * or non-aborting mode) or cause an SQLException.
     */
    private void checkImportColumns() throws SQLException {
        if (importColumns == null) {
            this.realColCount = this.colCount;
            return;
        }

        this.missingColumn = null;

        try {
            if (this.columns == null) this.readXmlTableDefinition();
        } catch (Throwable e) {
            LogMgr.logError("XmlDataFileParser.checkImportColumns()", "Error reading table definition from XML file", e);
            this.hasErrors = true;
            throw new SQLException("Could not read table definition from XML file");
        }

        Iterator<ImportFileColumn> cols = importColumns.iterator();
        while (cols.hasNext()) {
            ColumnIdentifier c = cols.next().getColumn();
            if (!this.containsColumn(c)) {
                if (ignoreMissingColumns || !abortOnError) {
                    String msg = ResourceMgr.getFormattedString("ErrImportColumnIgnored", c.getColumnName(), this.tableName);
                    LogMgr.logWarning("XmlDataFileParser.checkImportColumns()", "Ignoring table column " + c.getColumnName() + " because it is not present in the input file");
                    this.hasWarnings = true;
                    // When the column was not explicitly allowed to be missing,
                    // it still counts as an error even though the import continues.
                    if (!ignoreMissingColumns) this.hasErrors = true;
                    this.messages.append(msg);
                    cols.remove();
                } else {
                    this.missingColumn = c.getColumnName();
                    this.hasErrors = true;
                    throw new SQLException("The column " + c.getColumnName() + " from the table " + this.tableName + " is not present in input file!");
                }
            }
        }
        this.realColCount = this.importColumns.size();
    }

    /**
     * Returns the first column from the import columns
     * that is not found in the import file.
     * @see #setColumns(String)
     * @see #setColumns(List)
     */
    public String getMissingColumn() {
        return this.missingColumn;
    }

    /** Returns true if the given column is part of the file's column definition. */
    private boolean containsColumn(ColumnIdentifier col) {
        if (this.columns == null) return false;
        for (ColumnIdentifier column : this.columns) {
            if (column.equals(col)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns the columns defined in the XML file, reading the table
     * definition on first use. Returns an empty list if the definition
     * cannot be read.
     */
    @Override
    public List<ColumnIdentifier> getColumnsFromFile() {
        try {
            if (this.columns == null) this.readXmlTableDefinition();
        } catch (IOException | SAXException e) {
            return Collections.emptyList();
        }
        ArrayList<ColumnIdentifier> result = new ArrayList<>(this.columns.length);
        result.addAll(Arrays.asList(this.columns));
        return result;
    }

    /**
     * Reads the BLOB encoding mode from the file's metadata and configures
     * the blob decoder accordingly.
     */
    private void detectBlobEncoding() {
        try {
            fileHandler.setMainFile(this.inputFile, getEncoding());
            XmlTableDefinitionParser tableDef = new XmlTableDefinitionParser(this.fileHandler);
            String mode = tableDef.getBlobEncoding();
            if (StringUtil.isNonBlank(mode)) {
                BlobMode bmode = BlobMode.getMode(mode);
                blobDecoder.setBlobMode(bmode);
            }
        } catch (Exception e) {
            // NOTE(review): this log message (and the fallback to verbose format)
            // looks copy-pasted from detectTagFormat(); presumably it should say
            // "Could not detect blob encoding" — confirm before changing.
            LogMgr.logError("XmlDataFileParser", "Could not detect XML tag format. Assuming 'verbose'", e);
            this.setUseVerboseFormat(true);
        }
    }

    /** Returns the configured encoding, defaulting to UTF-8 when none is set. */
    @Override
    public String getEncoding() {
        return (StringUtil.isEmptyString(this.encoding) ? "UTF-8" : this.encoding);
    }

    /**
     * Determines whether the file uses long or short XML tags by reading the
     * file's metadata; falls back to the verbose (long) format on error.
     */
    private void detectTagFormat() {
        try {
            fileHandler.setMainFile(this.inputFile, getEncoding());
            XmlTableDefinitionParser tableDef = new XmlTableDefinitionParser(this.fileHandler);
            detectTagFormat(tableDef);
        } catch (Exception e) {
            LogMgr.logError("XmlDataFileParser", "Could not detect XML tag format. Assuming 'verbose'", e);
            this.setUseVerboseFormat(true);
        }
    }

    /** Applies the tag format advertised by the parsed table definition, if any. */
    private void detectTagFormat(XmlTableDefinitionParser tableDef) {
        String format = tableDef.getTagFormat();
        if (format != null) {
            if (XmlRowDataConverter.KEY_FORMAT_LONG.equals(format)) {
                this.setUseVerboseFormat(true);
            } else if (XmlRowDataConverter.KEY_FORMAT_SHORT.equals(format)) {
                this.setUseVerboseFormat(false);
            }
        }
    }

    /**
     * Reads column definitions and the table name from the XML file's metadata
     * section and initializes the per-column state.
     */
    private void readXmlTableDefinition() throws IOException, SAXException {
        fileHandler.setMainFile(this.inputFile, getEncoding());

        XmlTableDefinitionParser tableDef = new XmlTableDefinitionParser(this.fileHandler);
        this.columns = tableDef.getColumns();
        this.colCount = this.columns.length;
        this.tableNameFromFile = tableDef.getTableName();
        this.warningAdded = new boolean[this.colCount];
        detectTagFormat(tableDef);
    }

    /**
     * Parses one input file and streams its rows to the receiver.
     * Errors during parsing set hasErrors and notify the receiver;
     * a regular cancel is signalled through ParsingInterruptedException.
     */
    @Override
    protected void processOneFile() throws Exception {
        // readTableDefinition relies on the fileHandler, so this
        // has to be called after initializing the fileHandler
        if (this.columns == null) {
            this.readXmlTableDefinition();
        }
        if (!this.formatKnown) {
            detectTagFormat();
        }
        detectBlobEncoding();

        if (this.importColumns == null) {
            this.realColCount = this.colCount;
        } else {
            this.realColCount = this.importColumns.size();
        }

        // Re-initialize the reader in case we are reading from a ZIP archive
        // because readTableDefinition() can change the file handler
        this.fileHandler.setMainFile(this.inputFile, getEncoding());
        blobDecoder.setBaseDir(inputFile.getParentFile());

        if (!sharedMessages) this.messages = new MessageBuffer();
        this.sendTableDefinition();
        Reader in = null;

        try {
            in = this.fileHandler.getMainFileReader();
            InputSource source = new InputSource(in);
            saxParser.parse(source, handler);
            filesProcessed.add(inputFile);
            this.receiver.tableImportFinished();
        } catch (ParsingInterruptedException e) {
            // A regular (user-requested) stop still finishes the import cleanly.
            if (this.regularStop) {
                this.receiver.tableImportFinished();
            } else {
                this.hasErrors = true;
            }
        } catch (ParsingConverterException pce) {
            // already logged and added to the messages
            this.receiver.tableImportError();
            this.hasErrors = true;
            throw pce;
        } catch (Exception e) {
            String msg = "Error during parsing of data row: " + (this.currentRowNumber) + ", column: " + this.currentColIndex + ", current data: " + (this.chars == null ? "<n/a>" : "[" + this.chars.toString() + "]") + ", message: " + ExceptionUtil.getDisplay(e);
            LogMgr.logWarning("XmlDataFileParser.processOneFile()", msg);
            this.hasErrors = true;
            this.messages.append(msg);
            this.messages.appendNewLine();
            this.receiver.tableImportError();
            throw e;
        } finally {
            FileUtil.closeQuietely(in);
        }
    }

    /** Resets all per-file parsing state before the next file is processed. */
    @Override
    protected void resetForFile() {
        super.resetForFile();
        tableNameFromFile = null;
        ignoreCurrentRow = false;
        currentColIndex = 0;
        realColIndex = 0;
        columnLongValue = 0;
        isNull = false;
        chars = null;
        columns = null;
        importColumns = null;
    }

    /** Clears the current row buffer and resets the column cursors. */
    private void clearRowData() {
        for (int i = 0; i < this.realColCount; i++) {
            this.currentRow[i] = null;
        }
        this.currentColIndex = 0;
        this.realColIndex = 0;
    }

    /**
     * Creates the appropriate column data object and puts it
     * into rowData[currentColIndex].
     * {@link workbench.util.ValueConverter} is not used because
     * for most of the datatypes we have some special processing here.
     * Date and time can be initialized through the long value in the XML file,
     * numeric types contain the actual class to be used.
     *
     * @throws ParsingConverterException when a value cannot be converted and
     *         abortOnError is set
     */
    private void buildColumnData() throws ParsingConverterException {
        // Skip columns that were excluded from the import selection.
        if (importColumns != null && getColumnIndex(this.columns[currentColIndex].getColumnName()) < 0) return;
        this.currentRow[this.realColIndex] = null;

        if (!this.receiver.shouldProcessNextRow()) return;

        // the isNull flag will be set by the startElement method
        // as that is an attribute of the tag
        if (this.isNull) {
            this.realColIndex++;
            return;
        }

        int type = this.columns[this.realColIndex].getDataType();

        String value = this.chars.toString();
        if (trimValues && !SqlUtil.isBlobType(type)) {
            value = value.trim();
        }

        if (this.valueModifier != null) {
            value = this.valueModifier.modifyValue(this.columns[this.realColIndex], value);
        }

        try {
            if (SqlUtil.isCharacterType(type)) {
                // if clobs are exported as external files, than we'll have a filename in the
                // attribute (just like with BLOBS)
                if (this.columnDataFile == null) {
                    this.currentRow[this.realColIndex] = value;
                } else {
                    String fileDir = this.inputFile.getParent();
                    this.currentRow[this.realColIndex] = new File(fileDir, columnDataFile);
                }
            } else if (SqlUtil.isBlobType(type)) {
                if (columnDataFile != null) {
                    this.currentRow[this.realColIndex] = blobDecoder.decodeBlob(columnDataFile);
                } else {
                    this.currentRow[this.realColIndex] = blobDecoder.decodeBlob(value);
                }
            } else if (SqlUtil.isDateType(type)) {
                // For Date types we don't need the ValueConverter as already we
                // have a suitable long value that doesn't need parsing
                java.sql.Date d = new java.sql.Date(this.columnLongValue);
                if (type == Types.TIMESTAMP) {
                    this.currentRow[this.realColIndex] = new java.sql.Timestamp(d.getTime());
                } else {
                    this.currentRow[this.realColIndex] = d;
                }
            } else {
                // for all other types we can use the ValueConverter
                this.currentRow[this.realColIndex] = converter.convertValue(value, type);
            }
        } catch (Exception e) {
            String msg = ResourceMgr.getString("ErrConvertError");
            msg = StringUtil.replace(msg, "%type%", SqlUtil.getTypeName(this.columns[realColIndex].getDataType()));
            msg = StringUtil.replace(msg, "%column%", this.columns[realColIndex].getColumnName());
            msg = StringUtil.replace(msg, "%error%", e.getMessage());
            msg = StringUtil.replace(msg, "%value%", value);
            msg = StringUtil.replace(msg, "%row%", Integer.toString(this.currentRowNumber));

            this.messages.append(msg);
            this.messages.appendNewLine();
            if (this.abortOnError) {
                LogMgr.logError("XmlDataFileParser.buildColumnData()", msg, e);
                this.hasErrors = true;
                throw new ParsingConverterException();
            } else {
                this.messages.append(ResourceMgr.getString("ErrConvertWarning"));
                this.hasWarnings = true;
                LogMgr.logWarning("XmlDataFileParser.buildColumnData()", msg, null);
            }
        }

        this.realColIndex++;
    }

    /**
     * Resolves the target table: an explicitly configured name wins over the
     * name stored in the XML file; the identifier is looked up in the database
     * when a connection is available.
     */
    private TableIdentifier getImportTable() {
        String tname = (this.tableName == null ? tableNameFromFile : tableName);

        // this is possible when importing into a DataStore
        if (StringUtil.isEmptyString(tname)) return null;

        TableIdentifier id = new TableIdentifier(tname);

        // this is possible when importing into a DataStore
        if (this.connection == null) return id;

        TableIdentifier tbl = this.connection.getMetadata().findTable(id);
        if (tbl == null) {
            return id;
        }
        return tbl;
    }

    /**
     * Sends the resolved target table and column list to the receiver and
     * allocates the row buffer.
     */
    private void sendTableDefinition() throws SQLException {
        try {
            TableIdentifier tbl = getImportTable();
            checkTargetColumns(tbl);
            if (this.importColumns == null) {
                this.receiver.setTargetTable(tbl, Arrays.asList(this.columns));
            } else {
                List<ColumnIdentifier> cols = new ArrayList<>(this.realColCount);
                for (int i = 0; i < this.colCount; i++) {
                    if (getColumnIndex(this.columns[i].getColumnName()) > -1) {
                        cols.add(this.columns[i]);
                    }
                }
                this.receiver.setTargetTable(tbl, cols);
            }
            this.currentRow = new Object[this.realColCount];
        } catch (SQLException e) {
            this.currentRow = null;
            this.hasErrors = true;
            throw e;
        }
    }

    /** Returns false when any column value of the current row matches a column filter. */
    private boolean includeCurrentRow() {
        for (int colIndex = 0; colIndex < currentRow.length; colIndex++) {
            Object value = currentRow[colIndex];
            if (value != null) {
                String svalue = value.toString();
                if (isColumnFiltered(colIndex, svalue)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Forwards the current row to the receiver, applying the configured
     * error-handling policy (abort, ask the error handler, or continue).
     */
    private void sendRowData() throws SAXException, Exception {
        if (this.receiver != null) {
            try {
                if (includeCurrentRow()) {
                    this.receiver.processRow(this.currentRow);
                }
            } catch (Exception e) {
                LogMgr.logError("XmlDataFileParser.sendRowData()", "Error when sending row data to receiver", e);
                if (this.abortOnError) {
                    this.hasErrors = true;
                    throw e;
                }
                this.hasWarnings = true;
                if (this.errorHandler != null) {
                    int choice = errorHandler.getActionOnError(this.currentRowNumber + 1, null, null, ExceptionUtil.getDisplay(e, false));
                    if (choice == JobErrorHandler.JOB_ABORT) throw e;
                    if (choice == JobErrorHandler.JOB_IGNORE_ALL) {
                        // The user chose to ignore all further errors.
                        this.abortOnError = false;
                    }
                }
            }
        }
        if (this.cancelImport) throw new ParsingInterruptedException();
    }

    /** Switches between the long (verbose) and short XML tag names. */
    private void setUseVerboseFormat(boolean flag) {
        this.formatKnown = true;
        this.verboseFormat = flag;
        if (this.verboseFormat) {
            rowTag = XmlRowDataConverter.LONG_ROW_TAG;
            columnTag = XmlRowDataConverter.LONG_COLUMN_TAG;
        } else {
            rowTag = XmlRowDataConverter.SHORT_ROW_TAG;
            columnTag = XmlRowDataConverter.SHORT_COLUMN_TAG;
        }
    }

    /**
     * SAX callbacks that drive the outer parser's row/column state machine.
     * Each callback checks cancelImport so a running import can be interrupted.
     */
    private class SaxHandler
        extends DefaultHandler
    {
        private SaxHandler() {
            super();
        }

        @Override
        public void startDocument() throws SAXException {
            Thread.yield();
            if (cancelImport) throw new ParsingInterruptedException();
        }

        @Override
        public void endDocument() throws SAXException {
            Thread.yield();
            if (cancelImport) {
                throw new ParsingInterruptedException();
            }
        }

        @Override
        public void startElement(String namespaceURI, String sName, String qName, Attributes attrs) throws SAXException {
            Thread.yield();
            if (cancelImport) {
                throw new ParsingInterruptedException();
            }
            if (qName.equals(rowTag)) {
                // row definition ended, start a new row
                clearRowData();
                chars = null;
            } else if (qName.equals(columnTag)) {
                chars = new StringBuilder();
                // Optional per-column attributes: a pre-parsed long value (for
                // date/time types), a null flag and an external data file name.
                String attrValue = attrs.getValue(XmlRowDataConverter.ATTR_LONGVALUE);
                if (attrValue != null) {
                    try {
                        columnLongValue = Long.parseLong(attrValue);
                    } catch (NumberFormatException e) {
                        LogMgr.logError("XmlDataFileParser.startElement()", "Error converting longvalue", e);
                    }
                }
                attrValue = attrs.getValue(XmlRowDataConverter.ATTR_NULL);
                isNull = "true".equals(attrValue);
                columnDataFile = attrs.getValue(XmlRowDataConverter.ATTR_DATA_FILE);
            } else {
                chars = null;
            }
        }

        @Override
        public void endElement(String namespaceURI, String sName, String qName) throws SAXException {
            if (cancelImport) {
                throw new ParsingInterruptedException();
            }
            if (qName.equals(rowTag)) {
                if (!receiver.shouldProcessNextRow()) {
                    receiver.nextRowSkipped();
                } else {
                    if (!ignoreCurrentRow) {
                        try {
                            sendRowData();
                        } catch (Exception e) {
                            // don't need to log the error as sendRowData() has already done that.
                            if (abortOnError) {
                                throw new ParsingInterruptedException();
                            }
                        }
                    }
                }
                ignoreCurrentRow = false;
                currentRowNumber++;
            } else if (qName.equals(columnTag)) {
                buildColumnData();
                currentColIndex++;
            }
            chars = null;
        }

        @Override
        public void characters(char[] buf, int offset, int len) throws SAXException {
            Thread.yield();
            if (cancelImport) {
                throw new ParsingInterruptedException();
            }
            // chars is only non-null while inside a column tag (see startElement).
            if (chars != null) {
                chars.append(buf, offset, len);
            }
        }

        /** Only implemented to have even more possibilities for cancelling the import */
        @Override
        public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
            Thread.yield();
            if (cancelImport) {
                throw new ParsingInterruptedException();
            }
        }

        @Override
        public void processingInstruction(String target, String data) throws SAXException {
            Thread.yield();
            if (cancelImport) {
                throw new ParsingInterruptedException();
            }
        }

        @Override
        public void error(SAXParseException e) throws SAXParseException {
            String msg = "XML Parse error in line=" + e.getLineNumber() + ",data-row=" + (currentRowNumber);
            LogMgr.logError("XmlDataFileParser.error()", msg, e);
            ignoreCurrentRow = true;
        }

        @Override
        public void fatalError(SAXParseException e) throws SAXParseException {
            String msg = "Fatal XML parse error in line=" + e.getLineNumber() + ",data-row=" + (currentRowNumber) + "\nRest of file will be ignored!";
            LogMgr.logError("XmlDataFileParser.fatalError()", msg, e);
            ignoreCurrentRow = true;
        }

        // dump warnings too
        @Override
        public void warning(SAXParseException err) throws SAXParseException {
            messages.append(ExceptionUtil.getDisplay(err));
            messages.appendNewLine();
            if (cancelImport) {
                throw err;
            }
        }
    }
}
/**
 * Copyright (C) 2013-2015 VCNC Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package kr.co.vcnc.haeinsa;

import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
import java.util.List;

import kr.co.vcnc.haeinsa.thrift.generated.TRowLock;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HTableFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PoolMap;
import org.apache.hadoop.hbase.util.PoolMap.PoolType;

/**
 * Provide pooling pattern to HaeinsaTable.
 *
 * Tables handed out by {@link #getTable(String)} are wrapped in
 * {@link PooledHaeinsaTable}; closing the wrapper returns the underlying
 * table to the pool instead of closing it. The structure mirrors HBase's
 * HTablePool and delegates the actual pooling semantics to {@link PoolMap}.
 */
public class HaeinsaTablePool implements Closeable {
    // { tableName -> HaeinsaTable }
    private final PoolMap<String, HaeinsaTableIfaceInternal> tables;
    private final int maxSize;
    private final PoolType poolType;
    private final Configuration config;
    // null if use default factory
    private final HaeinsaTableIfaceFactory tableFactory;

    /**
     * Default Constructor. Default HBaseConfiguration and no limit on pool
     * size.
     */
    public HaeinsaTablePool() {
        this(HBaseConfiguration.create(), Integer.MAX_VALUE);
    }

    /**
     * Constructor to set maximum versions and use the specified configuration.
     *
     * @param config configuration
     * @param maxSize maximum number of references to keep for each table
     */
    public HaeinsaTablePool(final Configuration config, final int maxSize) {
        this(config, maxSize, null, null);
    }

    /**
     * Constructor to set maximum versions and use the specified configuration
     * and table factory.
     *
     * @param config configuration
     * @param maxSize maximum number of references to keep for each table
     * @param tableFactory table factory
     */
    public HaeinsaTablePool(final Configuration config, final int maxSize, final HaeinsaTableIfaceFactory tableFactory) {
        this(config, maxSize, tableFactory, PoolType.Reusable);
    }

    /**
     * Constructor to set maximum versions and use the specified configuration
     * and pool type.
     *
     * @param config configuration
     * @param maxSize maximum number of references to keep for each table
     * @param poolType pool type which is one of {@link PoolType#Reusable} or
     *        {@link PoolType#ThreadLocal}
     */
    public HaeinsaTablePool(final Configuration config, final int maxSize, final PoolType poolType) {
        this(config, maxSize, null, poolType);
    }

    /**
     * Constructor to set maximum versions and use the specified configuration,
     * table factory and pool type. The HTablePool supports the
     * {@link PoolType#Reusable} and {@link PoolType#ThreadLocal}. If the pool
     * type is null or not one of those two values, then it will default to
     * {@link PoolType#Reusable}.
     *
     * @param config configuration
     * @param maxSize maximum number of references to keep for each table
     * @param tableFactory table factory
     * @param poolType pool type which is one of {@link PoolType#Reusable} or
     *        {@link PoolType#ThreadLocal}
     */
    public HaeinsaTablePool(final Configuration config, final int maxSize, final HaeinsaTableIfaceFactory tableFactory, PoolType poolType) {
        // NOTE(review): the inherited comment claims a fresh configuration
        // instance is made, but a non-null config is stored as-is (only the
        // null case creates a new Configuration) — verify whether callers
        // mutate the config after constructing the pool.
        this.config = config == null ? new Configuration() : config;
        this.maxSize = maxSize;
        this.tableFactory = tableFactory == null ? new DefaultHaeinsaTableIfaceFactory(new HTableFactory()) : tableFactory;
        if (poolType == null) {
            this.poolType = PoolType.Reusable;
        } else {
            // Any pool type other than Reusable/ThreadLocal falls back to Reusable.
            switch (poolType) {
            case Reusable:
            case ThreadLocal: {
                this.poolType = poolType;
                break;
            }
            default: {
                this.poolType = PoolType.Reusable;
                break;
            }
            }
        }
        this.tables = new PoolMap<String, HaeinsaTableIfaceInternal>(this.poolType, this.maxSize);
    }

    /**
     * Get a reference to the specified table from the pool.
     *
     * @param tableName table name
     * @return a reference to the specified table
     * @throws RuntimeException if there is a problem instantiating the HTable
     */
    public HaeinsaTableIface getTable(String tableName) {
        // call the old getTable implementation renamed to findOrCreateTable
        HaeinsaTableIfaceInternal table = findOrCreateTable(tableName);
        // return a proxy table so when user closes the proxy, the actual table
        // will be returned to the pool
        try {
            return new PooledHaeinsaTable(table);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }

    /**
     * Get a reference to the specified internal table interface from the pool.
     *
     * @param tableName table name
     * @return a reference to the specified table
     * @throws RuntimeException if there is a problem instantiating the HTable
     */
    HaeinsaTableIfaceInternal getTableInternal(String tableName) {
        // call the old getTable implementation renamed to findOrCreateTable
        HaeinsaTableIfaceInternal table = findOrCreateTable(tableName);
        // return a proxy table so when user closes the proxy, the actual table
        // will be returned to the pool
        try {
            return new PooledHaeinsaTable(table);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }

    /**
     * Get a reference to the specified table from the pool.
     * <p>
     * Create a new one if one is not available.
     * <p>
     * NOTE(review): correctness relies on PoolMap.get() checking a table OUT of
     * the pool (as in HBase's HTablePool); if get() merely peeked, two callers
     * could share one instance — confirm against the PoolMap version in use.
     *
     * @param tableName table name
     * @return a reference to the specified table
     * @throws RuntimeException if there is a problem instantiating the HTable
     */
    private HaeinsaTableIfaceInternal findOrCreateTable(String tableName) {
        HaeinsaTableIfaceInternal table = tables.get(tableName);
        if (table == null) {
            table = createHTable(tableName);
        }
        return table;
    }

    /**
     * Get a reference to the specified table from the pool.
     * <p>
     * Create a new one if one is not available.
     *
     * @param tableName table name
     * @return a reference to the specified table
     * @throws RuntimeException if there is a problem instantiating the HTable
     */
    public HaeinsaTableIface getTable(byte[] tableName) {
        return getTable(Bytes.toString(tableName));
    }

    /**
     * Get a reference to the specified internal table interface from the pool.
     * <p>
     * Create a new one if one is not available.
     *
     * @param tableName table name
     * @return a reference to the specified table
     * @throws RuntimeException if there is a problem instantiating the HTable
     */
    HaeinsaTableIfaceInternal getTableInternal(byte[] tableName) {
        return getTableInternal(Bytes.toString(tableName));
    }

    /**
     * Puts the specified HaeinsaTableIface back into the pool.
     * <p>
     * If the pool already contains <i>maxSize</i> references to the table, then
     * the table instance gets closed after flushing buffered edits.
     *
     * @param table table
     */
    private void returnTable(HaeinsaTableIfaceInternal table) throws IOException {
        // this is the old putTable method renamed and made private
        String tableName = Bytes.toString(table.getTableName());
        if (tables.size(tableName) >= maxSize) {
            // release table instance since we're not reusing it
            this.tables.remove(tableName, table);
            release(table);
            return;
        }
        tables.put(tableName, table);
    }

    /** Creates a new table instance through the configured factory. */
    protected HaeinsaTableIfaceInternal createHTable(String tableName) {
        return (HaeinsaTableIfaceInternal) this.tableFactory.createHaeinsaTableIface(config, Bytes.toBytes(tableName));
    }

    /** Permanently disposes of a table instance via the factory. */
    private void release(HaeinsaTableIface table) throws IOException {
        this.tableFactory.releaseHaeinsaTableIface(table);
    }

    /**
     * Closes all the HaeinsaTable instances, belonging to the given table, in the
     * table pool.
     * <p>
     * Note: this is a 'shutdown' of the given table pool and different from
     * {@link #release(HaeinsaTableIface)}, that is used to return the table
     * instance to the pool for future re-use.
     */
    public void closeTablePool(final String tableName) throws IOException {
        Collection<HaeinsaTableIfaceInternal> tables = this.tables.values(tableName);
        if (tables != null) {
            for (HaeinsaTableIface table : tables) {
                release(table);
            }
        }
        this.tables.remove(tableName);
    }

    /**
     * See {@link #closeTablePool(String)}.
     */
    public void closeTablePool(final byte[] tableName) throws IOException {
        closeTablePool(Bytes.toString(tableName));
    }

    /**
     * Closes all the HTable instances, belonging to all tables in the table
     * pool.
     * <p>
     * Note: this is a 'shutdown' of all the table pools.
     * <p>
     * NOTE(review): closeTablePool() removes entries from the map while this
     * loop iterates keySet(); depending on PoolMap's keySet implementation this
     * may throw ConcurrentModificationException — verify (HBase's HTablePool
     * had the same pattern).
     */
    @Override
    public void close() throws IOException {
        for (String tableName : tables.keySet()) {
            closeTablePool(tableName);
        }
        this.tables.clear();
    }

    /** Returns the number of pooled (checked-in) instances for the given table. */
    int getCurrentPoolSize(String tableName) {
        return tables.size(tableName);
    }

    /**
     * Proxy around a pooled table: every operation is delegated to the wrapped
     * instance, except {@link #close()}, which returns the instance to the pool
     * instead of closing it.
     */
    class PooledHaeinsaTable implements HaeinsaTableIfaceInternal {
        private HaeinsaTableIfaceInternal table;

        public PooledHaeinsaTable(HaeinsaTableIfaceInternal table) throws IOException {
            this.table = table;
        }

        @Override
        public byte[] getTableName() {
            return table.getTableName();
        }

        @Override
        public Configuration getConfiguration() {
            return table.getConfiguration();
        }

        @Override
        public HTableDescriptor getTableDescriptor() throws IOException {
            return table.getTableDescriptor();
        }

        @Override
        public HaeinsaResult get(HaeinsaTransaction tx, HaeinsaGet get) throws IOException {
            return table.get(tx, get);
        }

        @Override
        public HaeinsaResultScanner getScanner(HaeinsaTransaction tx, HaeinsaScan scan) throws IOException {
            return table.getScanner(tx, scan);
        }

        @Override
        public HaeinsaResultScanner getScanner(HaeinsaTransaction tx, HaeinsaIntraScan intraScan) throws IOException {
            return table.getScanner(tx, intraScan);
        }

        @Override
        public HaeinsaResultScanner getScanner(HaeinsaTransaction tx, byte[] family) throws IOException {
            return table.getScanner(tx, family);
        }

        @Override
        public HaeinsaResultScanner getScanner(HaeinsaTransaction tx, byte[] family, byte[] qualifier) throws IOException {
            return table.getScanner(tx, family, qualifier);
        }

        @Override
        public void put(HaeinsaTransaction tx, HaeinsaPut put) throws IOException {
            table.put(tx, put);
        }

        @Override
        public void put(HaeinsaTransaction tx, List<HaeinsaPut> puts) throws IOException {
            table.put(tx, puts);
        }

        @Override
        public void delete(HaeinsaTransaction tx, HaeinsaDelete delete) throws IOException {
            table.delete(tx, delete);
        }

        @Override
        public void delete(HaeinsaTransaction tx, List<HaeinsaDelete> deletes) throws IOException {
            table.delete(tx, deletes);
        }

        // Returns the wrapped table to the pool rather than closing it.
        @Override
        public void close() throws IOException {
            returnTable(table);
        }

        HaeinsaTableIfaceInternal getWrappedTable() {
            return table;
        }

        @Override
        public void checkSingleRowLock(HaeinsaRowTransaction rowState, byte[] row) throws IOException {
            table.checkSingleRowLock(rowState, row);
        }

        @Override
        public void commitSingleRowPutOnly(HaeinsaRowTransaction rowState, byte[] row) throws IOException {
            table.commitSingleRowPutOnly(rowState, row);
        }

        @Override
        public void prewrite(HaeinsaRowTransaction rowTxState, byte[] row, boolean isPrimary) throws IOException {
            table.prewrite(rowTxState, row, isPrimary);
        }

        @Override
        public void applyMutations(HaeinsaRowTransaction rowTxState, byte[] row) throws IOException {
            table.applyMutations(rowTxState, row);
        }

        @Override
        public void makeStable(HaeinsaRowTransaction rowTxState, byte[] row) throws IOException {
            table.makeStable(rowTxState, row);
        }

        @Override
        public void commitPrimary(HaeinsaRowTransaction rowTxState, byte[] row) throws IOException {
            table.commitPrimary(rowTxState, row);
        }

        @Override
        public TRowLock getRowLock(byte[] row) throws IOException {
            return table.getRowLock(row);
        }

        @Override
        public void abortPrimary(HaeinsaRowTransaction rowTxState, byte[] row) throws IOException {
            table.abortPrimary(rowTxState, row);
        }

        @Override
        public void deletePrewritten(HaeinsaRowTransaction rowTxState, byte[] row) throws IOException {
            table.deletePrewritten(rowTxState, row);
        }
    }
}
/*
 * #%L
 * Protempa Framework
 * %%
 * Copyright (C) 2012 - 2013 Emory University
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.protempa.proposition.interval;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.protempa.graph.BellmanFord;
import org.protempa.graph.DirectedGraph;
import org.protempa.graph.Weight;
import org.protempa.graph.WeightFactory;

/**
 * Temporal constraint network for solving the simple temporal problem (STP) as
 * defined in Dechter, R. et al. Temporal Constraint Networks. Artif. Intell.
 * 1991;49:61-95.
 * <p>
 * Each interval contributes two nodes to the distance graph (its start and its
 * finish); a distinguished "time zero" node anchors absolute times. All query
 * results are cached and invalidated whenever the graph is mutated.
 *
 * @author Andrew Post
 */
final class ConstraintNetwork {

    private static final String timeZero = "0";
    private final List<Interval> intervals;
    private final DirectedGraph directedGraph;
    // Lazily computed query results; null means "not computed since the last
    // mutation of the graph".
    private Weight calcMinDuration;
    private Weight calcMaxDuration;
    private Weight calcMinFinish;
    private Weight calcMaxFinish;
    private Weight calcMinStart;
    private Weight calcMaxStart;
    private Map<?, Weight> shortestDistancesFromTimeZeroSource;
    private Map<?, Weight> shortestDistancesFromTimeZeroDestination;

    /**
     * Constructs an empty <code>ConstraintNetwork</code> with the default
     * initial interval capacity (10).
     */
    ConstraintNetwork() {
        this(10);
    }

    /**
     * Constructs an empty <code>ConstraintNetwork</code> with the specified
     * initial interval capacity.
     *
     * @param initialCapacity
     *            the initial interval capacity.
     */
    ConstraintNetwork(int initialCapacity) {
        // Two graph nodes (start, finish) per interval, plus the time-zero node.
        directedGraph = new DirectedGraph(initialCapacity * 2 + 1);
        directedGraph.add(timeZero);
        intervals = new ArrayList<>(initialCapacity);
    }

    /**
     * Drops every cached query result. Must be called whenever the underlying
     * distance graph changes.
     */
    private void invalidateCachedResults() {
        calcMinDuration = null;
        calcMaxDuration = null;
        calcMinFinish = null;
        calcMaxFinish = null;
        calcMinStart = null;
        calcMaxStart = null;
        shortestDistancesFromTimeZeroSource = null;
        shortestDistancesFromTimeZeroDestination = null;
    }

    /**
     * Lazily computes the single-destination shortest distances to time zero.
     *
     * @return the distance map, never <code>null</code>.
     * @throws IllegalStateException if the graph contains a negative cycle.
     */
    private Map<?, Weight> shortestDistancesToTimeZero() {
        if (shortestDistancesFromTimeZeroDestination == null) {
            shortestDistancesFromTimeZeroDestination = BellmanFord.calcShortestDistances(timeZero,
                    directedGraph, BellmanFord.Mode.DESTINATION);
            if (shortestDistancesFromTimeZeroDestination == null) {
                throw new IllegalStateException("Negative cycle detected!");
            }
        }
        return shortestDistancesFromTimeZeroDestination;
    }

    /**
     * Lazily computes the single-source shortest distances from time zero.
     *
     * @return the distance map, never <code>null</code>.
     * @throws IllegalStateException if the graph contains a negative cycle.
     */
    private Map<?, Weight> shortestDistancesFromTimeZero() {
        if (shortestDistancesFromTimeZeroSource == null) {
            shortestDistancesFromTimeZeroSource = BellmanFord.calcShortestDistances(timeZero,
                    directedGraph, BellmanFord.Mode.SOURCE);
            if (shortestDistancesFromTimeZeroSource == null) {
                throw new IllegalStateException("Negative cycle detected!");
            }
        }
        return shortestDistancesFromTimeZeroSource;
    }

    /**
     * Removes all intervals and constraints, leaving only the time-zero node.
     */
    synchronized void clear() {
        directedGraph.clear();
        intervals.clear();
        directedGraph.add(timeZero);
        invalidateCachedResults();
    }

    /**
     * Remove the distance relation between two intervals, if such a relation
     * exists.
     *
     * @param i1
     *            an interval.
     * @param i2
     *            another interval.
     * @return true if the graph changed as a result of this operation, false
     *         otherwise.
     */
    synchronized boolean removeRelation(Interval i1, Interval i2) {
        if (i1 == i2 || !containsInterval(i1) || !containsInterval(i2)) {
            return false;
        }
        Object i1Start = i1.getStart();
        Object i1Finish = i1.getFinish();
        Object i2Start = i2.getStart();
        Object i2Finish = i2.getFinish();
        // Null out every edge between the two intervals' endpoints, in both
        // directions.
        directedGraph.setEdge(i1Start, i2Start, null);
        directedGraph.setEdge(i1Start, i2Finish, null);
        directedGraph.setEdge(i2Start, i1Start, null);
        directedGraph.setEdge(i2Start, i1Finish, null);
        directedGraph.setEdge(i1Finish, i2Start, null);
        directedGraph.setEdge(i1Finish, i2Finish, null);
        directedGraph.setEdge(i2Finish, i1Start, null);
        directedGraph.setEdge(i2Finish, i1Finish, null);
        invalidateCachedResults();
        // NOTE(review): this returns true whenever both intervals are present
        // and distinct, even if no edge actually existed between them — confirm
        // callers rely only on this weaker contract.
        return true;
    }

    /**
     * Remove an interval from this graph.
     *
     * @param i
     *            an interval.
     * @return true if the graph changed as a result of this operation, false
     *         otherwise.
     */
    synchronized boolean removeInterval(Interval i) {
        // Invalidate unconditionally, matching the original behavior even when
        // the interval turns out not to be present.
        invalidateCachedResults();
        if (directedGraph.remove(i.getStart()) != null) {
            if (directedGraph.remove(i.getFinish()) == null) {
                // Start was present but finish was not: the network is corrupt.
                throw new IllegalStateException();
            }
            intervals.remove(i);
            return true;
        } else {
            return false;
        }
    }

    /**
     * Determine if an interval is contained in this graph.
     *
     * @param i
     *            an interval.
     * @return <code>true</code> if the given interval is found,
     *         <code>false</code> otherwise.
     */
    private boolean containsInterval(Interval i) {
        if (i != null) {
            return directedGraph.contains(i.getStart())
                    && directedGraph.contains(i.getFinish());
        } else {
            return false;
        }
    }

    /**
     * Add an interval to this graph.
     *
     * @param i
     *            an interval.
     * @return <code>true</code> if successful, <code>false</code> if the
     *         interval could not be added. If there was a problem adding the
     *         interval, then the constraint network may be in an inconsistent
     *         state (e.g., part of the interval got added).
     */
    synchronized boolean addInterval(Interval i) {
        if (i == null || containsInterval(i) || !intervals.add(i)) {
            return false;
        }
        Object iStart = i.getStart();
        Object iFinish = i.getFinish();
        directedGraph.add(iStart);
        directedGraph.add(iFinish);
        // Duration constraints: start -> finish holds the maximum length,
        // finish -> start holds the negated minimum length.
        Weight mindur = i.getSpecifiedMinimumLength();
        Weight maxdur = i.getSpecifiedMaximumLength();
        directedGraph.setEdge(iStart, iFinish, maxdur);
        directedGraph.setEdge(iFinish, iStart, mindur.invertSign());
        // Absolute start constraints relative to time zero.
        Weight minstart = i.getSpecifiedMinimumStart();
        Weight maxstart = i.getSpecifiedMaximumStart();
        directedGraph.setEdge(timeZero, iStart, maxstart);
        directedGraph.setEdge(iStart, timeZero, minstart.invertSign());
        // Absolute finish constraints relative to time zero.
        Weight minfinish = i.getSpecifiedMinimumFinish();
        Weight maxfinish = i.getSpecifiedMaximumFinish();
        directedGraph.setEdge(timeZero, iFinish, maxfinish);
        directedGraph.setEdge(iFinish, timeZero, minfinish.invertSign());
        invalidateCachedResults();
        return true;
    }

    /**
     * Calculates and returns the minimum path from time zero to the start of an
     * interval.
     *
     * @return a <code>Weight</code> object.
     */
    synchronized Weight getMinimumStart() {
        if (calcMinStart == null) {
            // Find the shortest distance from a start to time zero.
            Weight result = WeightFactory.NEG_INFINITY;
            Map<?, Weight> d = shortestDistancesToTimeZero();
            for (int i = 0, n = intervals.size(); i < n; i++) {
                Object start = intervals.get(i).getStart();
                result = Weight.max(result, d.get(start));
            }
            calcMinStart = result.invertSign();
        }
        return calcMinStart;
    }

    /**
     * Calculates and returns the maximum path from time zero to the start of an
     * interval.
     *
     * @return a <code>Weight</code> object.
     */
    synchronized Weight getMaximumStart() {
        if (calcMaxStart == null) {
            // Find the longest distance from time zero to a start.
            Weight result = WeightFactory.POS_INFINITY;
            Map<?, Weight> d = shortestDistancesFromTimeZero();
            for (int i = 0, n = intervals.size(); i < n; i++) {
                Object start = intervals.get(i).getStart();
                result = Weight.min(result, d.get(start));
            }
            calcMaxStart = result;
        }
        return calcMaxStart;
    }

    /**
     * Calculates and returns the minimum path from time zero to the finish of
     * an interval.
     *
     * @return a <code>Weight</code> object.
     */
    synchronized Weight getMinimumFinish() {
        if (calcMinFinish == null) {
            // Find the shortest distance from a finish to time zero.
            Weight result = WeightFactory.POS_INFINITY;
            Map<?, Weight> d = shortestDistancesToTimeZero();
            for (int i = 0, n = intervals.size(); i < n; i++) {
                Object finish = intervals.get(i).getFinish();
                result = Weight.min(result, d.get(finish));
            }
            calcMinFinish = result.invertSign();
        }
        return calcMinFinish;
    }

    /**
     * Calculates and returns the maximum path from time zero to the finish of
     * an interval.
     *
     * @return a <code>Weight</code> object.
     */
    synchronized Weight getMaximumFinish() {
        if (calcMaxFinish == null) {
            // Find the longest distance from time zero to a finish.
            Weight result = WeightFactory.NEG_INFINITY;
            Map<?, Weight> d = shortestDistancesFromTimeZero();
            for (int i = 0, n = intervals.size(); i < n; i++) {
                Object finish = intervals.get(i).getFinish();
                result = Weight.max(result, d.get(finish));
            }
            calcMaxFinish = result;
        }
        return calcMaxFinish;
    }

    /**
     * Calculates and returns the maximum time distance from the start of an
     * interval to the finish of an interval.
     *
     * @return a <code>Weight</code> object.
     */
    synchronized Weight getMaximumDuration() {
        if (calcMaxDuration == null) {
            Weight max = WeightFactory.ZERO;
            // Run single-source shortest paths from every start and take the
            // largest start-to-finish distance found.
            for (int i = 0, n = intervals.size(); i < n; i++) {
                Object start = intervals.get(i).getStart();
                Map<?, Weight> d = BellmanFord.calcShortestDistances(start,
                        directedGraph, BellmanFord.Mode.SOURCE);
                if (d == null) {
                    throw new IllegalStateException("Negative cycle detected!");
                }
                for (int j = 0; j < n; j++) {
                    Object finish = intervals.get(j).getFinish();
                    max = Weight.max(max, d.get(finish));
                }
            }
            calcMaxDuration = max;
        }
        return calcMaxDuration;
    }

    /**
     * Calculates and returns the minimum time distance from the start of an
     * interval to the finish of an interval.
     *
     * @return a <code>Weight</code> object.
     */
    synchronized Weight getMinimumDuration() {
        if (calcMinDuration == null) {
            Weight min = WeightFactory.POS_INFINITY;
            // Run single-source shortest paths from every finish; the smallest
            // finish-to-start distance, sign-inverted, is the minimum duration.
            for (int i = 0, n = intervals.size(); i < n; i++) {
                Object finish = intervals.get(i).getFinish();
                Map<?, Weight> d = BellmanFord.calcShortestDistances(finish,
                        directedGraph, BellmanFord.Mode.SOURCE);
                if (d == null) {
                    throw new IllegalStateException("Negative cycle detected!");
                }
                for (int j = 0; j < n; j++) {
                    Object start = intervals.get(j).getStart();
                    min = Weight.min(min, d.get(start));
                }
            }
            calcMinDuration = min.invertSign();
        }
        return calcMinDuration;
    }

    /**
     * Returns whether this constraint network is consistent. A constraint
     * network is consistent if and only if its distance graph has no negative
     * cycles.
     *
     * @return <code>true</code> if this network is consistent,
     *         <code>false</code> otherwise.
     */
    synchronized boolean getConsistent() {
        return DirectionalPathConsistency.getConsistent(directedGraph);
    }
}
package apoc.couchbase; import apoc.Extended; import apoc.couchbase.document.CouchbaseByteArrayDocument; import apoc.couchbase.document.CouchbaseJsonDocument; import apoc.couchbase.document.CouchbaseQueryResult; import apoc.couchbase.document.CouchbaseUtils; import apoc.result.BooleanResult; import apoc.util.MissingDependencyException; import com.couchbase.client.java.Collection; import com.couchbase.client.java.BinaryCollection; import com.couchbase.client.java.Cluster; import com.couchbase.client.java.json.JsonObject; import com.couchbase.client.java.kv.GetResult; import com.couchbase.client.java.kv.MutationResult; import com.couchbase.client.java.query.QueryOptions; import org.neo4j.procedure.Description; import org.neo4j.procedure.Name; import org.neo4j.procedure.Procedure; import java.util.List; import java.util.Map; import java.util.stream.Stream; /** * Neo4j Procedures for <b>Couchbase integration</b>. * <p/> * All of the operations performed against the Couchbase Server are done through * a {@link CouchbaseConnection}. * <br/> * A {@link CouchbaseConnection} can be created via the {@link CouchbaseManager} * .{@link CouchbaseManager#getConnection getConnection} method. 
* <p/> * Available operations are: * <ul> * <li>{@link #get} to retrieve a json document by its unique ID</li> * <li>{@link #exists} to check whether a json document with the given ID does exist</li> * <li>{@link #insert} to insert a json document if it does not exist already</li> * <li>{@link #upsert} to insert or overwrite a json document</li> * <li>{@link #remove} to remove the json document identified by its unique ID</li> * <li>{@link #replace} to replace the content of the json document identified by its unique ID</li> * <li>{@link #append} to append a json document's content to an existing one</li> * <li>{@link #prepend} to prepend a json document's content to an existing one</li> * </ul> * <p/> * N1QL query can be executed via the following methods: * <ul> * <li>{@link #query} for plain un-parameterized N1QL statements</li> * <li>{@link #posParamsQuery} for N1QL statements with positional parameters</li> * <li>{@link #namedParamsQuery} for N1QL statements with named parameters</li> * </ul> * For instance, after inserting a JSON document this way: * <p/> * <code> * call apoc.couchbase.insert(['localhost'], 'default', 'artist:vincent_van_gogh', '{"firstName":"Vincent","secondName":"Willem","lastName":"Van Gogh","notableWorks":["Starry Night","Sunflowers","Bedroom in Arles","Portrait of Dr Gachet","Sorrow"]}') * </code> * <p/> * you can read the just inserted document via: * <p/> * <code> * call apoc.couchbase.query(['localhost'], 'default', 'select * from default') yield queryResult<br/> * unwind queryResult as queryResultRow<br/> * call apoc.convert.toMap(queryResultRow) yield value as queryResultRowMap<br/> * with queryResultRowMap.default as content<br/> * return content.firstName, content.secondName, content.lastName, content.notableWorks * </code> * <p/> * Using JSON fields instead of the &quot;*&quot; notation makes things a bit easier: * <p/> * <code> * call apoc.couchbase.query(['localhost'], 'default', 'select firstName, secondName, lastName, 
notableWorks from default') yield queryResult<br/> * unwind queryResult as queryRow<br/> * return queryRow.firstName, queryRow.secondName, queryRow.lastName, queryRow.notableWorks * </code> * * @author inserpio * @since 15.8.2016 */ @Extended public class Couchbase { /** * Retrieves a {@link GetResult} by its unique ID. * <p/> * Example: * <code>call apoc.couchbase.get('localhost', 'default', 'artist:vincent_van_gogh') yield id, expiry, cas, mutationToken, content</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param documentId the unique ID of the document * @return the found {@link CouchbaseJsonDocument} or null if not found * @see Collection#get(String) */ @Procedure @Description("apoc.couchbase.get(hostOrKey, bucket, documentId) yield id, expiry, cas, mutationToken, content - retrieves a couchbase json document by its unique ID.") public Stream<CouchbaseJsonDocument> get(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("documentId") String documentId, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { final GetResult getResult = couchbaseConnection.get(documentId); return getResult == null ? Stream.empty() : Stream.of(new CouchbaseJsonDocument(getResult, documentId)); } } /** * Check whether a document with the given ID does exist. * <p/> * Example: * <code>CALL apoc.couchbase.exists('localhost', 'default', 'artist:vincent_van_gogh') yield value</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param documentId the unique ID of the document * @return true if it exists, false otherwise. 
* @see Collection#exists(String) */ @Procedure @Description("apoc.couchbase.exists(hostOrKey, bucket, documentId) yield value - check whether a couchbase json document with the given ID does exist.") public Stream<BooleanResult> exists(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("documentId") String documentId, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { return Stream.of(new BooleanResult(couchbaseConnection.exists(documentId))); } } /** * Insert a document if it does not exist already. * <p/> * Example: * <code>CALL apoc.couchbase.insert('localhost', 'default', 'artist:vincent_van_gogh', '{"firstName":"Vincent","secondName":"Willem","lastName":"Van Gogh","notableWorks":["Starry Night","Sunflowers","Bedroom in Arles","Portrait of Dr Gachet","Sorrow"]}') yield id, expiry, cas, mutationToken, content</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param documentId the unique ID of the document * @param json the JSON String representing the document to store * @return the newly created document * @see Collection#insert(String, Object) */ @Procedure @Description("apoc.couchbase.insert(hostOrKey, bucket, documentId, jsonDocument) yield id, expiry, cas, mutationToken, content - insert a couchbase json document with its unique ID.") public Stream<CouchbaseJsonDocument> insert(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("documentId") String documentId, @Name("json") String json, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { final MutationResult insert = couchbaseConnection.insert(documentId, json); GetResult 
getResult = couchbaseConnection.get(documentId); return Stream.of(new CouchbaseJsonDocument(getResult, documentId, insert.mutationToken().orElse(null))); } } /** * Insert or overwrite a document. * <p/> * Example: * <code>CALL apoc.couchbase.upsert('localhost', 'default', 'artist:vincent_van_gogh', '{"firstName":"Vincent","secondName":"Willem","lastName":"Van Gogh","notableWorks":["Starry Night","Sunflowers","Bedroom in Arles","Portrait of Dr Gachet","Sorrow"]}') yield id, expiry, cas, mutationToken, content</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param documentId the unique ID of the document * @param json the JSON String representing the document to store * @return the newly created or overwritten document or null in * case of exception * @see Collection#upsert(String, Object) */ @Procedure @Description("apoc.couchbase.upsert(hostOrKey, bucket, documentId, jsonDocument) yield id, expiry, cas, mutationToken, content - insert or overwrite a couchbase json document with its unique ID.") public Stream<CouchbaseJsonDocument> upsert(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("documentId") String documentId, @Name("json") String json, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { final MutationResult upsert = couchbaseConnection.upsert(documentId, json); GetResult getResult = couchbaseConnection.get(documentId); return Stream.of(new CouchbaseJsonDocument(getResult, documentId, upsert.mutationToken().orElse(null))); } } /** * Append a document's content to an existing one. 
* <p/> * Example: * <code>CALL apoc.couchbase.append('localhost', 'default', 'artist:vincent_van_gogh', 'hello world'.getBytes()) yield id, expiry, cas, mutationToken, content</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param documentId the unique ID of the document * @param content the byte[] representing the document to append * @return the updated document or null in case of exception * @see BinaryCollection#append(String, byte[]) */ @Procedure @Description("apoc.couchbase.append(hostOrKey, bucket, documentId, content) yield id, expiry, cas, mutationToken, content - append a couchbase json document to an existing one.") public Stream<CouchbaseByteArrayDocument> append(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("documentId") String documentId, @Name("content") byte[] content, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { final MutationResult append = couchbaseConnection.append(documentId, content); GetResult getResult = couchbaseConnection.getBinary(documentId); return Stream.of(new CouchbaseByteArrayDocument(getResult, documentId, append.mutationToken().orElse(null))); } } /** * Prepend a document's content to an existing one. 
* <p/> * Example: * <code>CALL apoc.couchbase.prepend('localhost', 'default', 'artist:vincent_van_gogh', 'hello world'.getBytes()) yield id, expiry, cas, mutationToken, content</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param documentId the unique ID of the document * @param content the byte[] representing the document to prepend * @return the updated document or null in case of exception * @see BinaryCollection#prepend(String, byte[]) */ @Procedure @Description("apoc.couchbase.prepend(hostOrKey, bucket, documentId, content) yield id, expiry, cas, mutationToken, content - prepend a couchbase json document to an existing one.") public Stream<CouchbaseByteArrayDocument> prepend(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("documentId") String documentId, @Name("content") byte[] content, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { final MutationResult prepend = couchbaseConnection.prepend(documentId, content); GetResult getResult = couchbaseConnection.getBinary(documentId); return Stream.of(new CouchbaseByteArrayDocument(getResult, documentId, prepend.mutationToken().orElse(null))); } } /** * Removes the document identified by its unique ID. 
* <p/> * Example: * <code>CALL apoc.couchbase.remove(['localhost'], 'default', 'artist:vincent_van_gogh') yield id, expiry, cas, mutationToken, content</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param documentId the unique ID of the document * @return the removed document * @see Collection#remove(String) */ @Procedure @Description("apoc.couchbase.remove(hostOrKey, bucket, documentId) yield id, expiry, cas, mutationToken, content - remove the couchbase json document identified by its unique ID.") public Stream<CouchbaseJsonDocument> remove(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("documentId") String documentId, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { GetResult getResult = couchbaseConnection.get(documentId); final MutationResult remove = couchbaseConnection.remove(documentId); return Stream.of(new CouchbaseJsonDocument(getResult, documentId, remove.mutationToken().orElse(null))); } } /** * Replace the content of the document identified by its unique * ID. 
* <p/> * Example: * <code>CALL apoc.couchbase.replace('localhost', 'default', 'artist:vincent_van_gogh', '{"firstName":"Vincent","secondName":"Willem","lastName":"Van Gogh","notableWorks":["Starry Night","Sunflowers","Bedroom in Arles","Portrait of Dr Gachet","Sorrow"],"placeOfBirth":"Zundert","placeOfDeath":" Auvers-sur-Oise"}') yield id, expiry, cas, mutationToken, content</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param documentId the unique ID of the document * @param json the JSON String representing the document to prepend * @return the replaced document * @see Collection#replace(String, Object) */ @Procedure @Description("apoc.couchbase.replace(hostOrKey, bucket, documentId, jsonDocument) yield id, expiry, cas, mutationToken, content - replace the content of the couchbase json document identified by its unique ID.") public Stream<CouchbaseJsonDocument> replace(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("documentId") String documentId, @Name("json") String json, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { final MutationResult replace = couchbaseConnection.replace(documentId, json); GetResult getResult = couchbaseConnection.get(documentId); return Stream.of(new CouchbaseJsonDocument(getResult, documentId, replace.mutationToken().orElse(null))); } } /** * Executes a plain un-parameterized N1QL statement. 
* <p/> * Example: * <code>CALL apoc.couchbase.query('localhost', 'default', 'select * from default where lastName = "Van Gogh"']) yield queryResult</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param statement the raw statement string to execute * @return the list of {@link JsonObject}s retrieved by this query in the form * of a {@link CouchbaseQueryResult} * @see Cluster#query(String) */ @Procedure @Description("apoc.couchbase.query(hostOrKey, bucket, statement) yield queryResult - executes a plain un-parameterized N1QL statement.") public Stream<CouchbaseQueryResult> query(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("statement") String statement, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { List<JsonObject> statementResult = couchbaseConnection.executeStatement(statement); final CouchbaseQueryResult result = CouchbaseUtils.convertToCouchbaseQueryResult(statementResult); return result == null ? Stream.empty() : Stream.of(result); } } /** * Executes a N1QL statement with positional parameters. * <p/> * Example: * <code>CALL apoc.couchbase.posParamsQuery('localhost', 'default', 'select * from default where lastName = $1', ['Van Gogh']) yield queryResult</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param statement the raw statement string to execute (containing positional * placeholders: $1, $2, ...) 
* @param params the values for the positional placeholders in statement * @return the list of {@link JsonObject}s retrieved by this query in the form * of a {@link CouchbaseQueryResult} * @see Cluster#query(String, QueryOptions) */ @Procedure @Description("apoc.couchbase.posParamsQuery(hostOrKey, bucket, statement, params) yield queryResult - executes a N1QL statement with positional parameters.") public Stream<CouchbaseQueryResult> posParamsQuery(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("statement") String statement, @Name("params") List<Object> params, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { List<JsonObject> statementResult = couchbaseConnection.executeParameterizedStatement(statement, params); final CouchbaseQueryResult result = CouchbaseUtils.convertToCouchbaseQueryResult(statementResult); return result == null ? Stream.empty() : Stream.of(result); } } /** * Executes a N1QL statement with named parameters. * <p/> * Example: * <code>CALL apoc.couchbase.namedParamsQuery('localhost', 'default', 'select * from default where lastName = $lastName', ['lastName'], ['Van Gogh']) yield queryResult</code> * * @param hostOrKey a URI to use when connecting to the cluster reference or a configuration key * @param bucket the bucket to open; if null is passed then it's used the "default" * bucket * @param statement the raw statement string to execute (containing named * placeholders: $param1, $param2, ...) 
* @param paramNames the placeholders' names in statement * @param paramValues the values for the named placeholders in statement * @return the list of {@link JsonObject}s retrieved by this query in the form * of a {@link CouchbaseQueryResult} * @see Cluster#query(String, QueryOptions) */ @Procedure @Description("apoc.couchbase.namedParamsQuery(hostkOrKey, bucket, statement, paramNames, paramValues) yield queryResult - executes a N1QL statement with named parameters.") public Stream<CouchbaseQueryResult> namedParamsQuery(@Name("hostOrKey") String hostOrKey, @Name("bucket") String bucket, @Name("statement") String statement, @Name("paramNames") List<String> paramNames, @Name("paramValues") List<Object> paramValues, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) { try (CouchbaseConnection couchbaseConnection = getCouchbaseConnection(hostOrKey, bucket, config)) { List<JsonObject> statementResult = couchbaseConnection.executeParameterizedStatement(statement, paramNames, paramValues); final CouchbaseQueryResult result = CouchbaseUtils.convertToCouchbaseQueryResult(statementResult); return result == null ? Stream.empty() : Stream.of(result); } } private CouchbaseConnection getCouchbaseConnection(String hostOrKey, String bucket, Map<String, Object> configMap) { CouchbaseConfig config = new CouchbaseConfig(configMap); try { return CouchbaseManager.getConnection(hostOrKey, bucket, config); } catch (NoClassDefFoundError e) { throw new MissingDependencyException("Cannot find the jar into the plugins folder. \n" + "Please put these jar in the plugins folder : \n\n" + "java-client-x.y.z.jar\n" + "\n" + "core-io-x.y.z.jar\n" + "\n" + "rxjava-x.y.z.jar\n" + "\n" + "See the documentation: https://neo4j-contrib.github.io/neo4j-apoc-procedures/#_interacting_with_couchbase"); } } }
/*
 * Copyright 2011 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import static com.google.common.base.Preconditions.checkState;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.errorprone.annotations.Immutable;
import com.google.javascript.rhino.ClosurePrimitive;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.NominalTypeBuilder;
import com.google.javascript.rhino.StaticSourceFile;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.ObjectType;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Helper classes for dealing with coding conventions.
 */
public final class CodingConventions {

  // Static utility holder; never instantiated.
  private CodingConventions() {}

  /** Gets the default coding convention. */
  public static CodingConvention getDefault() {
    return new DefaultCodingConvention();
  }

  /**
   * Shared helper: decides whether the given statement/expression is a call
   * to the named always-throwing function.
   *
   * @param n The last statement of a block to check for an always throws
   * function call. Used by CheckMissingReturn.
   * @param alwaysThrowsFunctionName The name of a function that always throws.
   * @return {@code true} if n is call to alwaysThrowsFunctionName, otherwise
   * {@code false}.
   */
  public static boolean defaultIsFunctionCallThatAlwaysThrows(
      Node n, String alwaysThrowsFunctionName) {
    // Accept either a bare CALL node or an EXPR_RESULT wrapping a CALL.
    if (n.isExprResult()) {
      if (!n.getFirstChild().isCall()) {
        return false;
      }
    } else if (!n.isCall()) {
      return false;
    }
    if (n.isExprResult()) {
      // Unwrap so that n is the CALL node itself.
      n = n.getFirstChild();
    }
    // n is a call
    return n.getFirstChild().matchesQualifiedName(alwaysThrowsFunctionName);
  }

  /**
   * A convention that wraps another.
   *
   * When you want to support a new library, you should subclass this
   * delegate, and override the methods that you want to customize.
   *
   * This way, a person using jQuery and Closure Library can create a new
   * coding convention by creating a jQueryCodingConvention that delegates
   * to a ClosureCodingConvention that delegates to a DefaultCodingConvention.
   */
  @Immutable
  public static class Proxy implements CodingConvention {

    // The wrapped convention that every query is forwarded to by default.
    protected final CodingConvention nextConvention;

    protected Proxy(CodingConvention convention) {
      this.nextConvention = convention;
    }

    @Override
    public boolean isConstant(String variableName) {
      return nextConvention.isConstant(variableName);
    }

    @Override
    public boolean isConstantKey(String keyName) {
      return nextConvention.isConstantKey(keyName);
    }

    @Override
    public boolean isValidEnumKey(String key) {
      return nextConvention.isValidEnumKey(key);
    }

    @Override
    public boolean isOptionalParameter(Node parameter) {
      return nextConvention.isOptionalParameter(parameter);
    }

    @Override
    public boolean isVarArgsParameter(Node parameter) {
      return nextConvention.isVarArgsParameter(parameter);
    }

    @Override
    public boolean isFunctionCallThatAlwaysThrows(Node n) {
      return nextConvention.isFunctionCallThatAlwaysThrows(n);
    }

    @Override
    public boolean isExported(String name, boolean local) {
      return nextConvention.isExported(name, local);
    }

    // final: the single-argument overload is pinned to the interface default,
    // which routes through the two-argument overload above.
    @Override
    public final boolean isExported(String name) {
      return CodingConvention.super.isExported(name);
    }

    @Override
    public String getPackageName(StaticSourceFile source) {
      return nextConvention.getPackageName(source);
    }

    @Override
    public boolean blockRenamingForProperty(String name) {
      return nextConvention.blockRenamingForProperty(name);
    }

    @Override
    public boolean isPrivate(String name) {
      return nextConvention.isPrivate(name);
    }

    @Override
    public boolean hasPrivacyConvention() {
      return nextConvention.hasPrivacyConvention();
    }

    @Override
    public SubclassRelationship getClassesDefinedByCall(Node callNode) {
      return nextConvention.getClassesDefinedByCall(callNode);
    }

    @Override
    public boolean isClassFactoryCall(Node callNode) {
      return nextConvention.isClassFactoryCall(callNode);
    }

    @Override
    public boolean isSuperClassReference(String propertyName) {
      return nextConvention.isSuperClassReference(propertyName);
    }

    @Override
    public boolean extractIsModuleFile(Node node, Node parent) {
      return nextConvention.extractIsModuleFile(node, parent);
    }

    @Override
    public String extractClassNameIfProvide(Node node, Node parent) {
      return nextConvention.extractClassNameIfProvide(node, parent);
    }

    @Override
    public String extractClassNameIfRequire(Node node, Node parent) {
      return nextConvention.extractClassNameIfRequire(node, parent);
    }

    @Override
    public String getExportPropertyFunction() {
      return nextConvention.getExportPropertyFunction();
    }

    @Override
    public String getExportSymbolFunction() {
      return nextConvention.getExportSymbolFunction();
    }

    @Override
    public List<String> identifyTypeDeclarationCall(Node n) {
      return nextConvention.identifyTypeDeclarationCall(n);
    }

    @Override
    public void applySubclassRelationship(
        NominalTypeBuilder parent, NominalTypeBuilder child, SubclassType type) {
      nextConvention.applySubclassRelationship(parent, child, type);
    }

    @Override
    public String getAbstractMethodName() {
      return nextConvention.getAbstractMethodName();
    }

    @Override
    public String getSingletonGetterClassName(Node callNode) {
      return nextConvention.getSingletonGetterClassName(callNode);
    }

    @Override
    public void applySingletonGetter(
        NominalTypeBuilder classType, FunctionType getterType) {
      nextConvention.applySingletonGetter(classType, getterType);
    }

    @Override
    public boolean isInlinableFunction(Node n) {
      return nextConvention.isInlinableFunction(n);
    }

    @Override
    public DelegateRelationship getDelegateRelationship(Node callNode) {
      return nextConvention.getDelegateRelationship(callNode);
    }

    @Override
    public void applyDelegateRelationship(
        NominalTypeBuilder delegateSuperclass,
        NominalTypeBuilder delegateBase,
        NominalTypeBuilder delegator,
        ObjectType delegateProxy,
        FunctionType findDelegate) {
      nextConvention.applyDelegateRelationship(
          delegateSuperclass, delegateBase, delegator, delegateProxy, findDelegate);
    }

    @Override
    public String getDelegateSuperclassName() {
      return nextConvention.getDelegateSuperclassName();
    }

    @Override
    public void checkForCallingConventionDefinitions(
        Node n, Map<String, String> delegateCallingConventions) {
      nextConvention.checkForCallingConventionDefinitions(
          n, delegateCallingConventions);
    }

    @Override
    public void defineDelegateProxyPrototypeProperties(
        JSTypeRegistry registry,
        List<NominalTypeBuilder> delegateProxies,
        Map<String, String> delegateCallingConventions) {
      nextConvention.defineDelegateProxyPrototypeProperties(
          registry, delegateProxies, delegateCallingConventions);
    }

    @Override
    public Collection<AssertionFunctionSpec> getAssertionFunctions() {
      return nextConvention.getAssertionFunctions();
    }

    @Override
    public Bind describeFunctionBind(Node n) {
      // Routes through this object's three-argument overload (not directly to
      // nextConvention), so subclass overrides of the overload are honored.
      return describeFunctionBind(n, false, false);
    }

    @Override
    public Bind describeFunctionBind(
        Node n, boolean callerChecksTypes, boolean iCheckTypes) {
      return nextConvention
          .describeFunctionBind(n, callerChecksTypes, iCheckTypes);
    }

    @Override
    public Cache describeCachingCall(Node node) {
      return nextConvention.describeCachingCall(node);
    }

    @Override
    public boolean isPropertyTestFunction(Node call) {
      return nextConvention.isPropertyTestFunction(call);
    }

    @Override
    public boolean isPropertyRenameFunction(String name) {
      return nextConvention.isPropertyRenameFunction(name);
    }

    // NOTE(review): unlike every other method here, this does NOT delegate to
    // nextConvention — it always answers false. Confirm this is intentional.
    @Override
    public boolean isPrototypeAlias(Node getProp) {
      return false;
    }

    @Override
    public ObjectLiteralCast getObjectLiteralCast(Node callNode) {
      return nextConvention.getObjectLiteralCast(callNode);
    }

    @Override
    public Collection<String> getIndirectlyDeclaredProperties() {
      return nextConvention.getIndirectlyDeclaredProperties();
    }
  }

  /**
   * The default coding convention.
   * Should be at the bottom of all proxy chains.
   */
  @Immutable
  private static class DefaultCodingConvention implements CodingConvention {

    private static final long serialVersionUID = 1L;

    @Override
    public boolean isConstant(String variableName) {
      return false;
    }

    @Override
    public boolean isConstantKey(String variableName) {
      return false;
    }

    @Override
    public boolean isValidEnumKey(String key) {
      return key != null && key.length() > 0;
    }

    @Override
    public boolean isOptionalParameter(Node parameter) {
      return false;
    }

    @Override
    public boolean isVarArgsParameter(Node parameter) {
      // be as lax as possible
      return parameter.isRest();
    }

    @Override
    public boolean isFunctionCallThatAlwaysThrows(Node n) {
      // Only calls whose function type carries the ASSERTS_FAIL Closure
      // primitive count as always-throwing here.
      if (NodeUtil.isExprCall(n)) {
        FunctionType fnType =
            FunctionType.toMaybeFunctionType(n.getFirstFirstChild().getJSType());
        return fnType != null
            && ClosurePrimitive.ASSERTS_FAIL == fnType.getClosurePrimitive();
      }
      return false;
    }

    @Override
    public String getPackageName(StaticSourceFile source) {
      // The package name of a source file is its file path.
      String name = source.getName();
      int lastSlash = name.lastIndexOf('/');
      return lastSlash == -1 ? "" : name.substring(0, lastSlash);
    }

    @Override
    public boolean isExported(String name, boolean local) {
      return local && name.startsWith("$super");
    }

    @Override
    public final boolean isExported(String name) {
      return CodingConvention.super.isExported(name);
    }

    @Override
    public boolean blockRenamingForProperty(String name) {
      return false;
    }

    @Override
    public boolean isPrivate(String name) {
      return false;
    }

    @Override
    public boolean hasPrivacyConvention() {
      return false;
    }

    @Override
    public SubclassRelationship getClassesDefinedByCall(Node callNode) {
      // Recognizes the transpiler-emitted $jscomp.inherits(sub, super) call.
      Node callName = callNode.getFirstChild();
      if ((callName.matchesQualifiedName("$jscomp.inherits")
              || callName.matchesName("$jscomp$inherits"))
          && callNode.hasXChildren(3)) {
        Node subclass = callName.getNext();
        Node superclass = subclass.getNext();
        // The StripCode pass may create $jscomp.inherits calls with NULL arguments.
        if (subclass.isQualifiedName() && superclass.isQualifiedName()) {
          return new SubclassRelationship(SubclassType.INHERITS, subclass, superclass);
        }
      }
      return null;
    }

    @Override
    public boolean isClassFactoryCall(Node callNode) {
      return false;
    }

    @Override
    public boolean isSuperClassReference(String propertyName) {
      return false;
    }

    @Override
    public boolean extractIsModuleFile(Node node, Node parent) {
      String message = "only implemented in ClosureCodingConvention";
      throw new UnsupportedOperationException(message);
    }

    @Override
    public String extractClassNameIfProvide(Node node, Node parent) {
      String message = "only implemented in ClosureCodingConvention";
      throw new UnsupportedOperationException(message);
    }

    @Override
    public String extractClassNameIfRequire(Node node, Node parent) {
      String message = "only implemented in ClosureCodingConvention";
      throw new UnsupportedOperationException(message);
    }

    @Override
    public String getExportPropertyFunction() {
      return null;
    }

    @Override
    public String getExportSymbolFunction() {
      return null;
    }

    @Override
    public List<String> identifyTypeDeclarationCall(Node n) {
      return null;
    }

    @Override
    public void applySubclassRelationship(
        NominalTypeBuilder parent, NominalTypeBuilder child, SubclassType type) {
      // do nothing
    }

    @Override
    public String getAbstractMethodName() {
      return null;
    }

    @Override
    public String getSingletonGetterClassName(Node callNode) {
      return null;
    }

    @Override
    public void applySingletonGetter(
        NominalTypeBuilder classType, FunctionType getterType) {
      // do nothing.
    }

    @Override
    public boolean isInlinableFunction(Node n) {
      checkState(n.isFunction(), n);
      return true;
    }

    @Override
    public DelegateRelationship getDelegateRelationship(Node callNode) {
      return null;
    }

    @Override
    public void applyDelegateRelationship(
        NominalTypeBuilder delegateSuperclass,
        NominalTypeBuilder delegateBase,
        NominalTypeBuilder delegator,
        ObjectType delegateProxy,
        FunctionType findDelegate) {
      // do nothing.
    }

    @Override
    public String getDelegateSuperclassName() {
      return null;
    }

    @Override
    public void checkForCallingConventionDefinitions(Node n,
        Map<String, String> delegateCallingConventions) {
      // do nothing.
    }

    @Override
    public void defineDelegateProxyPrototypeProperties(
        JSTypeRegistry registry,
        List<NominalTypeBuilder> delegateProxies,
        Map<String, String> delegateCallingConventions) {
      // do nothing.
    }

    @Override
    @SuppressWarnings("ReferenceEquality")
    public boolean isPropertyTestFunction(Node call) {
      // Avoid building the qualified name and check for
      // "goog.isArray"
      // Reference (==) comparison of strings is deliberate here (see the
      // ReferenceEquality suppression above) to avoid equals() on a hot path.
      Node target = call.getFirstChild();
      if (target.isGetProp()) {
        Node src = target.getFirstChild();
        String prop = target.getLastChild().getString();
        if (src.isName() && src.getString() == "Array" && prop == "isArray") {
          return true;
        }
      }
      return false;
    }

    @Override
    public boolean isPropertyRenameFunction(String name) {
      return NodeUtil.JSC_PROPERTY_NAME_FN.equals(name)
          || "$jscomp.reflectProperty".equals(name);
    }

    @Override
    public boolean isPrototypeAlias(Node getProp) {
      return false;
    }

    @Override
    public ObjectLiteralCast getObjectLiteralCast(Node callNode) {
      return null;
    }

    @Override
    public ImmutableSet<AssertionFunctionSpec> getAssertionFunctions() {
      // Default assertions: truthiness checks and matches-return checks,
      // identified by their Closure primitives.
      return ImmutableSet.of(
          AssertionFunctionSpec.forTruthy()
              .setClosurePrimitive(ClosurePrimitive.ASSERTS_TRUTHY)
              .build(),
          AssertionFunctionSpec.forMatchesReturn()
              .setClosurePrimitive(ClosurePrimitive.ASSERTS_MATCHES_RETURN)
              .build());
    }

    @Override
    public Bind describeFunctionBind(Node n) {
      return describeFunctionBind(n, false, false);
    }

    @Override
    public Bind describeFunctionBind(
        Node n, boolean callerChecksTypes, boolean iCheckTypes) {
      if (!n.isCall()) {
        return null;
      }
      Node callTarget = n.getFirstChild();
      if (callTarget.isQualifiedName()) {
        if (callTarget.matchesQualifiedName("Function.prototype.bind.call")) {
          // goog.bind(fn, self, args...);
          Node fn = callTarget.getNext();
          if (fn == null) {
            return null;
          }
          Node thisValue = safeNext(fn);
          Node parameters = safeNext(thisValue);
          return new Bind(fn, thisValue, parameters);
        }
      }

      if (callTarget.isGetProp()
          && callTarget.getLastChild().getString().equals("bind")) {
        Node maybeFn = callTarget.getFirstChild();
        com.google.javascript.rhino.jstype.JSType maybeFnType = maybeFn.getJSType();
        FunctionType fnType = null;
        if (iCheckTypes && maybeFnType != null) {
          fnType = maybeFnType.restrictByNotNullOrUndefined()
              .toMaybeFunctionType();
        }

        if (fnType != null || callerChecksTypes || maybeFn.isFunction()) {
          // (function(){}).bind(self, args...);
          Node thisValue = callTarget.getNext();
          Node parameters = safeNext(thisValue);
          return new Bind(maybeFn, thisValue, parameters);
        }
      }

      return null;
    }

    @Override
    public Cache describeCachingCall(Node node) {
      return null;
    }

    @Override
    public Collection<String> getIndirectlyDeclaredProperties() {
      return ImmutableList.of();
    }

    // Null-safe getNext(): returns null when n itself is null.
    private static Node safeNext(Node n) {
      if (n != null) {
        return n.getNext();
      }
      return null;
    }
  }
}
package controllers;

import com.feth.play.module.pa.PlayAuthenticate;
import models.TokenAction;
import models.TokenAction.Type;
import models.User;
import play.data.Form;
import play.i18n.Messages;
import play.mvc.Controller;
import play.mvc.Result;
import providers.MyLoginUsernamePasswordAuthUser;
import providers.MyUsernamePasswordAuthProvider;
import providers.MyUsernamePasswordAuthProvider.MyIdentity;
import providers.MyUsernamePasswordAuthUser;
import views.html.account.signup.unverified;
import views.html.account.signup.password_forgot;
import views.html.account.signup.no_token_or_invalid;
import views.html.account.signup.password_reset;
import views.html.account.signup.oAuthDenied;
import views.html.account.signup.exists;
import controllers.routes;

import static play.data.Form.form;

/**
 * Controller for the signup workflow: email verification, forgot/reset
 * password, and the OAuth-denied / account-exists landing pages.
 * (Original header incorrectly described this file as the User entity.)
 *
 * @version 1.0 1st June 2015
 * @author Shekhar Ullah
 */
public class Signup extends Controller {

    /** Form model for the password-reset page: a new password plus the reset token. */
    public static class PasswordReset extends Account.PasswordChange {

        public PasswordReset() {
        }

        public PasswordReset(final String token) {
            this.token = token;
        }

        // The opaque token from the reset email, used to look up the TokenAction.
        public String token;

        public String getToken() {
            return token;
        }

        public void setToken(String token) {
            this.token = token;
        }
    }

    private static final Form<PasswordReset> PASSWORD_RESET_FORM = form(PasswordReset.class);

    /** Renders the "your email is not verified yet" page. */
    public static Result unverified() {
        com.feth.play.module.pa.controllers.Authenticate.noCache(response());
        return ok(unverified.render());
    }

    private static final Form<MyIdentity> FORGOT_PASSWORD_FORM = form(MyIdentity.class);

    /** Renders the forgot-password form, pre-filled with the email if one was given. */
    public static Result forgotPassword(final String email) {
        com.feth.play.module.pa.controllers.Authenticate.noCache(response());
        Form<MyIdentity> form = FORGOT_PASSWORD_FORM;
        if (email != null && !email.trim().isEmpty()) {
            form = FORGOT_PASSWORD_FORM.fill(new MyIdentity(email));
        }
        return ok(password_forgot.render(form));
    }

    /**
     * Handles the forgot-password form submission. Deliberately reports
     * "instructions sent" regardless of whether the email is known, so the
     * endpoint cannot be used to probe which addresses are registered.
     */
    public static Result doForgotPassword() {
        com.feth.play.module.pa.controllers.Authenticate.noCache(response());
        final Form<MyIdentity> filledForm = FORGOT_PASSWORD_FORM
                .bindFromRequest();
        if (filledForm.hasErrors()) {
            // User did not fill in his/her email
            return badRequest(password_forgot.render(filledForm));
        } else {
            // The email address given *BY AN UNKNWON PERSON* to the form - we
            // should find out if we actually have a user with this email
            // address and whether password login is enabled for him/her. Also
            // only send if the email address of the user has been verified.
            final String email = filledForm.get().email;

            // We don't want to expose whether a given email address is signed
            // up, so just say an email has been sent, even though it might not
            // be true - that's protecting our user privacy.
            flash(Application.FLASH_MESSAGE_KEY, Messages.get(
                    "playauthenticate.reset_password.message.instructions_sent",
                    email));

            final User user = User.findByEmail(email);
            if (user != null) {
                // yep, we have a user with this email that is active - we do
                // not know if the user owning that account has requested this
                // reset, though.
                final MyUsernamePasswordAuthProvider provider = MyUsernamePasswordAuthProvider
                        .getProvider();
                // User exists
                if (user.emailValidated) {
                    provider.sendPasswordResetMailing(user, ctx());
                    // In case you actually want to let (the unknown person)
                    // know whether a user was found/an email was sent, use,
                    // change the flash message
                } else {
                    // We need to change the message here, otherwise the user
                    // does not understand whats going on - we should not verify
                    // with the password reset, as a "bad" user could then sign
                    // up with a fake email via OAuth and get it verified by an
                    // a unsuspecting user that clicks the link.
                    flash(Application.FLASH_MESSAGE_KEY,
                            Messages.get("playauthenticate.reset_password.message.email_not_verified"));

                    // You might want to re-send the verification email here...
                    provider.sendVerifyEmailMailingAfterSignup(user, ctx());
                }
            }
            return redirect(routes.Application.index());
        }
    }

    /**
     * Returns a token object if valid, null if not
     *
     * @param token
     * @param type
     * @return
     */
    private static TokenAction tokenIsValid(final String token, final Type type) {
        TokenAction ret = null;
        if (token != null && !token.trim().isEmpty()) {
            final TokenAction ta = TokenAction.findByToken(token, type);
            if (ta != null && ta.isValid()) {
                ret = ta;
            }
        }
        return ret;
    }

    /** Renders the reset-password form if the token is valid, else an error page. */
    public static Result resetPassword(final String token) {
        com.feth.play.module.pa.controllers.Authenticate.noCache(response());
        final TokenAction ta = tokenIsValid(token, Type.PASSWORD_RESET);
        if (ta == null) {
            return badRequest(no_token_or_invalid.render());
        }
        return ok(password_reset.render(PASSWORD_RESET_FORM
                .fill(new PasswordReset(token))));
    }

    /**
     * Handles the reset-password submission: re-validates the token, sets the
     * new password, and either auto-logs the user in or sends them to login,
     * depending on the provider's configuration.
     */
    public static Result doResetPassword() {
        com.feth.play.module.pa.controllers.Authenticate.noCache(response());
        final Form<PasswordReset> filledForm = PASSWORD_RESET_FORM
                .bindFromRequest();
        if (filledForm.hasErrors()) {
            return badRequest(password_reset.render(filledForm));
        } else {
            final String token = filledForm.get().token;
            final String newPassword = filledForm.get().password;

            final TokenAction ta = tokenIsValid(token, Type.PASSWORD_RESET);
            if (ta == null) {
                return badRequest(no_token_or_invalid.render());
            }
            final User u = ta.targetUser;
            try {
                // Pass true for the second parameter if you want to
                // automatically create a password and the exception never to
                // happen
                u.resetPassword(new MyUsernamePasswordAuthUser(newPassword), false);
            } catch (final RuntimeException re) {
                flash(Application.FLASH_MESSAGE_KEY,
                        Messages.get("playauthenticate.reset_password.message.no_password_account"));
            }
            final boolean login = MyUsernamePasswordAuthProvider.getProvider()
                    .isLoginAfterPasswordReset();
            if (login) {
                // automatically log in
                flash(Application.FLASH_MESSAGE_KEY,
                        Messages.get("playauthenticate.reset_password.message.success.auto_login"));

                return PlayAuthenticate.loginAndRedirect(ctx(),
                        new MyLoginUsernamePasswordAuthUser(u.email));
            } else {
                // send the user to the login page
                flash(Application.FLASH_MESSAGE_KEY,
                        Messages.get("playauthenticate.reset_password.message.success.manual_login"));
            }
            return redirect(routes.Application.login());
        }
    }

    /** Renders the page shown when the user denied the OAuth authorization. */
    public static Result oAuthDenied(final String getProviderKey) {
        com.feth.play.module.pa.controllers.Authenticate.noCache(response());
        return ok(oAuthDenied.render(getProviderKey));
    }

    /** Renders the "account already exists" page. */
    public static Result exists() {
        com.feth.play.module.pa.controllers.Authenticate.noCache(response());
        return ok(exists.render());
    }

    /** Consumes an email-verification token and marks the target user verified. */
    public static Result verify(final String token) {
        com.feth.play.module.pa.controllers.Authenticate.noCache(response());
        final TokenAction ta = tokenIsValid(token, Type.EMAIL_VERIFICATION);
        if (ta == null) {
            return badRequest(no_token_or_invalid.render());
        }
        final String email = ta.targetUser.email;
        User.verify(ta.targetUser);
        flash(Application.FLASH_MESSAGE_KEY,
                Messages.get("playauthenticate.verify_email.success", email));
        if (Application.getLocalUser(session()) != null) {
            return redirect(routes.Application.index());
        } else {
            return redirect(routes.Application.login());
        }
    }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.importexport;

import org.w3c.dom.*;

import java.net.*;
import java.util.*;
import java.util.Map.Entry;

import org.apache.commons.logging.*;

import com.amazonaws.*;
import com.amazonaws.auth.*;
import com.amazonaws.handlers.*;
import com.amazonaws.http.*;
import com.amazonaws.internal.*;
import com.amazonaws.metrics.*;
import com.amazonaws.regions.*;
import com.amazonaws.transform.*;
import com.amazonaws.util.*;
import com.amazonaws.util.AWSRequestMetrics.Field;
import com.amazonaws.services.importexport.model.*;
import com.amazonaws.services.importexport.model.transform.*;

/**
 * Client for accessing AWS Import/Export. All service calls made using this
 * client are blocking, and will not return until the service call completes.
 * <p>
 * <fullname>AWS Import/Export Service</fullname> AWS Import/Export accelerates
 * transferring large amounts of data between the AWS cloud and portable storage
 * devices that you mail to us. AWS Import/Export transfers data directly onto
 * and off of your storage devices using Amazon's high-speed internal network
 * and bypassing the Internet. For large data sets, AWS Import/Export is often
 * faster than Internet transfer and more cost effective than upgrading your
 * connectivity.
 */
public class AmazonImportExportClient extends AmazonWebServiceClient implements
        AmazonImportExport {
    /** Provider for AWS credentials. */
    private AWSCredentialsProvider awsCredentialsProvider;

    private static final Log log = LogFactory.getLog(AmazonImportExport.class);

    /** Default signing name for the service. */
    private final String DEFAULT_SIGNING_NAME = "importexport";

    /**
     * List of exception unmarshallers for all AWS Import/Export exceptions.
     */
    protected final List<Unmarshaller<AmazonServiceException, Node>> exceptionUnmarshallers = new ArrayList<Unmarshaller<AmazonServiceException, Node>>();

    /**
     * Constructs a new client to invoke service methods on AWS Import/Export. A
     * credentials provider chain will be used that searches for credentials in
     * this order:
     * <ul>
     * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
     * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
     * <li>Instance profile credentials delivered through the Amazon EC2
     * metadata service</li>
     * </ul>
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @see DefaultAWSCredentialsProviderChain
     */
    public AmazonImportExportClient() {
        this(new DefaultAWSCredentialsProviderChain(),
                com.amazonaws.PredefinedClientConfigurations.defaultConfig());
    }

    /**
     * Constructs a new client to invoke service methods on AWS Import/Export. A
     * credentials provider chain will be used that searches for credentials in
     * this order:
     * <ul>
     * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
     * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
     * <li>Instance profile credentials delivered through the Amazon EC2
     * metadata service</li>
     * </ul>
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param clientConfiguration
     *        The client configuration options controlling how this client
     *        connects to AWS Import/Export (ex: proxy settings, retry counts,
     *        etc.).
     *
     * @see DefaultAWSCredentialsProviderChain
     */
    public AmazonImportExportClient(ClientConfiguration clientConfiguration) {
        this(new DefaultAWSCredentialsProviderChain(), clientConfiguration);
    }

    /**
     * Constructs a new client to invoke service methods on AWS Import/Export
     * using the specified AWS account credentials.
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param awsCredentials
     *        The AWS credentials (access key ID and secret key) to use when
     *        authenticating with AWS services.
     */
    public AmazonImportExportClient(AWSCredentials awsCredentials) {
        this(awsCredentials, com.amazonaws.PredefinedClientConfigurations
                .defaultConfig());
    }

    /**
     * Constructs a new client to invoke service methods on AWS Import/Export
     * using the specified AWS account credentials and client configuration
     * options.
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param awsCredentials
     *        The AWS credentials (access key ID and secret key) to use when
     *        authenticating with AWS services.
     * @param clientConfiguration
     *        The client configuration options controlling how this client
     *        connects to AWS Import/Export (ex: proxy settings, retry counts,
     *        etc.).
     */
    public AmazonImportExportClient(AWSCredentials awsCredentials,
            ClientConfiguration clientConfiguration) {
        super(clientConfiguration);
        // Fixed credentials are wrapped in a static provider so the rest of
        // the client can treat all credential sources uniformly.
        this.awsCredentialsProvider = new StaticCredentialsProvider(
                awsCredentials);
        init();
    }

    /**
     * Constructs a new client to invoke service methods on AWS Import/Export
     * using the specified AWS account credentials provider.
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     */
    public AmazonImportExportClient(
            AWSCredentialsProvider awsCredentialsProvider) {
        this(awsCredentialsProvider,
                com.amazonaws.PredefinedClientConfigurations.defaultConfig());
    }

    /**
     * Constructs a new client to invoke service methods on AWS Import/Export
     * using the specified AWS account credentials provider and client
     * configuration options.
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     * @param clientConfiguration
     *        The client configuration options controlling how this client
     *        connects to AWS Import/Export (ex: proxy settings, retry counts,
     *        etc.).
     */
    public AmazonImportExportClient(
            AWSCredentialsProvider awsCredentialsProvider,
            ClientConfiguration clientConfiguration) {
        this(awsCredentialsProvider, clientConfiguration, null);
    }

    /**
     * Constructs a new client to invoke service methods on AWS Import/Export
     * using the specified AWS account credentials provider, client
     * configuration options, and request metric collector.
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     * @param clientConfiguration
     *        The client configuration options controlling how this client
     *        connects to AWS Import/Export (ex: proxy settings, retry counts,
     *        etc.).
     * @param requestMetricCollector
     *        optional request metric collector
     */
    public AmazonImportExportClient(
            AWSCredentialsProvider awsCredentialsProvider,
            ClientConfiguration clientConfiguration,
            RequestMetricCollector requestMetricCollector) {
        super(clientConfiguration, requestMetricCollector);
        this.awsCredentialsProvider = awsCredentialsProvider;
        init();
    }

    // Shared constructor tail: registers exception unmarshallers, sets the
    // service endpoint/signing name, and installs the request handler chains.
    private void init() {
        exceptionUnmarshallers.add(new BucketPermissionExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new InvalidManifestFieldExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new MissingManifestFieldExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new MalformedManifestExceptionUnmarshaller());
        exceptionUnmarshallers.add(new InvalidJobIdExceptionUnmarshaller());
        exceptionUnmarshallers.add(new ExpiredJobIdExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new InvalidFileSystemExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new UnableToCancelJobIdExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new UnableToUpdateJobIdExceptionUnmarshaller());
        exceptionUnmarshallers.add(new MissingParameterExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new InvalidAccessKeyIdExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new CreateJobQuotaExceededExceptionUnmarshaller());
        exceptionUnmarshallers.add(new InvalidParameterExceptionUnmarshaller());
        exceptionUnmarshallers.add(new InvalidVersionExceptionUnmarshaller());
        exceptionUnmarshallers.add(new MissingCustomsExceptionUnmarshaller());
        exceptionUnmarshallers.add(new InvalidAddressExceptionUnmarshaller());
        exceptionUnmarshallers.add(new NoSuchBucketExceptionUnmarshaller());
        exceptionUnmarshallers.add(new InvalidCustomsExceptionUnmarshaller());
        exceptionUnmarshallers.add(new CanceledJobIdExceptionUnmarshaller());
        exceptionUnmarshallers.add(new MultipleRegionsExceptionUnmarshaller());
        // StandardErrorUnmarshaller must come last: it is the catch-all.
        exceptionUnmarshallers.add(new StandardErrorUnmarshaller());

        // calling this.setEndPoint(...) will also modify the signer accordingly
        this.setEndpoint("https://importexport.amazonaws.com");
        setServiceNameIntern(DEFAULT_SIGNING_NAME);
        HandlerChainFactory chainFactory = new HandlerChainFactory();
        requestHandler2s
                .addAll(chainFactory
                        .newRequestHandlerChain("/com/amazonaws/services/importexport/request.handlers"));
        requestHandler2s
                .addAll(chainFactory
                        .newRequestHandler2Chain("/com/amazonaws/services/importexport/request.handler2s"));
    }

    /**
     * This operation cancels a specified job. Only the job owner can cancel it.
     * The operation fails if the job has already started or is complete.
     *
     * @param cancelJobRequest
     *        Input structure for the CancelJob operation.
     * @return Result of the CancelJob operation returned by the service.
     * @throws InvalidJobIdException
     *         The JOBID was missing, not found, or not associated with the AWS
     *         account.
     * @throws ExpiredJobIdException
     *         Indicates that the specified job has expired out of the system.
     * @throws CanceledJobIdException
     *         The specified job ID has been canceled and is no longer valid.
     * @throws UnableToCancelJobIdException
     *         AWS Import/Export cannot cancel the job
     * @throws InvalidAccessKeyIdException
     *         The AWS Access Key ID specified in the request did not match the
     *         manifest's accessKeyId value. The manifest and the request
     *         authentication must use the same AWS Access Key ID.
     * @throws InvalidVersionException
     *         The client tool version is invalid.
     */
    @Override
    public CancelJobResult cancelJob(CancelJobRequest cancelJobRequest) {
        ExecutionContext executionContext = createExecutionContext(cancelJobRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext
                .getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CancelJobRequest> request = null;
        Response<CancelJobResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CancelJobRequestMarshaller()
                        .marshall(cancelJobRequest);
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            response = invoke(request, new CancelJobResultStaxUnmarshaller(),
                    executionContext);

            return response.getAwsResponse();

        } finally {
            // Always closes out the client-execute-time metric event,
            // even when marshalling or the service call throws.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }

    /**
     * This operation initiates the process of scheduling an upload or download
     * of your data. You include in the request a manifest that describes the
     * data transfer specifics. The response to the request includes a job ID,
     * which you can use in other operations, a signature that you use to
     * identify your storage device, and the address where you should ship your
     * storage device.
     *
     * @param createJobRequest
     *        Input structure for the CreateJob operation.
     * @return Result of the CreateJob operation returned by the service.
     * @throws MissingParameterException
     *         One or more required parameters was missing from the request.
     * @throws InvalidParameterException
     *         One or more parameters had an invalid value.
     * @throws InvalidAccessKeyIdException
     *         The AWS Access Key ID specified in the request did not match the
     *         manifest's accessKeyId value. The manifest and the request
     *         authentication must use the same AWS Access Key ID.
     * @throws InvalidAddressException
     *         The address specified in the manifest is invalid.
     * @throws InvalidManifestFieldException
     *         One or more manifest fields was invalid. Please correct and
     *         resubmit.
     * @throws MissingManifestFieldException
     *         One or more required fields were missing from the manifest file.
     *         Please correct and resubmit.
     * @throws NoSuchBucketException
     *         The specified bucket does not exist. Create the specified bucket
     *         or change the manifest's bucket, exportBucket, or logBucket field
     *         to a bucket that the account, as specified by the manifest's
     *         Access Key ID, has write permissions to.
     * @throws MissingCustomsException
     *         One or more required customs parameters was missing from the
     *         manifest.
* @throws InvalidCustomsException * One or more customs parameters was invalid. Please correct and * resubmit. * @throws InvalidFileSystemException * File system specified in export manifest is invalid. * @throws MultipleRegionsException * Your manifest file contained buckets from multiple regions. A job * is restricted to buckets from one region. Please correct and * resubmit. * @throws BucketPermissionException * The account specified does not have the appropriate bucket * permissions. * @throws MalformedManifestException * Your manifest is not well-formed. * @throws CreateJobQuotaExceededException * Each account can create only a certain number of jobs per day. If * you need to create more than this, please contact * awsimportexport@amazon.com to explain your particular use case. * @throws InvalidJobIdException * The JOBID was missing, not found, or not associated with the AWS * account. * @throws InvalidVersionException * The client tool version is invalid. */ @Override public CreateJobResult createJob(CreateJobRequest createJobRequest) { ExecutionContext executionContext = createExecutionContext(createJobRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<CreateJobRequest> request = null; Response<CreateJobResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new CreateJobRequestMarshaller() .marshall(createJobRequest); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } response = invoke(request, new CreateJobResultStaxUnmarshaller(), executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * This operation generates a pre-paid UPS shipping label that you will use * to ship your device to AWS for processing. 
* * @param getShippingLabelRequest * null * @return Result of the GetShippingLabel operation returned by the service. * @throws InvalidJobIdException * The JOBID was missing, not found, or not associated with the AWS * account. * @throws ExpiredJobIdException * Indicates that the specified job has expired out of the system. * @throws CanceledJobIdException * The specified job ID has been canceled and is no longer valid. * @throws InvalidAccessKeyIdException * The AWS Access Key ID specified in the request did not match the * manifest's accessKeyId value. The manifest and the request * authentication must use the same AWS Access Key ID. * @throws InvalidAddressException * The address specified in the manifest is invalid. * @throws InvalidVersionException * The client tool version is invalid. * @throws InvalidParameterException * One or more parameters had an invalid value. */ @Override public GetShippingLabelResult getShippingLabel( GetShippingLabelRequest getShippingLabelRequest) { ExecutionContext executionContext = createExecutionContext(getShippingLabelRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<GetShippingLabelRequest> request = null; Response<GetShippingLabelResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new GetShippingLabelRequestMarshaller() .marshall(getShippingLabelRequest); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } response = invoke(request, new GetShippingLabelResultStaxUnmarshaller(), executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * This operation returns information about a job, including where the job * is in the processing pipeline, the status of the results, and the * signature value associated with the job. You can only return information * about jobs you own. * * @param getStatusRequest * Input structure for the GetStatus operation. * @return Result of the GetStatus operation returned by the service. * @throws InvalidJobIdException * The JOBID was missing, not found, or not associated with the AWS * account. * @throws ExpiredJobIdException * Indicates that the specified job has expired out of the system. * @throws CanceledJobIdException * The specified job ID has been canceled and is no longer valid. * @throws InvalidAccessKeyIdException * The AWS Access Key ID specified in the request did not match the * manifest's accessKeyId value. The manifest and the request * authentication must use the same AWS Access Key ID. * @throws InvalidVersionException * The client tool version is invalid. */ @Override public GetStatusResult getStatus(GetStatusRequest getStatusRequest) { ExecutionContext executionContext = createExecutionContext(getStatusRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<GetStatusRequest> request = null; Response<GetStatusResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new GetStatusRequestMarshaller() .marshall(getStatusRequest); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } response = invoke(request, new GetStatusResultStaxUnmarshaller(), executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * This operation returns the jobs associated with the requester. AWS * Import/Export lists the jobs in reverse chronological order based on the * date of creation. For example if Job Test1 was created 2009Dec30 and Test2 * was created 2010Feb05, the ListJobs operation would return Test2 followed * by Test1. * * @param listJobsRequest * Input structure for the ListJobs operation. * @return Result of the ListJobs operation returned by the service. * @throws InvalidParameterException * One or more parameters had an invalid value. * @throws InvalidAccessKeyIdException * The AWS Access Key ID specified in the request did not match the * manifest's accessKeyId value. The manifest and the request * authentication must use the same AWS Access Key ID. * @throws InvalidVersionException * The client tool version is invalid. */ @Override public ListJobsResult listJobs(ListJobsRequest listJobsRequest) { ExecutionContext executionContext = createExecutionContext(listJobsRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListJobsRequest> request = null; Response<ListJobsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListJobsRequestMarshaller() .marshall(listJobsRequest); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } response = invoke(request, new ListJobsResultStaxUnmarshaller(), executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } @Override public ListJobsResult listJobs() { return listJobs(new ListJobsRequest()); } /** * You use this operation to change the parameters specified in the original * manifest file by supplying a new manifest file. The manifest file * attached to this request replaces the original manifest file. You can only * use the operation after a CreateJob request but before the data transfer * starts and you can only use it on jobs you own. * * @param updateJobRequest * Input structure for the UpateJob operation. * @return Result of the UpdateJob operation returned by the service. * @throws MissingParameterException * One or more required parameters was missing from the request. * @throws InvalidParameterException * One or more parameters had an invalid value. * @throws InvalidAccessKeyIdException * The AWS Access Key ID specified in the request did not match the * manifest's accessKeyId value. The manifest and the request * authentication must use the same AWS Access Key ID. * @throws InvalidAddressException * The address specified in the manifest is invalid. * @throws InvalidManifestFieldException * One or more manifest fields was invalid. Please correct and * resubmit. * @throws InvalidJobIdException * The JOBID was missing, not found, or not associated with the AWS * account. * @throws MissingManifestFieldException * One or more required fields were missing from the manifest file. * Please correct and resubmit. * @throws NoSuchBucketException * The specified bucket does not exist. 
Create the specified bucket * or change the manifest's bucket, exportBucket, or logBucket field * to a bucket that the account, as specified by the manifest's * Access Key ID, has write permissions to. * @throws ExpiredJobIdException * Indicates that the specified job has expired out of the system. * @throws CanceledJobIdException * The specified job ID has been canceled and is no longer valid. * @throws MissingCustomsException * One or more required customs parameters was missing from the * manifest. * @throws InvalidCustomsException * One or more customs parameters was invalid. Please correct and * resubmit. * @throws InvalidFileSystemException * File system specified in export manifest is invalid. * @throws MultipleRegionsException * Your manifest file contained buckets from multiple regions. A job * is restricted to buckets from one region. Please correct and * resubmit. * @throws BucketPermissionException * The account specified does not have the appropriate bucket * permissions. * @throws MalformedManifestException * Your manifest is not well-formed. * @throws UnableToUpdateJobIdException * AWS Import/Export cannot update the job * @throws InvalidVersionException * The client tool version is invalid. */ @Override public UpdateJobResult updateJob(UpdateJobRequest updateJobRequest) { ExecutionContext executionContext = createExecutionContext(updateJobRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<UpdateJobRequest> request = null; Response<UpdateJobResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new UpdateJobRequestMarshaller() .marshall(updateJobRequest); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } response = invoke(request, new UpdateJobResultStaxUnmarshaller(), executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * Returns additional metadata for a previously executed successful, * request, typically used for debugging issues where a service isn't acting * as expected. This data isn't considered part of the result data returned * by an operation, so it's available through this separate, diagnostic * interface. * <p> * Response metadata is only cached for a limited period of time, so if you * need to access this extra diagnostic information for an executed request, * you should use this method to retrieve it as soon as possible after * executing the request. * * @param request * The originally executed request * * @return The response metadata for the specified request, or null if none * is available. 
*/ public ResponseMetadata getCachedResponseMetadata( AmazonWebServiceRequest request) { return client.getResponseMetadataForRequest(request); } private <X, Y extends AmazonWebServiceRequest> Response<X> invoke( Request<Y> request, Unmarshaller<X, StaxUnmarshallerContext> unmarshaller, ExecutionContext executionContext) { request.setEndpoint(endpoint); request.setTimeOffset(timeOffset); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); AWSCredentials credentials; awsRequestMetrics.startEvent(Field.CredentialsRequestTime); try { credentials = awsCredentialsProvider.getCredentials(); } finally { awsRequestMetrics.endEvent(Field.CredentialsRequestTime); } AmazonWebServiceRequest originalRequest = request.getOriginalRequest(); if (originalRequest != null && originalRequest.getRequestCredentials() != null) { credentials = originalRequest.getRequestCredentials(); } executionContext.setCredentials(credentials); StaxResponseHandler<X> responseHandler = new StaxResponseHandler<X>( unmarshaller); DefaultErrorResponseHandler errorResponseHandler = new DefaultErrorResponseHandler( exceptionUnmarshallers); return client.execute(request, responseHandler, errorResponseHandler, executionContext); } }
/*
 * Copyright 2015 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.ssl;

import static io.netty.util.internal.ObjectUtil.checkNotNull;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLException;
import javax.net.ssl.TrustManagerFactory;
import java.io.File;
import java.io.InputStream;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;

/**
 * Builder for configuring a new SslContext for creation.
 */
public final class SslContextBuilder {

    /**
     * Creates a builder for new client-side {@link SslContext}.
     */
    public static SslContextBuilder forClient() {
        return new SslContextBuilder(false);
    }

    /**
     * Creates a builder for new server-side {@link SslContext}.
     *
     * @param keyCertChainFile an X.509 certificate chain file in PEM format
     * @param keyFile a PKCS#8 private key file in PEM format
     * @see #keyManager(File, File)
     */
    public static SslContextBuilder forServer(File keyCertChainFile, File keyFile) {
        return new SslContextBuilder(true).keyManager(keyCertChainFile, keyFile);
    }

    /**
     * Creates a builder for new server-side {@link SslContext}.
     *
     * @param keyCertChainInputStream an input stream for an X.509 certificate chain in PEM format
     * @param keyInputStream an input stream for a PKCS#8 private key in PEM format
     * @see #keyManager(InputStream, InputStream)
     */
    public static SslContextBuilder forServer(InputStream keyCertChainInputStream, InputStream keyInputStream) {
        return new SslContextBuilder(true).keyManager(keyCertChainInputStream, keyInputStream);
    }

    /**
     * Creates a builder for new server-side {@link SslContext}.
     *
     * @param key a PKCS#8 private key
     * @param keyCertChain the X.509 certificate chain
     * @see #keyManager(PrivateKey, X509Certificate[])
     */
    public static SslContextBuilder forServer(PrivateKey key, X509Certificate... keyCertChain) {
        return new SslContextBuilder(true).keyManager(key, keyCertChain);
    }

    /**
     * Creates a builder for new server-side {@link SslContext}.
     *
     * @param keyCertChainFile an X.509 certificate chain file in PEM format
     * @param keyFile a PKCS#8 private key file in PEM format
     * @param keyPassword the password of the {@code keyFile}, or {@code null} if it's not
     *     password-protected
     * @see #keyManager(File, File, String)
     */
    public static SslContextBuilder forServer(
            File keyCertChainFile, File keyFile, String keyPassword) {
        return new SslContextBuilder(true).keyManager(keyCertChainFile, keyFile, keyPassword);
    }

    /**
     * Creates a builder for new server-side {@link SslContext}.
     *
     * @param keyCertChainInputStream an input stream for an X.509 certificate chain in PEM format
     * @param keyInputStream an input stream for a PKCS#8 private key in PEM format
     * @param keyPassword the password of the {@code keyInputStream}, or {@code null} if it's not
     *     password-protected
     * @see #keyManager(InputStream, InputStream, String)
     */
    public static SslContextBuilder forServer(
            InputStream keyCertChainInputStream, InputStream keyInputStream, String keyPassword) {
        return new SslContextBuilder(true).keyManager(keyCertChainInputStream, keyInputStream,
            keyPassword);
    }

    /**
     * Creates a builder for new server-side {@link SslContext}.
     *
     * @param key a PKCS#8 private key
     * @param keyPassword the password of the {@code key}, or {@code null} if it's not
     *     password-protected
     * @param keyCertChain the X.509 certificate chain
     * @see #keyManager(PrivateKey, String, X509Certificate[])
     */
    public static SslContextBuilder forServer(
            PrivateKey key, String keyPassword, X509Certificate... keyCertChain) {
        return new SslContextBuilder(true).keyManager(key, keyPassword, keyCertChain);
    }

    /**
     * Creates a builder for new server-side {@link SslContext}.
     *
     * @param keyManagerFactory non-{@code null} factory for server's private key
     * @see #keyManager(KeyManagerFactory)
     */
    public static SslContextBuilder forServer(KeyManagerFactory keyManagerFactory) {
        return new SslContextBuilder(true).keyManager(keyManagerFactory);
    }

    // true when building a server context; servers must supply key material,
    // clients may omit it (disables mutual authentication).
    private final boolean forServer;
    private SslProvider provider;
    private X509Certificate[] trustCertCollection;
    private TrustManagerFactory trustManagerFactory;
    private X509Certificate[] keyCertChain;
    private PrivateKey key;
    private String keyPassword;
    private KeyManagerFactory keyManagerFactory;
    private Iterable<String> ciphers;
    private CipherSuiteFilter cipherFilter = IdentityCipherSuiteFilter.INSTANCE;
    private ApplicationProtocolConfig apn;
    private long sessionCacheSize;
    private long sessionTimeout;
    private ClientAuth clientAuth = ClientAuth.NONE;

    private SslContextBuilder(boolean forServer) {
        this.forServer = forServer;
    }

    /**
     * The {@link SslContext} implementation to use. {@code null} uses the default one.
     */
    public SslContextBuilder sslProvider(SslProvider provider) {
        this.provider = provider;
        return this;
    }

    /**
     * Trusted certificates for verifying the remote endpoint's certificate. The file should
     * contain an X.509 certificate collection in PEM format. {@code null} uses the system default.
     */
    public SslContextBuilder trustManager(File trustCertCollectionFile) {
        try {
            return trustManager(SslContext.toX509Certificates(trustCertCollectionFile));
        } catch (Exception e) {
            throw new IllegalArgumentException("File does not contain valid certificates: "
                    + trustCertCollectionFile, e);
        }
    }

    /**
     * Trusted certificates for verifying the remote endpoint's certificate. The input stream should
     * contain an X.509 certificate collection in PEM format. {@code null} uses the system default.
     */
    public SslContextBuilder trustManager(InputStream trustCertCollectionInputStream) {
        try {
            return trustManager(SslContext.toX509Certificates(trustCertCollectionInputStream));
        } catch (Exception e) {
            throw new IllegalArgumentException("Input stream does not contain valid certificates.", e);
        }
    }

    /**
     * Trusted certificates for verifying the remote endpoint's certificate, {@code null} uses the system default.
     */
    public SslContextBuilder trustManager(X509Certificate... trustCertCollection) {
        // Defensive copy; setting certificates clears any previously-set factory
        // because the two are mutually exclusive sources of trust material.
        this.trustCertCollection = trustCertCollection != null ? trustCertCollection.clone() : null;
        trustManagerFactory = null;
        return this;
    }

    /**
     * Trusted manager for verifying the remote endpoint's certificate. Using a {@link
     * TrustManagerFactory} is only supported for {@link SslProvider#JDK}; for other providers,
     * you must use {@link #trustManager(File)}. {@code null} uses the system default.
     */
    public SslContextBuilder trustManager(TrustManagerFactory trustManagerFactory) {
        trustCertCollection = null;
        this.trustManagerFactory = trustManagerFactory;
        return this;
    }

    /**
     * Identifying certificate for this host. {@code keyCertChainFile} and {@code keyFile} may
     * be {@code null} for client contexts, which disables mutual authentication.
     *
     * @param keyCertChainFile an X.509 certificate chain file in PEM format
     * @param keyFile a PKCS#8 private key file in PEM format
     */
    public SslContextBuilder keyManager(File keyCertChainFile, File keyFile) {
        return keyManager(keyCertChainFile, keyFile, null);
    }

    /**
     * Identifying certificate for this host. {@code keyCertChainInputStream} and {@code keyInputStream} may
     * be {@code null} for client contexts, which disables mutual authentication.
     *
     * @param keyCertChainInputStream an input stream for an X.509 certificate chain in PEM format
     * @param keyInputStream an input stream for a PKCS#8 private key in PEM format
     */
    public SslContextBuilder keyManager(InputStream keyCertChainInputStream, InputStream keyInputStream) {
        return keyManager(keyCertChainInputStream, keyInputStream, null);
    }

    /**
     * Identifying certificate for this host. {@code keyCertChain} and {@code key} may
     * be {@code null} for client contexts, which disables mutual authentication.
     *
     * @param key a PKCS#8 private key
     * @param keyCertChain an X.509 certificate chain
     */
    public SslContextBuilder keyManager(PrivateKey key, X509Certificate... keyCertChain) {
        return keyManager(key, null, keyCertChain);
    }

    /**
     * Identifying certificate for this host. {@code keyCertChainFile} and {@code keyFile} may
     * be {@code null} for client contexts, which disables mutual authentication.
     *
     * @param keyCertChainFile an X.509 certificate chain file in PEM format
     * @param keyFile a PKCS#8 private key file in PEM format
     * @param keyPassword the password of the {@code keyFile}, or {@code null} if it's not
     *     password-protected
     */
    public SslContextBuilder keyManager(File keyCertChainFile, File keyFile, String keyPassword) {
        X509Certificate[] keyCertChain;
        PrivateKey key;
        try {
            keyCertChain = SslContext.toX509Certificates(keyCertChainFile);
        } catch (Exception e) {
            throw new IllegalArgumentException("File does not contain valid certificates: "
                    + keyCertChainFile, e);
        }
        try {
            key = SslContext.toPrivateKey(keyFile, keyPassword);
        } catch (Exception e) {
            throw new IllegalArgumentException("File does not contain valid private key: "
                    + keyFile, e);
        }
        return keyManager(key, keyPassword, keyCertChain);
    }

    /**
     * Identifying certificate for this host. {@code keyCertChainInputStream} and {@code keyInputStream} may
     * be {@code null} for client contexts, which disables mutual authentication.
     *
     * @param keyCertChainInputStream an input stream for an X.509 certificate chain in PEM format
     * @param keyInputStream an input stream for a PKCS#8 private key in PEM format
     * @param keyPassword the password of the {@code keyInputStream}, or {@code null} if it's not
     *     password-protected
     */
    public SslContextBuilder keyManager(InputStream keyCertChainInputStream, InputStream keyInputStream,
            String keyPassword) {
        X509Certificate[] keyCertChain;
        PrivateKey key;
        try {
            keyCertChain = SslContext.toX509Certificates(keyCertChainInputStream);
        } catch (Exception e) {
            // Fixed message: previously read "Input stream not contain valid certificates."
            throw new IllegalArgumentException("Input stream does not contain valid certificates.", e);
        }
        try {
            key = SslContext.toPrivateKey(keyInputStream, keyPassword);
        } catch (Exception e) {
            throw new IllegalArgumentException("Input stream does not contain valid private key.", e);
        }
        return keyManager(key, keyPassword, keyCertChain);
    }

    /**
     * Identifying certificate for this host. {@code keyCertChain} and {@code key} may
     * be {@code null} for client contexts, which disables mutual authentication.
     *
     * @param key a PKCS#8 private key file
     * @param keyPassword the password of the {@code key}, or {@code null} if it's not
     *     password-protected
     * @param keyCertChain an X.509 certificate chain
     */
    public SslContextBuilder keyManager(PrivateKey key, String keyPassword, X509Certificate... keyCertChain) {
        if (forServer) {
            // Servers must present a certificate; clients may legitimately pass null.
            checkNotNull(keyCertChain, "keyCertChain required for servers");
            if (keyCertChain.length == 0) {
                throw new IllegalArgumentException("keyCertChain must be non-empty");
            }
            checkNotNull(key, "key required for servers");
        }
        if (keyCertChain == null || keyCertChain.length == 0) {
            this.keyCertChain = null;
        } else {
            for (X509Certificate cert: keyCertChain) {
                if (cert == null) {
                    throw new IllegalArgumentException("keyCertChain contains null entry");
                }
            }
            this.keyCertChain = keyCertChain.clone();
        }
        this.key = key;
        this.keyPassword = keyPassword;
        // Explicit key material replaces any previously-set KeyManagerFactory.
        keyManagerFactory = null;
        return this;
    }

    /**
     * Identifying manager for this host. {@code keyManagerFactory} may be {@code null} for
     * client contexts, which disables mutual authentication. Using a {@code KeyManagerFactory}
     * is only supported for {@link SslProvider#JDK}; for other providers, you must use {@link
     * #keyManager(File, File)} or {@link #keyManager(File, File, String)}.
     */
    public SslContextBuilder keyManager(KeyManagerFactory keyManagerFactory) {
        if (forServer) {
            checkNotNull(keyManagerFactory, "keyManagerFactory required for servers");
        }
        // A factory replaces any previously-set explicit key material.
        keyCertChain = null;
        key = null;
        keyPassword = null;
        this.keyManagerFactory = keyManagerFactory;
        return this;
    }

    /**
     * The cipher suites to enable, in the order of preference. {@code null} to use default
     * cipher suites.
     */
    public SslContextBuilder ciphers(Iterable<String> ciphers) {
        return ciphers(ciphers, IdentityCipherSuiteFilter.INSTANCE);
    }

    /**
     * The cipher suites to enable, in the order of preference. {@code cipherFilter} will be
     * applied to the ciphers before use if provider is {@link SslProvider#JDK}. If {@code
     * ciphers} is {@code null}, then the default cipher suites will be used.
     */
    public SslContextBuilder ciphers(Iterable<String> ciphers, CipherSuiteFilter cipherFilter) {
        checkNotNull(cipherFilter, "cipherFilter");
        this.ciphers = ciphers;
        this.cipherFilter = cipherFilter;
        return this;
    }

    /**
     * Application protocol negotiation configuration. {@code null} disables support.
     */
    public SslContextBuilder applicationProtocolConfig(ApplicationProtocolConfig apn) {
        this.apn = apn;
        return this;
    }

    /**
     * Set the size of the cache used for storing SSL session objects. {@code 0} to use the
     * default value.
     */
    public SslContextBuilder sessionCacheSize(long sessionCacheSize) {
        this.sessionCacheSize = sessionCacheSize;
        return this;
    }

    /**
     * Set the timeout for the cached SSL session objects, in seconds. {@code 0} to use the
     * default value.
     */
    public SslContextBuilder sessionTimeout(long sessionTimeout) {
        this.sessionTimeout = sessionTimeout;
        return this;
    }

    /**
     * Sets the client authentication mode.
     */
    public SslContextBuilder clientAuth(ClientAuth clientAuth) {
        this.clientAuth = checkNotNull(clientAuth, "clientAuth");
        return this;
    }

    /**
     * Create new {@code SslContext} instance with configured settings.
     *
     * @throws SSLException if the context could not be created from the configured settings
     */
    public SslContext build() throws SSLException {
        if (forServer) {
            return SslContext.newServerContextInternal(provider, trustCertCollection,
                trustManagerFactory, keyCertChain, key, keyPassword, keyManagerFactory,
                ciphers, cipherFilter, apn, sessionCacheSize, sessionTimeout, clientAuth);
        } else {
            return SslContext.newClientContextInternal(provider, trustCertCollection,
                trustManagerFactory, keyCertChain, key, keyPassword, keyManagerFactory,
                ciphers, cipherFilter, apn, sessionCacheSize, sessionTimeout);
        }
    }
}
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.profile.codeInspection.ui.table;

import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeInsight.daemon.HighlightDisplayKey;
import com.intellij.codeInspection.ex.Descriptor;
import com.intellij.codeInspection.ex.InspectionProfileImpl;
import com.intellij.codeInspection.ex.ScopeToolState;
import com.intellij.ide.DataManager;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.Comparing;
import com.intellij.profile.codeInspection.ui.ScopeOrderComparator;
import com.intellij.profile.codeInspection.ui.ScopesChooser;
import com.intellij.profile.codeInspection.ui.inspectionsTree.InspectionConfigTreeNode;
import com.intellij.psi.search.scope.packageSet.NamedScope;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.table.JBTable;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.EditableModel;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableColumn;
import javax.swing.table.TableColumnModel;
import java.awt.*;
import java.util.*;
import java.util.List;

/**
 * Three-column table (enabled checkbox / scope name / severity) for editing the per-scope
 * enablement and severity of one or more inspection tools inside an inspection profile.
 * The last row always represents the default ("Everywhere else") state; scope rows are
 * aggregated across all selected tools.
 *
 * @author Dmitry Batkovich
 */
public class ScopesAndSeveritiesTable extends JBTable {
  private final static Logger LOG = Logger.getInstance(ScopesAndSeveritiesTable.class);

  // Sentinel severity shown when the aggregated tools/scopes disagree on a value.
  public static final HighlightSeverity MIXED_FAKE_SEVERITY = new HighlightSeverity("Mixed", -1);
  @SuppressWarnings("UnusedDeclaration")
  public static final HighlightDisplayLevel MIXED_FAKE_LEVEL = new HighlightDisplayLevel(MIXED_FAKE_SEVERITY, JBUI.scale(EmptyIcon.create(12)));

  // Column indices for the model below.
  private final static int SCOPE_ENABLED_COLUMN = 0;
  private final static int SCOPE_NAME_COLUMN = 1;
  private final static int SEVERITY_COLUMN = 2;

  public ScopesAndSeveritiesTable(final TableSettings tableSettings) {
    super(new MyTableModel(tableSettings));
    final TableColumnModel columnModel = getColumnModel();

    // Narrow checkbox column; the three-state renderer doubles as the editor.
    final TableColumn scopeEnabledColumn = columnModel.getColumn(SCOPE_ENABLED_COLUMN);
    scopeEnabledColumn.setMaxWidth(30);
    scopeEnabledColumn.setCellRenderer(new ThreeStateCheckBoxRenderer());
    scopeEnabledColumn.setCellEditor(new ThreeStateCheckBoxRenderer());

    // Severity editor notifies the settings callback after each edit.
    final TableColumn severityColumn = columnModel.getColumn(SEVERITY_COLUMN);
    severityColumn.setCellRenderer(SeverityRenderer.create(tableSettings.getInspectionProfile(), null));
    severityColumn.setCellEditor(SeverityRenderer.create(tableSettings.getInspectionProfile(), () -> tableSettings.onSettingsChanged()));

    setColumnSelectionAllowed(false);
    setRowSelectionAllowed(true);
    setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    // Propagate a row selection to the settings callback, but only when the row maps
    // to exactly one existing scope state (i.e. the selection is unambiguous).
    getSelectionModel().addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(final ListSelectionEvent e) {
        final int idx = getSelectionModel().getMinSelectionIndex();
        if (idx >= 0) {
          final ExistedScopesStatesAndNonExistNames scopeToolState = ((MyTableModel)getModel()).getScopeToolState(idx);
          final List<ScopeToolState> existedStates = scopeToolState.getExistedStates();
          if (existedStates.size() == 1 && scopeToolState.getNonExistNames().isEmpty()) {
            tableSettings.onScopeChosen(existedStates.get(0));
          }
        }
      }
    });
    setRowSelectionInterval(0, 0);

    setStriped(true);
    setShowGrid(false);

    // The model needs the table back-reference to anchor the "add scope" popup.
    ((MyTableModel)getModel()).setTable(this);
  }

  /**
   * Immutable bundle of the tool nodes being edited plus callbacks the table fires when
   * the user changes scopes, severities or enablement.
   */
  public abstract static class TableSettings {
    private final List<InspectionConfigTreeNode.Tool> myNodes;
    private final List<String> myKeyNames;
    private final List<HighlightDisplayKey> myKeys;
    private final InspectionProfileImpl myInspectionProfile;
    private final Project myProject;

    protected TableSettings(final List<InspectionConfigTreeNode.Tool> nodes,
                            final InspectionProfileImpl inspectionProfile,
                            final Project project) {
      myNodes = nodes;
      myKeys = new ArrayList<>(myNodes.size());
      myKeyNames = new ArrayList<>(myNodes.size());
      for(final InspectionConfigTreeNode.Tool node : nodes) {
        final HighlightDisplayKey key = node.getKey();
        myKeys.add(key);
        // NOTE(review): relies on HighlightDisplayKey.toString() being the key name —
        // confirm this matches the profile's tool-name keys.
        myKeyNames.add(key.toString());
      }
      myInspectionProfile = inspectionProfile;
      myProject = project;
    }

    public List<HighlightDisplayKey> getKeys() {
      return myKeys;
    }

    public List<String> getKeyNames() {
      return myKeyNames;
    }

    public List<InspectionConfigTreeNode.Tool> getNodes() {
      return myNodes;
    }

    public InspectionProfileImpl getInspectionProfile() {
      return myInspectionProfile;
    }

    public Project getProject() {
      return myProject;
    }

    protected abstract void onScopeAdded();

    protected abstract void onScopesOrderChanged();

    protected abstract void onScopeRemoved(final int scopesCount);

    protected abstract void onScopeChosen(final @NotNull ScopeToolState scopeToolState);

    protected abstract void onSettingsChanged();
  }

  /**
   * Folds the severities of all given states into one value: the common severity if they
   * all agree, {@link #MIXED_FAKE_SEVERITY} otherwise.
   * NOTE(review): returns null for an empty list despite the @NotNull annotation — verify
   * callers never pass an empty list.
   */
  @NotNull
  public static HighlightSeverity getSeverity(final List<ScopeToolState> scopeToolStates) {
    HighlightSeverity previousValue = null;
    for (final ScopeToolState scopeToolState : scopeToolStates) {
      final HighlightSeverity currentValue = scopeToolState.getLevel().getSeverity();
      if (previousValue == null) {
        previousValue = currentValue;
      } else if (!previousValue.equals(currentValue)){
        return MIXED_FAKE_SEVERITY;
      }
    }
    return previousValue;
  }

  /**
   * Table model aggregating, per scope row, the states of every selected tool; the row
   * after the last scope (index == myScopeNames.length) is the "Everywhere else" default.
   */
  private static class MyTableModel extends AbstractTableModel implements EditableModel {
    private final InspectionProfileImpl myInspectionProfile;
    private final List<String> myKeyNames;
    private final Project myProject;
    private final TableSettings myTableSettings;
    private final List<HighlightDisplayKey> myKeys;
    private final Comparator<String> myScopeComparator;
    private JTable myTable;          // set after construction via setTable()
    private String[] myScopeNames;   // union of scope names across all tools, profile-ordered

    public MyTableModel(final TableSettings tableSettings) {
      myTableSettings = tableSettings;
      myProject = tableSettings.getProject();
      myInspectionProfile = tableSettings.getInspectionProfile();
      myKeys = tableSettings.getKeys();
      myKeyNames = tableSettings.getKeyNames();
      myScopeComparator = new ScopeOrderComparator(myInspectionProfile);
      refreshAggregatedScopes();
    }

    public void setTable(JTable table) {
      myTable = table;
    }

    @Override
    public boolean isCellEditable(final int rowIndex, final int columnIndex) {
      // Scope name is read-only; enablement checkbox is always editable.
      if (columnIndex == SCOPE_NAME_COLUMN) {
        return false;
      } else if (columnIndex == SCOPE_ENABLED_COLUMN) {
        return true;
      }
      assert columnIndex == SEVERITY_COLUMN;
      final SeverityState state = getSeverityState(rowIndex);
      if (state.isDisabled()) {
        return false;
      }
      // Severity is only editable when every tool actually has a state for this row.
      final ExistedScopesStatesAndNonExistNames scopeToolState = getScopeToolState(rowIndex);
      return scopeToolState.getNonExistNames().isEmpty();
    }

    @Override
    public int getRowCount() {
      // All aggregated scopes plus the trailing "Everywhere else" row.
      return lastRowIndex() + 1;
    }

    @Nullable
    @Override
    public String getColumnName(final int column) {
      return null;
    }

    @Override
    public int getColumnCount() {
      return 3;
    }

    @Override
    public Class<?> getColumnClass(final int columnIndex) {
      if (SCOPE_ENABLED_COLUMN == columnIndex) {
        return Boolean.class;
      }
      if (SCOPE_NAME_COLUMN == columnIndex) {
        return String.class;
      }
      if (SEVERITY_COLUMN == columnIndex) {
        return SeverityState.class;
      }
      throw new IllegalArgumentException();
    }

    @Override
    public Object getValueAt(final int rowIndex, final int columnIndex) {
      if (rowIndex < 0) {
        return null;
      }
      switch (columnIndex) {
        case SCOPE_ENABLED_COLUMN:
          return isEnabled(rowIndex);
        case SCOPE_NAME_COLUMN:
          return rowIndex == lastRowIndex() ? "Everywhere else" : getScopeName(rowIndex);
        case SEVERITY_COLUMN:
          return getSeverityState(rowIndex);
        default:
          throw new IllegalArgumentException("Invalid column index " + columnIndex);
      }
    }

    // Scope of the first existing state for the row; all tools share the same scope name here.
    private NamedScope getScope(final int rowIndex) {
      return getScopeToolState(rowIndex).getExistedStates().get(0).getScope(myProject);
    }

    private String getScopeName(final int rowIndex) {
      return getScopeToolState(rowIndex).getExistedStates().get(0).getScopeName();
    }

    // Aggregated severity cell for the row; "mixed" when some tools lack a state here.
    @NotNull
    private SeverityState getSeverityState(final int rowIndex) {
      boolean disabled = Boolean.FALSE.equals(isEnabled(rowIndex));
      final ExistedScopesStatesAndNonExistNames existedScopesStatesAndNonExistNames = getScopeToolState(rowIndex);
      if (!existedScopesStatesAndNonExistNames.getNonExistNames().isEmpty()) {
        return new SeverityState(MIXED_FAKE_SEVERITY, false, disabled);
      }
      return new SeverityState(getSeverity(existedScopesStatesAndNonExistNames.getExistedStates()), !disabled, disabled);
    }

    // TRUE/FALSE when all tools agree for the row; null means "mixed" (three-state checkbox).
    @Nullable
    private Boolean isEnabled(final int rowIndex) {
      Boolean previousValue = null;
      final ExistedScopesStatesAndNonExistNames existedScopesStatesAndNonExistNames = getScopeToolState(rowIndex);
      for (final ScopeToolState scopeToolState : existedScopesStatesAndNonExistNames.getExistedStates()) {
        final boolean currentValue = scopeToolState.isEnabled();
        if (previousValue == null) {
          previousValue = currentValue;
        } else if (!previousValue.equals(currentValue)){
          return null;
        }
      }
      // Tools without a state for this scope count as "mixed" unless everything is off.
      if (!existedScopesStatesAndNonExistNames.getNonExistNames().isEmpty() && !Boolean.FALSE.equals(previousValue)) {
        return null;
      }
      return previousValue;
    }

    // Partitions the edited tools into those that have a state for this row and those that don't.
    private ExistedScopesStatesAndNonExistNames getScopeToolState(final int rowIndex) {
      final List<String> nonExistNames = new SmartList<>();
      final List<ScopeToolState> existedStates = new SmartList<>();
      for (final String keyName : myKeyNames) {
        final ScopeToolState scopeToolState = getScopeToolState(keyName, rowIndex);
        if (scopeToolState != null) {
          existedStates.add(scopeToolState);
        } else {
          nonExistNames.add(keyName);
        }
      }
      return new ExistedScopesStatesAndNonExistNames(existedStates, nonExistNames);
    }

    // Resolves one tool's state for a row: default state for the last row, otherwise the
    // tool's non-default state whose scope name matches the row's scope.
    @Nullable
    private ScopeToolState getScopeToolState(final String keyName, final int rowIndex) {
      if (rowIndex == lastRowIndex()) {
        return myInspectionProfile.getToolDefaultState(keyName, myProject);
      } else {
        final String scopeName = myScopeNames[rowIndex];
        final List<ScopeToolState> nonDefaultTools = myInspectionProfile.getNonDefaultTools(keyName, myProject);
        for (final ScopeToolState nonDefaultTool : nonDefaultTools) {
          if (Comparing.equal(scopeName, nonDefaultTool.getScopeName())) {
            return nonDefaultTool;
          }
        }
      }
      return null;
    }

    // Rebuilds the union of scope names across all edited tools, sorted by profile order.
    private void refreshAggregatedScopes() {
      final LinkedHashSet<String> scopesNames = new LinkedHashSet<>();
      for (final String keyName : myKeyNames) {
        final List<ScopeToolState> nonDefaultTools = myInspectionProfile.getNonDefaultTools(keyName, myProject);
        for (final ScopeToolState tool : nonDefaultTools) {
          scopesNames.add(tool.getScopeName());
        }
      }
      myScopeNames = ArrayUtil.toStringArray(scopesNames);
      Arrays.sort(myScopeNames, myScopeComparator);
    }

    private int lastRowIndex() {
      return myScopeNames.length;
    }

    @Override
    public void setValueAt(final Object value, final int rowIndex, final int columnIndex) {
      if (value == null) {
        return;
      }
      if (columnIndex == SEVERITY_COLUMN) {
        // Map the chosen severity back to a display level and apply it to all edited tools.
        final SeverityState severityState = (SeverityState)value;
        final HighlightDisplayLevel level = HighlightDisplayLevel.find(severityState.getSeverity().getName());
        if (level == null) {
          LOG.error("no display level found for name " + severityState.getSeverity().getName());
          return;
        }
        final String scopeName = rowIndex == lastRowIndex() ? null : getScopeName(rowIndex);
        myInspectionProfile.setErrorLevel(myKeys, level, scopeName, myProject);
      }
      else if (columnIndex == SCOPE_ENABLED_COLUMN) {
        final NamedScope scope = getScope(rowIndex);
        if (scope == null) {
          return;
        }
        if ((Boolean)value) {
          // Enabling a row also enables each tool as a whole.
          for (final String keyName : myKeyNames) {
            myInspectionProfile.enableTool(keyName, myProject);
          }
          if (rowIndex == lastRowIndex()) {
            myInspectionProfile.enableToolsByDefault(myKeyNames, myProject);
          }
          else {
            //TODO create scopes states if not exist (need scope sorting)
            myInspectionProfile.enableTools(myKeyNames, scope, myProject);
          }
        }
        else {
          if (rowIndex == lastRowIndex()) {
            myInspectionProfile.disableToolByDefault(myKeyNames, myProject);
          }
          else {
            myInspectionProfile.disableTools(myKeyNames, scope, myProject);
          }
        }
        // Single-tool edits re-select the row's state so dependent UI refreshes.
        if (myKeyNames.size() == 1) {
          final String keyName = ContainerUtil.getFirstItem(myKeyNames);
          final ScopeToolState state = getScopeToolState(keyName, rowIndex);
          myTableSettings.onScopeChosen(state);
        }
      }
      myTableSettings.onSettingsChanged();
    }

    @Override
    public void removeRow(final int idx) {
      // The "Everywhere else" default row can never be removed.
      if (idx != lastRowIndex()) {
        myInspectionProfile.removeScopes(myKeyNames, getScopeName(idx), myProject);
        refreshAggregatedScopes();
        myTableSettings.onScopeRemoved(getRowCount());
      }
    }

    @Override
    public void addRow() {
      // Pops up the scope chooser anchored at the table's top-right corner.
      final List<Descriptor> descriptors = ContainerUtil.map(myTableSettings.getNodes(), inspectionConfigTreeNode -> inspectionConfigTreeNode.getDefaultDescriptor());
      final ScopesChooser scopesChooser = new ScopesChooser(descriptors, myInspectionProfile, myProject, myScopeNames) {
        @Override
        protected void onScopeAdded() {
          myTableSettings.onScopeAdded();
          refreshAggregatedScopes();
        }

        @Override
        protected void onScopesOrderChanged() {
          myTableSettings.onScopesOrderChanged();
        }
      };
      DataContext dataContext = DataManager.getInstance().getDataContext(myTable);
      final ListPopup popup = JBPopupFactory.getInstance()
        .createActionGroupPopup(ScopesChooser.TITLE, scopesChooser.createPopupActionGroup(myTable), dataContext,
                                JBPopupFactory.ActionSelectionAid.SPEEDSEARCH, false);
      final RelativePoint point = new RelativePoint(myTable, new Point(myTable.getWidth() - popup.getContent().getPreferredSize().width, 0));
      popup.show(point);
    }

    @Override
    public void exchangeRows(final int oldIndex, final int newIndex) {
      // Row reordering is not supported.
    }

    @Override
    public boolean canExchangeRows(final int oldIndex, final int newIndex) {
      return false;
    }
  }

  /**
   * Result of aggregating one row: the tool states that exist for the row's scope plus the
   * key names of tools that have no state there.
   */
  private static class ExistedScopesStatesAndNonExistNames {
    private final List<ScopeToolState> myExistedStates;
    private final List<String> myNonExistNames;

    public ExistedScopesStatesAndNonExistNames(final List<ScopeToolState> existedStates, final List<String> nonExistNames) {
      myExistedStates = existedStates;
      myNonExistNames = nonExistNames;
    }

    public List<ScopeToolState> getExistedStates() {
      return myExistedStates;
    }

    public List<String> getNonExistNames() {
      return myNonExistNames;
    }
  }
}
package com.cisco.axl.api._8; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for XSipRoutePattern complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="XSipRoutePattern"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence minOccurs="0"> * &lt;element name="pattern" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="usage" type="{http://www.cisco.com/AXL/API/8.0}XPatternUsage"/> * &lt;element name="routePartitionName" type="{http://www.cisco.com/AXL/API/8.0}XFkType"/> * &lt;element name="blockEnable" type="{http://www.cisco.com/AXL/API/8.0}boolean" minOccurs="0"/> * &lt;element name="callingPartyTransformationMask" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="useCallingPartyPhoneMask" type="{http://www.cisco.com/AXL/API/8.0}XStatus"/> * &lt;element name="callingPartyPrefixDigits" type="{http://www.cisco.com/AXL/API/8.0}String50" minOccurs="0"/> * &lt;element name="callingLinePresentationBit" type="{http://www.cisco.com/AXL/API/8.0}XPresentationBit" minOccurs="0"/> * &lt;element name="callingNamePresentationBit" type="{http://www.cisco.com/AXL/API/8.0}XPresentationBit" minOccurs="0"/> * &lt;element name="connectedLinePresentationBit" type="{http://www.cisco.com/AXL/API/8.0}XPresentationBit" minOccurs="0"/> * &lt;element name="connectedNamePresentationBit" type="{http://www.cisco.com/AXL/API/8.0}XPresentationBit" minOccurs="0"/> * &lt;element name="sipTrunkName" type="{http://www.cisco.com/AXL/API/8.0}XFkType"/> * &lt;element 
name="dnOrPatternIpv6" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="routeOnUserPart" type="{http://www.cisco.com/AXL/API/8.0}boolean" minOccurs="0"/> * &lt;element name="useCallerCss" type="{http://www.cisco.com/AXL/API/8.0}boolean" minOccurs="0"/> * &lt;element name="domainRoutingCssName" type="{http://www.cisco.com/AXL/API/8.0}XFkType" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "XSipRoutePattern", propOrder = { "pattern", "description", "usage", "routePartitionName", "blockEnable", "callingPartyTransformationMask", "useCallingPartyPhoneMask", "callingPartyPrefixDigits", "callingLinePresentationBit", "callingNamePresentationBit", "connectedLinePresentationBit", "connectedNamePresentationBit", "sipTrunkName", "dnOrPatternIpv6", "routeOnUserPart", "useCallerCss", "domainRoutingCssName" }) public class XSipRoutePattern { protected String pattern; protected String description; protected String usage; @XmlElementRef(name = "routePartitionName", type = JAXBElement.class) protected JAXBElement<XFkType> routePartitionName; protected String blockEnable; @XmlElementRef(name = "callingPartyTransformationMask", type = JAXBElement.class) protected JAXBElement<String> callingPartyTransformationMask; @XmlElement(defaultValue = "Default") protected String useCallingPartyPhoneMask; @XmlElementRef(name = "callingPartyPrefixDigits", type = JAXBElement.class) protected JAXBElement<String> callingPartyPrefixDigits; @XmlElement(defaultValue = "Default") protected String callingLinePresentationBit; @XmlElement(defaultValue = "Default") protected String callingNamePresentationBit; @XmlElement(defaultValue = "Default") protected String connectedLinePresentationBit; @XmlElement(defaultValue = "Default") protected String connectedNamePresentationBit; protected XFkType sipTrunkName; protected String dnOrPatternIpv6; protected String 
routeOnUserPart; protected String useCallerCss; @XmlElementRef(name = "domainRoutingCssName", type = JAXBElement.class) protected JAXBElement<XFkType> domainRoutingCssName; /** * Gets the value of the pattern property. * * @return * possible object is * {@link String } * */ public String getPattern() { return pattern; } /** * Sets the value of the pattern property. * * @param value * allowed object is * {@link String } * */ public void setPattern(String value) { this.pattern = value; } /** * Gets the value of the description property. * * @return * possible object is * {@link String } * */ public String getDescription() { return description; } /** * Sets the value of the description property. * * @param value * allowed object is * {@link String } * */ public void setDescription(String value) { this.description = value; } /** * Gets the value of the usage property. * * @return * possible object is * {@link String } * */ public String getUsage() { return usage; } /** * Sets the value of the usage property. * * @param value * allowed object is * {@link String } * */ public void setUsage(String value) { this.usage = value; } /** * Gets the value of the routePartitionName property. * * @return * possible object is * {@link JAXBElement }{@code <}{@link XFkType }{@code >} * */ public JAXBElement<XFkType> getRoutePartitionName() { return routePartitionName; } /** * Sets the value of the routePartitionName property. * * @param value * allowed object is * {@link JAXBElement }{@code <}{@link XFkType }{@code >} * */ public void setRoutePartitionName(JAXBElement<XFkType> value) { this.routePartitionName = ((JAXBElement<XFkType> ) value); } /** * Gets the value of the blockEnable property. * * @return * possible object is * {@link String } * */ public String getBlockEnable() { return blockEnable; } /** * Sets the value of the blockEnable property. 
* * @param value * allowed object is * {@link String } * */ public void setBlockEnable(String value) { this.blockEnable = value; } /** * Gets the value of the callingPartyTransformationMask property. * * @return * possible object is * {@link JAXBElement }{@code <}{@link String }{@code >} * */ public JAXBElement<String> getCallingPartyTransformationMask() { return callingPartyTransformationMask; } /** * Sets the value of the callingPartyTransformationMask property. * * @param value * allowed object is * {@link JAXBElement }{@code <}{@link String }{@code >} * */ public void setCallingPartyTransformationMask(JAXBElement<String> value) { this.callingPartyTransformationMask = ((JAXBElement<String> ) value); } /** * Gets the value of the useCallingPartyPhoneMask property. * * @return * possible object is * {@link String } * */ public String getUseCallingPartyPhoneMask() { return useCallingPartyPhoneMask; } /** * Sets the value of the useCallingPartyPhoneMask property. * * @param value * allowed object is * {@link String } * */ public void setUseCallingPartyPhoneMask(String value) { this.useCallingPartyPhoneMask = value; } /** * Gets the value of the callingPartyPrefixDigits property. * * @return * possible object is * {@link JAXBElement }{@code <}{@link String }{@code >} * */ public JAXBElement<String> getCallingPartyPrefixDigits() { return callingPartyPrefixDigits; } /** * Sets the value of the callingPartyPrefixDigits property. * * @param value * allowed object is * {@link JAXBElement }{@code <}{@link String }{@code >} * */ public void setCallingPartyPrefixDigits(JAXBElement<String> value) { this.callingPartyPrefixDigits = ((JAXBElement<String> ) value); } /** * Gets the value of the callingLinePresentationBit property. * * @return * possible object is * {@link String } * */ public String getCallingLinePresentationBit() { return callingLinePresentationBit; } /** * Sets the value of the callingLinePresentationBit property. 
* * @param value * allowed object is * {@link String } * */ public void setCallingLinePresentationBit(String value) { this.callingLinePresentationBit = value; } /** * Gets the value of the callingNamePresentationBit property. * * @return * possible object is * {@link String } * */ public String getCallingNamePresentationBit() { return callingNamePresentationBit; } /** * Sets the value of the callingNamePresentationBit property. * * @param value * allowed object is * {@link String } * */ public void setCallingNamePresentationBit(String value) { this.callingNamePresentationBit = value; } /** * Gets the value of the connectedLinePresentationBit property. * * @return * possible object is * {@link String } * */ public String getConnectedLinePresentationBit() { return connectedLinePresentationBit; } /** * Sets the value of the connectedLinePresentationBit property. * * @param value * allowed object is * {@link String } * */ public void setConnectedLinePresentationBit(String value) { this.connectedLinePresentationBit = value; } /** * Gets the value of the connectedNamePresentationBit property. * * @return * possible object is * {@link String } * */ public String getConnectedNamePresentationBit() { return connectedNamePresentationBit; } /** * Sets the value of the connectedNamePresentationBit property. * * @param value * allowed object is * {@link String } * */ public void setConnectedNamePresentationBit(String value) { this.connectedNamePresentationBit = value; } /** * Gets the value of the sipTrunkName property. * * @return * possible object is * {@link XFkType } * */ public XFkType getSipTrunkName() { return sipTrunkName; } /** * Sets the value of the sipTrunkName property. * * @param value * allowed object is * {@link XFkType } * */ public void setSipTrunkName(XFkType value) { this.sipTrunkName = value; } /** * Gets the value of the dnOrPatternIpv6 property. 
* * @return * possible object is * {@link String } * */ public String getDnOrPatternIpv6() { return dnOrPatternIpv6; } /** * Sets the value of the dnOrPatternIpv6 property. * * @param value * allowed object is * {@link String } * */ public void setDnOrPatternIpv6(String value) { this.dnOrPatternIpv6 = value; } /** * Gets the value of the routeOnUserPart property. * * @return * possible object is * {@link String } * */ public String getRouteOnUserPart() { return routeOnUserPart; } /** * Sets the value of the routeOnUserPart property. * * @param value * allowed object is * {@link String } * */ public void setRouteOnUserPart(String value) { this.routeOnUserPart = value; } /** * Gets the value of the useCallerCss property. * * @return * possible object is * {@link String } * */ public String getUseCallerCss() { return useCallerCss; } /** * Sets the value of the useCallerCss property. * * @param value * allowed object is * {@link String } * */ public void setUseCallerCss(String value) { this.useCallerCss = value; } /** * Gets the value of the domainRoutingCssName property. * * @return * possible object is * {@link JAXBElement }{@code <}{@link XFkType }{@code >} * */ public JAXBElement<XFkType> getDomainRoutingCssName() { return domainRoutingCssName; } /** * Sets the value of the domainRoutingCssName property. * * @param value * allowed object is * {@link JAXBElement }{@code <}{@link XFkType }{@code >} * */ public void setDomainRoutingCssName(JAXBElement<XFkType> value) { this.domainRoutingCssName = ((JAXBElement<XFkType> ) value); } }
<compilationUnit> <importDeclaration> <IMPORT startline="1" startpos="0">import</IMPORT> <IDENTIFIER startline="1" startpos="7">java</IDENTIFIER> <DOT startline="1" startpos="11">.</DOT> <IDENTIFIER startline="1" startpos="12">io</IDENTIFIER> <DOT startline="1" startpos="14">.</DOT> <IDENTIFIER startline="1" startpos="15">File</IDENTIFIER> <SEMI startline="1" startpos="19">;</SEMI> </importDeclaration> <importDeclaration> <IMPORT startline="2" startpos="0">import</IMPORT> <IDENTIFIER startline="2" startpos="7">java</IDENTIFIER> <DOT startline="2" startpos="11">.</DOT> <IDENTIFIER startline="2" startpos="12">io</IDENTIFIER> <DOT startline="2" startpos="14">.</DOT> <IDENTIFIER startline="2" startpos="15">IOException</IDENTIFIER> <SEMI startline="2" startpos="26">;</SEMI> </importDeclaration> <importDeclaration> <IMPORT startline="4" startpos="0">import</IMPORT> <IDENTIFIER startline="4" startpos="7">numericLiterals</IDENTIFIER> <DOT startline="4" startpos="22">.</DOT> <IDENTIFIER startline="4" startpos="23">BinaryLiterals</IDENTIFIER> <SEMI startline="4" startpos="37">;</SEMI> </importDeclaration> <importDeclaration> <IMPORT startline="5" startpos="0">import</IMPORT> <IDENTIFIER startline="5" startpos="7">numericLiterals</IDENTIFIER> <DOT startline="5" startpos="22">.</DOT> <IDENTIFIER startline="5" startpos="23">State</IDENTIFIER> <SEMI startline="5" startpos="28">;</SEMI> </importDeclaration> <importDeclaration> <IMPORT startline="6" startpos="0">import</IMPORT> <IDENTIFIER startline="6" startpos="7">stringSwitch</IDENTIFIER> <DOT startline="6" startpos="19">.</DOT> <IDENTIFIER startline="6" startpos="20">StringInSwitchStatements</IDENTIFIER> <SEMI startline="6" startpos="44">;</SEMI> </importDeclaration> <importDeclaration> <IMPORT startline="7" startpos="0">import</IMPORT> <IDENTIFIER startline="7" startpos="7">diamonds</IDENTIFIER> <DOT startline="7" startpos="15">.</DOT> <IDENTIFIER startline="7" startpos="16">TypeInference</IDENTIFIER> <SEMI startline="7" 
startpos="29">;</SEMI> </importDeclaration> <importDeclaration> <IMPORT startline="8" startpos="0">import</IMPORT> <IDENTIFIER startline="8" startpos="7">diamonds</IDENTIFIER> <DOT startline="8" startpos="15">.</DOT> <IDENTIFIER startline="8" startpos="16">InstanceGetter</IDENTIFIER> <SEMI startline="8" startpos="30">;</SEMI> </importDeclaration> <importDeclaration> <IMPORT startline="9" startpos="0">import</IMPORT> <IDENTIFIER startline="9" startpos="7">tryWithStatements</IDENTIFIER> <DOT startline="9" startpos="24">.</DOT> <IDENTIFIER startline="9" startpos="25">TryWithStatements</IDENTIFIER> <SEMI startline="9" startpos="42">;</SEMI> </importDeclaration> <importDeclaration> <IMPORT startline="10" startpos="0">import</IMPORT> <IDENTIFIER startline="10" startpos="7">multipleException</IDENTIFIER> <DOT startline="10" startpos="24">.</DOT> <STAR startline="10" startpos="25">*</STAR> <SEMI startline="10" startpos="26">;</SEMI> </importDeclaration> <typeDeclaration> <classOrInterfaceDeclaration> <classDeclaration> <normalClassDeclaration> <modifiers> <PUBLIC startline="12" startpos="0">public</PUBLIC> </modifiers> <CLASS startline="12" startpos="7">class</CLASS> <IDENTIFIER startline="12" startpos="13">Main</IDENTIFIER> <classBody> <LBRACE startline="12" startpos="18">{</LBRACE> <classBodyDeclaration> <memberDecl> <methodDeclaration> <modifiers> <PUBLIC startline="13" startpos="1">public</PUBLIC> <STATIC startline="13" startpos="8">static</STATIC> </modifiers> <VOID startline="13" startpos="15">void</VOID> <IDENTIFIER startline="13" startpos="20">main</IDENTIFIER> <formalParameters> <LPAREN startline="13" startpos="24">(</LPAREN> <formalParameterDecls> <normalParameterDecl> <variableModifiers /> <type> <classOrInterfaceType> <IDENTIFIER startline="13" startpos="25">String</IDENTIFIER> </classOrInterfaceType> <LBRACKET startline="13" startpos="31">[</LBRACKET> <RBRACKET startline="13" startpos="32">]</RBRACKET> </type> <IDENTIFIER startline="13" 
startpos="34">args</IDENTIFIER> </normalParameterDecl> </formalParameterDecls> <RPAREN startline="13" startpos="38">)</RPAREN> </formalParameters> <block> <LBRACE startline="13" startpos="40">{</LBRACE> <blockStatement> <localVariableDeclarationStatement> <localVariableDeclaration> <variableModifiers /> <type> <classOrInterfaceType> <IDENTIFIER startline="14" startpos="4">BinaryLiterals</IDENTIFIER> </classOrInterfaceType> </type> <variableDeclarator> <IDENTIFIER startline="14" startpos="19">binary</IDENTIFIER> <EQ startline="14" startpos="26">=</EQ> <variableInitializer> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <creator> <NEW startline="14" startpos="28">new</NEW> <classOrInterfaceType> <IDENTIFIER startline="14" startpos="32">BinaryLiterals</IDENTIFIER> </classOrInterfaceType> <classCreatorRest> <arguments> <LPAREN startline="14" startpos="46">(</LPAREN> <RPAREN startline="14" startpos="47">)</RPAREN> </arguments> </classCreatorRest> </creator> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </variableInitializer> </variableDeclarator> </localVariableDeclaration> <SEMI startline="14" startpos="48">;</SEMI> </localVariableDeclarationStatement> </blockStatement> <blockStatement> <localVariableDeclarationStatement> <localVariableDeclaration> <variableModifiers /> <type> <classOrInterfaceType> <IDENTIFIER startline="15" startpos="4">State</IDENTIFIER> 
</classOrInterfaceType> </type> <variableDeclarator> <IDENTIFIER startline="15" startpos="10">state</IDENTIFIER> <EQ startline="15" startpos="16">=</EQ> <variableInitializer> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <creator> <NEW startline="15" startpos="18">new</NEW> <classOrInterfaceType> <IDENTIFIER startline="15" startpos="22">State</IDENTIFIER> </classOrInterfaceType> <classCreatorRest> <arguments> <LPAREN startline="15" startpos="27">(</LPAREN> <RPAREN startline="15" startpos="28">)</RPAREN> </arguments> </classCreatorRest> </creator> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </variableInitializer> </variableDeclarator> </localVariableDeclaration> <SEMI startline="15" startpos="29">;</SEMI> </localVariableDeclarationStatement> </blockStatement> <blockStatement> <statement> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="16" startpos="4">binary</IDENTIFIER> <DOT startline="16" startpos="10">.</DOT> <IDENTIFIER startline="16" startpos="11">decodeInstruction</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="16" 
startpos="28">(</LPAREN> <expressionList> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="16" startpos="29">binary</IDENTIFIER> <DOT startline="16" startpos="35">.</DOT> <IDENTIFIER startline="16" startpos="36">aByte</IDENTIFIER> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> <COMMA startline="16" startpos="41">,</COMMA> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="16" startpos="43">state</IDENTIFIER> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </expressionList> <RPAREN startline="16" startpos="48">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> 
</equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> <SEMI startline="16" startpos="49">;</SEMI> </statement> </blockStatement> <blockStatement> <localVariableDeclarationStatement> <localVariableDeclaration> <variableModifiers /> <type> <classOrInterfaceType> <IDENTIFIER startline="18" startpos="4">StringInSwitchStatements</IDENTIFIER> </classOrInterfaceType> </type> <variableDeclarator> <IDENTIFIER startline="18" startpos="29">strings</IDENTIFIER> <EQ startline="18" startpos="37">=</EQ> <variableInitializer> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <creator> <NEW startline="18" startpos="39">new</NEW> <classOrInterfaceType> <IDENTIFIER startline="18" startpos="43">StringInSwitchStatements</IDENTIFIER> </classOrInterfaceType> <classCreatorRest> <arguments> <LPAREN startline="18" startpos="67">(</LPAREN> <RPAREN startline="18" startpos="68">)</RPAREN> </arguments> </classCreatorRest> </creator> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </variableInitializer> </variableDeclarator> </localVariableDeclaration> <SEMI startline="18" startpos="69">;</SEMI> </localVariableDeclarationStatement> </blockStatement> <blockStatement> <statement> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> 
<inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="19" startpos="4">System</IDENTIFIER> <DOT startline="19" startpos="10">.</DOT> <IDENTIFIER startline="19" startpos="11">out</IDENTIFIER> <DOT startline="19" startpos="14">.</DOT> <IDENTIFIER startline="19" startpos="15">println</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="19" startpos="22">(</LPAREN> <expressionList> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="19" startpos="23">strings</IDENTIFIER> <DOT startline="19" startpos="30">.</DOT> <IDENTIFIER startline="19" startpos="31">getTypeOfDayWithSwitchStatement</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="19" startpos="62">(</LPAREN> <expressionList> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <literal> <stringLiteral> <STRINGLITERAL startline="19" startpos="63">"Wednesday"</STRINGLITERAL> </stringLiteral> </literal> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> 
</conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </expressionList> <RPAREN startline="19" startpos="74">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </expressionList> <RPAREN startline="19" startpos="75">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> <SEMI startline="19" startpos="76">;</SEMI> </statement> </blockStatement> <blockStatement> <statement> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="20" startpos="4">System</IDENTIFIER> <DOT startline="20" startpos="10">.</DOT> <IDENTIFIER startline="20" startpos="11">out</IDENTIFIER> <DOT startline="20" startpos="14">.</DOT> <IDENTIFIER startline="20" startpos="15">println</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="20" startpos="22">(</LPAREN> <expressionList> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> 
<instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="20" startpos="23">strings</IDENTIFIER> <DOT startline="20" startpos="30">.</DOT> <IDENTIFIER startline="20" startpos="31">getTypeOfDayWithSwitchStatement</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="20" startpos="62">(</LPAREN> <expressionList> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <literal> <stringLiteral> <STRINGLITERAL startline="20" startpos="63">"Sunday"</STRINGLITERAL> </stringLiteral> </literal> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </expressionList> <RPAREN startline="20" startpos="71">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </expressionList> <RPAREN startline="20" startpos="72">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> 
</instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> <SEMI startline="20" startpos="73">;</SEMI> </statement> </blockStatement> <blockStatement> <localVariableDeclarationStatement> <localVariableDeclaration> <variableModifiers /> <type> <classOrInterfaceType> <IDENTIFIER startline="22" startpos="4">TypeInference</IDENTIFIER> <typeArguments> <LT startline="22" startpos="17">&lt;</LT> <typeArgument> <type> <classOrInterfaceType> <IDENTIFIER startline="22" startpos="18">String</IDENTIFIER> </classOrInterfaceType> </type> </typeArgument> <GT startline="22" startpos="24">&gt;</GT> </typeArguments> </classOrInterfaceType> </type> <variableDeclarator> <IDENTIFIER startline="22" startpos="26">typeinference</IDENTIFIER> <EQ startline="22" startpos="40">=</EQ> <variableInitializer> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="22" startpos="42">InstanceGetter</IDENTIFIER> <DOT startline="22" startpos="56">.</DOT> <IDENTIFIER startline="22" startpos="57">getInstance</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="22" startpos="68">(</LPAREN> <RPAREN startline="22" startpos="69">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> 
</variableInitializer> </variableDeclarator> </localVariableDeclaration> <SEMI startline="22" startpos="70">;</SEMI> </localVariableDeclarationStatement> </blockStatement> <blockStatement> <statement> <trystatement> <TRY startline="24" startpos="4">try</TRY> <block> <LBRACE startline="24" startpos="8">{</LBRACE> <blockStatement> <statement> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="25" startpos="4">System</IDENTIFIER> <DOT startline="25" startpos="10">.</DOT> <IDENTIFIER startline="25" startpos="11">out</IDENTIFIER> <DOT startline="25" startpos="14">.</DOT> <IDENTIFIER startline="25" startpos="15">println</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="25" startpos="22">(</LPAREN> <expressionList> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="25" startpos="23">TryWithStatements</IDENTIFIER> <DOT startline="25" startpos="40">.</DOT> <IDENTIFIER startline="25" startpos="41">readFirstLineFromFile</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="25" startpos="62">(</LPAREN> <expressionList> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> 
<unaryExpressionNotPlusMinus> <primary> <literal> <stringLiteral> <STRINGLITERAL startline="25" startpos="63">"test.txt"</STRINGLITERAL> </stringLiteral> </literal> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </expressionList> <RPAREN startline="25" startpos="73">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </expressionList> <RPAREN startline="25" startpos="74">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> <SEMI startline="25" startpos="75">;</SEMI> </statement> </blockStatement> <RBRACE startline="26" startpos="4">}</RBRACE> </block> <catches> <catchClause> <CATCH startline="26" startpos="6">catch</CATCH> <LPAREN startline="26" startpos="12">(</LPAREN> <formalParameter> <variableModifiers /> <catchType> <type> <classOrInterfaceType> <IDENTIFIER startline="26" startpos="13">IOException</IDENTIFIER> </classOrInterfaceType> </type> </catchType> <IDENTIFIER startline="26" startpos="25">e</IDENTIFIER> </formalParameter> <RPAREN startline="26" 
startpos="26">)</RPAREN> <block> <LBRACE startline="26" startpos="28">{</LBRACE> <blockStatement> <statement> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="27" startpos="5">e</IDENTIFIER> <DOT startline="27" startpos="6">.</DOT> <IDENTIFIER startline="27" startpos="7">printStackTrace</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="27" startpos="22">(</LPAREN> <RPAREN startline="27" startpos="23">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> <SEMI startline="27" startpos="24">;</SEMI> </statement> </blockStatement> <RBRACE startline="28" startpos="4">}</RBRACE> </block> </catchClause> </catches> </trystatement> </statement> </blockStatement> <blockStatement> <localVariableDeclarationStatement> <localVariableDeclaration> <variableModifiers /> <type> <classOrInterfaceType> <IDENTIFIER startline="30" startpos="4">CatchingMultipleException</IDENTIFIER> </classOrInterfaceType> </type> <variableDeclarator> <IDENTIFIER startline="30" startpos="30">exception</IDENTIFIER> <EQ startline="30" startpos="40">=</EQ> <variableInitializer> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> 
<multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <creator> <NEW startline="30" startpos="42">new</NEW> <classOrInterfaceType> <IDENTIFIER startline="30" startpos="46">CatchingMultipleException</IDENTIFIER> </classOrInterfaceType> <classCreatorRest> <arguments> <LPAREN startline="30" startpos="71">(</LPAREN> <RPAREN startline="30" startpos="72">)</RPAREN> </arguments> </classCreatorRest> </creator> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </variableInitializer> </variableDeclarator> </localVariableDeclaration> <SEMI startline="30" startpos="73">;</SEMI> </localVariableDeclarationStatement> </blockStatement> <blockStatement> <statement> <trystatement> <TRY startline="31" startpos="4">try</TRY> <block> <LBRACE startline="31" startpos="8">{</LBRACE> <blockStatement> <statement> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="32" startpos="5">exception</IDENTIFIER> <DOT startline="32" startpos="14">.</DOT> <IDENTIFIER startline="32" startpos="15">rethrowException</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="32" startpos="31">(</LPAREN> <RPAREN startline="32" startpos="32">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> 
</equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> <SEMI startline="32" startpos="33">;</SEMI> </statement> </blockStatement> <RBRACE startline="33" startpos="4">}</RBRACE> </block> <catches> <catchClause> <CATCH startline="33" startpos="6">catch</CATCH> <LPAREN startline="33" startpos="12">(</LPAREN> <formalParameter> <variableModifiers /> <catchType> <type> <classOrInterfaceType> <IDENTIFIER startline="33" startpos="13">FirstException</IDENTIFIER> </classOrInterfaceType> </type> <BAR startline="33" startpos="28">|</BAR> <type> <classOrInterfaceType> <IDENTIFIER startline="33" startpos="30">LastException</IDENTIFIER> </classOrInterfaceType> </type> </catchType> <IDENTIFIER startline="33" startpos="44">e</IDENTIFIER> </formalParameter> <RPAREN startline="33" startpos="45">)</RPAREN> <block> <LBRACE startline="33" startpos="47">{</LBRACE> <blockStatement> <statement> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> <multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <IDENTIFIER startline="34" startpos="5">System</IDENTIFIER> <DOT startline="34" startpos="11">.</DOT> <IDENTIFIER startline="34" startpos="12">out</IDENTIFIER> <DOT startline="34" startpos="15">.</DOT> <IDENTIFIER startline="34" startpos="16">println</IDENTIFIER> <identifierSuffix> <arguments> <LPAREN startline="34" startpos="23">(</LPAREN> <expressionList> <expression> <conditionalExpression> <conditionalOrExpression> <conditionalAndExpression> <inclusiveOrExpression> <exclusiveOrExpression> <andExpression> <equalityExpression> <instanceOfExpression> <relationalExpression> <shiftExpression> <additiveExpression> 
<multiplicativeExpression> <unaryExpression> <unaryExpressionNotPlusMinus> <primary> <literal> <stringLiteral> <STRINGLITERAL startline="34" startpos="24">"catched!"</STRINGLITERAL> </stringLiteral> </literal> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> </expressionList> <RPAREN startline="34" startpos="34">)</RPAREN> </arguments> </identifierSuffix> </primary> </unaryExpressionNotPlusMinus> </unaryExpression> </multiplicativeExpression> </additiveExpression> </shiftExpression> </relationalExpression> </instanceOfExpression> </equalityExpression> </andExpression> </exclusiveOrExpression> </inclusiveOrExpression> </conditionalAndExpression> </conditionalOrExpression> </conditionalExpression> </expression> <SEMI startline="34" startpos="35">;</SEMI> </statement> </blockStatement> <RBRACE startline="35" startpos="4">}</RBRACE> </block> </catchClause> </catches> </trystatement> </statement> </blockStatement> <RBRACE startline="36" startpos="3">}</RBRACE> </block> </methodDeclaration> </memberDecl> </classBodyDeclaration> <RBRACE startline="37" startpos="0">}</RBRACE> </classBody> </normalClassDeclaration> </classDeclaration> </classOrInterfaceDeclaration> </typeDeclaration> </compilationUnit>
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.inspector.model; import java.io.Serializable; /** * */ public class LocalizeTextResult implements Serializable, Cloneable { /** * <p> * Confirmation details of the action performed. * </p> */ private String message; /** * <p> * The resulting list of user-readable texts. * </p> */ private java.util.List<String> results; /** * <p> * Confirmation details of the action performed. * </p> * * @param message * Confirmation details of the action performed. */ public void setMessage(String message) { this.message = message; } /** * <p> * Confirmation details of the action performed. * </p> * * @return Confirmation details of the action performed. */ public String getMessage() { return this.message; } /** * <p> * Confirmation details of the action performed. * </p> * * @param message * Confirmation details of the action performed. * @return Returns a reference to this object so that method calls can be * chained together. */ public LocalizeTextResult withMessage(String message) { setMessage(message); return this; } /** * <p> * The resulting list of user-readable texts. * </p> * * @return The resulting list of user-readable texts. */ public java.util.List<String> getResults() { return results; } /** * <p> * The resulting list of user-readable texts. * </p> * * @param results * The resulting list of user-readable texts. 
*/ public void setResults(java.util.Collection<String> results) { if (results == null) { this.results = null; return; } this.results = new java.util.ArrayList<String>(results); } /** * <p> * The resulting list of user-readable texts. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setResults(java.util.Collection)} or * {@link #withResults(java.util.Collection)} if you want to override the * existing values. * </p> * * @param results * The resulting list of user-readable texts. * @return Returns a reference to this object so that method calls can be * chained together. */ public LocalizeTextResult withResults(String... results) { if (this.results == null) { setResults(new java.util.ArrayList<String>(results.length)); } for (String ele : results) { this.results.add(ele); } return this; } /** * <p> * The resulting list of user-readable texts. * </p> * * @param results * The resulting list of user-readable texts. * @return Returns a reference to this object so that method calls can be * chained together. */ public LocalizeTextResult withResults(java.util.Collection<String> results) { setResults(results); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getMessage() != null) sb.append("Message: " + getMessage() + ","); if (getResults() != null) sb.append("Results: " + getResults()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof LocalizeTextResult == false) return false; LocalizeTextResult other = (LocalizeTextResult) obj; if (other.getMessage() == null ^ this.getMessage() == null) return false; if (other.getMessage() != null && other.getMessage().equals(this.getMessage()) == false) return false; if (other.getResults() == null ^ this.getResults() == null) return false; if (other.getResults() != null && other.getResults().equals(this.getResults()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getMessage() == null) ? 0 : getMessage().hashCode()); hashCode = prime * hashCode + ((getResults() == null) ? 0 : getResults().hashCode()); return hashCode; } @Override public LocalizeTextResult clone() { try { return (LocalizeTextResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/* * Copyright (C) 2015 Simon Vig Therkildsen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.simonvt.cathode.settings.hidden; import android.content.Context; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.TextView; import androidx.annotation.NonNull; import androidx.recyclerview.widget.RecyclerView; import butterknife.BindView; import butterknife.ButterKnife; import net.simonvt.cathode.R; import net.simonvt.cathode.common.ui.adapter.HeaderAdapter; import net.simonvt.cathode.common.widget.OverflowView; import net.simonvt.cathode.common.widget.RemoteImageView; import net.simonvt.cathode.entity.Movie; import net.simonvt.cathode.entity.Show; import net.simonvt.cathode.images.ImageType; import net.simonvt.cathode.images.ImageUri; public class HiddenItemsAdapter extends HeaderAdapter<Object, RecyclerView.ViewHolder> { public interface ItemCallbacks { void onShowClicked(long showId, String title, String overview); void displayShowInCalendar(long showId); void displayShowInWatched(long showId); void displayShowInCollection(long showId); void onMovieClicked(long movieId, String title, String overview); void displayMovieInCalendar(long movieId); } private static final int TYPE_SHOW = 0; private static final int TYPE_MOVIE = 1; private Context context; private ItemCallbacks itemCallbacks; public HiddenItemsAdapter(Context context, ItemCallbacks itemCallbacks) { super(context); this.context = context; 
this.itemCallbacks = itemCallbacks; } @Override protected int getItemViewType(int headerRes, Object item) { switch (headerRes) { case R.string.header_hidden_calendar_shows: case R.string.header_hidden_watched_shows: case R.string.header_hidden_collected_shows: return TYPE_SHOW; default: return TYPE_MOVIE; } } @Override protected boolean areItemsTheSame(@NonNull Object oldItem, @NonNull Object newItem) { if (oldItem.getClass() == newItem.getClass()) { if (oldItem instanceof Show) { return ((Show) oldItem).getId() == ((Show) newItem).getId(); } else if (oldItem instanceof Movie) { return ((Movie) oldItem).getId() == ((Movie) newItem).getId(); } } return false; } @Override protected RecyclerView.ViewHolder onCreateItemHolder(ViewGroup parent, int viewType) { ListViewHolder holder; if (viewType == TYPE_SHOW) { View v = LayoutInflater.from(context).inflate(R.layout.row_list_show, parent, false); final ShowViewHolder showHolder = new ShowViewHolder(v); holder = showHolder; showHolder.overflow.addItem(R.id.action_unhide, R.string.action_unhide); showHolder.overflow.setListener(new OverflowView.OverflowActionListener() { @Override public void onPopupShown() { } @Override public void onPopupDismissed() { } @Override public void onActionSelected(int action) { final int position = showHolder.getAdapterPosition(); if (position != RecyclerView.NO_POSITION) { Show show = (Show) getItem(position); final long itemId = show.getId(); switch (action) { case R.id.action_unhide: int headerRes = getHeaderRes(position); if (headerRes == R.string.header_hidden_calendar_shows) { itemCallbacks.displayShowInCalendar(itemId); } else if (headerRes == R.string.header_hidden_watched_shows) { itemCallbacks.displayShowInWatched(itemId); } else { itemCallbacks.displayShowInCollection(itemId); } break; } } } }); v.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { final int position = showHolder.getAdapterPosition(); if (position != RecyclerView.NO_POSITION) { 
Show show = (Show) getItem(position); final long itemId = show.getId(); itemCallbacks.onShowClicked(itemId, show.getTitle(), show.getOverview()); } } }); } else { View v = LayoutInflater.from(context).inflate(R.layout.row_list_movie, parent, false); final MovieViewHolder movieHolder = new MovieViewHolder(v); holder = movieHolder; movieHolder.overflow.addItem(R.id.action_unhide, R.string.action_unhide); movieHolder.overflow.setListener(new OverflowView.OverflowActionListener() { @Override public void onPopupShown() { } @Override public void onPopupDismissed() { } @Override public void onActionSelected(int action) { final int position = movieHolder.getAdapterPosition(); if (position != RecyclerView.NO_ID) { Movie movie = (Movie) getItem(position); final long itemId = movie.getId(); switch (action) { case R.id.action_unhide: int headerRes = getHeaderRes(position); if (headerRes == R.string.header_hidden_calendar_movies) { itemCallbacks.displayMovieInCalendar(itemId); } break; } } } }); v.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { final int position = movieHolder.getAdapterPosition(); if (position != RecyclerView.NO_POSITION) { Movie movie = (Movie) getItem(position); final long itemId = movie.getId(); itemCallbacks.onMovieClicked(itemId, movie.getTitle(), movie.getOverview()); } } }); } return holder; } @Override protected RecyclerView.ViewHolder onCreateHeaderHolder(ViewGroup parent) { View v = LayoutInflater.from(context).inflate(R.layout.list_row_upcoming_header, parent, false); return new HeaderViewHolder((TextView) v); } @Override public void onViewRecycled(RecyclerView.ViewHolder holder) { if (holder instanceof ListViewHolder) { ((ListViewHolder) holder).overflow.dismiss(); } } @Override protected void onBindHeader(RecyclerView.ViewHolder holder, int headerRes) { ((HeaderViewHolder) holder).header.setText(headerRes); } @Override protected void onBindViewHolder(RecyclerView.ViewHolder holder, Object object, int 
position) { if (holder.getItemViewType() == TYPE_SHOW) { final ShowViewHolder vh = (ShowViewHolder) holder; Show show = (Show) object; final String poster = ImageUri.create(ImageUri.ITEM_SHOW, ImageType.POSTER, show.getId()); vh.poster.setImage(poster); vh.title.setText(show.getTitle()); vh.overview.setText(show.getOverview()); } else { final MovieViewHolder vh = (MovieViewHolder) holder; Movie movie = (Movie) object; final String poster = ImageUri.create(ImageUri.ITEM_MOVIE, ImageType.POSTER, movie.getId()); vh.poster.setImage(poster); vh.title.setText(movie.getTitle()); vh.overview.setText(movie.getOverview()); } } static class HeaderViewHolder extends RecyclerView.ViewHolder { TextView header; HeaderViewHolder(TextView header) { super(header); this.header = header; } } static class ListViewHolder extends RecyclerView.ViewHolder { @BindView(R.id.overflow) OverflowView overflow; ListViewHolder(View v) { super(v); ButterKnife.bind(this, v); } } static class ShowViewHolder extends ListViewHolder { @BindView(R.id.poster) RemoteImageView poster; @BindView(R.id.title) TextView title; @BindView(R.id.overview) TextView overview; ShowViewHolder(View v) { super(v); } } static class MovieViewHolder extends ListViewHolder { @BindView(R.id.poster) RemoteImageView poster; @BindView(R.id.title) TextView title; @BindView(R.id.overview) TextView overview; MovieViewHolder(View v) { super(v); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.core.xml; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlTransient; import org.apache.camel.CamelContext; import org.apache.camel.CamelException; import org.apache.camel.RoutesBuilder; import org.apache.camel.ShutdownRoute; import org.apache.camel.ShutdownRunningTask; import org.apache.camel.builder.ErrorHandlerBuilderRef; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.properties.PropertiesComponent; import org.apache.camel.component.properties.PropertiesParser; import org.apache.camel.component.properties.PropertiesResolver; import org.apache.camel.core.xml.scan.PatternBasedPackageScanFilter; import org.apache.camel.management.DefaultManagementAgent; import org.apache.camel.management.DefaultManagementLifecycleStrategy; import org.apache.camel.management.DefaultManagementStrategy; import org.apache.camel.management.ManagedManagementStrategy; import org.apache.camel.model.ContextScanDefinition; import 
org.apache.camel.model.FromDefinition; import org.apache.camel.model.IdentifiedType; import org.apache.camel.model.InterceptDefinition; import org.apache.camel.model.InterceptFromDefinition; import org.apache.camel.model.InterceptSendToEndpointDefinition; import org.apache.camel.model.OnCompletionDefinition; import org.apache.camel.model.OnExceptionDefinition; import org.apache.camel.model.PackageScanDefinition; import org.apache.camel.model.ProcessorDefinition; import org.apache.camel.model.RouteBuilderDefinition; import org.apache.camel.model.RouteContainer; import org.apache.camel.model.RouteContextRefDefinition; import org.apache.camel.model.RouteDefinition; import org.apache.camel.model.RouteDefinitionHelper; import org.apache.camel.model.ThreadPoolProfileDefinition; import org.apache.camel.model.TransactedDefinition; import org.apache.camel.model.config.PropertiesDefinition; import org.apache.camel.model.dataformat.DataFormatsDefinition; import org.apache.camel.processor.interceptor.Delayer; import org.apache.camel.processor.interceptor.HandleFault; import org.apache.camel.processor.interceptor.TraceFormatter; import org.apache.camel.processor.interceptor.Tracer; import org.apache.camel.spi.ClassResolver; import org.apache.camel.spi.Debugger; import org.apache.camel.spi.EventFactory; import org.apache.camel.spi.EventNotifier; import org.apache.camel.spi.ExecutorServiceStrategy; import org.apache.camel.spi.FactoryFinderResolver; import org.apache.camel.spi.InflightRepository; import org.apache.camel.spi.InterceptStrategy; import org.apache.camel.spi.LifecycleStrategy; import org.apache.camel.spi.ManagementNamingStrategy; import org.apache.camel.spi.ManagementStrategy; import org.apache.camel.spi.PackageScanClassResolver; import org.apache.camel.spi.PackageScanFilter; import org.apache.camel.spi.ProcessorFactory; import org.apache.camel.spi.ShutdownStrategy; import org.apache.camel.spi.ThreadPoolProfile; import org.apache.camel.spi.UuidGenerator; import 
org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.EndpointHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * A factory to create and initialize a
 * {@link CamelContext} and install routes either explicitly configured
 * or found by searching the classpath for Java classes which extend
 * {@link org.apache.camel.builder.RouteBuilder}.
 *
 * @version $Revision: 938746 $
 */
@XmlAccessorType(XmlAccessType.FIELD)
public abstract class AbstractCamelContextFactoryBean<T extends CamelContext>
        extends IdentifiedType implements RouteContainer {

    private static final Log LOG = LogFactory.getLog(AbstractCamelContextFactoryBean.class);

    // RoutesBuilder instances gathered from builder refs and classpath scanning,
    // installed into the context by installRoutes().
    @XmlTransient
    private List<RoutesBuilder> builders = new ArrayList<RoutesBuilder>();
    @XmlTransient
    private ClassLoader contextClassLoaderOnStart;

    public AbstractCamelContextFactoryBean() {
        // Lets keep track of the class loader for when we actually do start things up
        contextClassLoaderOnStart = Thread.currentThread().getContextClassLoader();
    }

    public Object getObject() throws Exception {
        return getContext();
    }

    public Class getObjectType() {
        return CamelContext.class;
    }

    public boolean isSingleton() {
        return true;
    }

    public ClassLoader getContextClassLoaderOnStart() {
        return contextClassLoaderOnStart;
    }

    /**
     * Performs the full initialization of the {@link CamelContext}: installs any
     * custom strategies/resolvers found in the registry, configures property
     * placeholders and JMX, prepares the JAXB-created route definitions and
     * finally installs all routes.
     *
     * @throws Exception if the context could not be initialized
     */
    public void afterPropertiesSet() throws Exception {
        if (ObjectHelper.isEmpty(getId())) {
            throw new IllegalArgumentException("Id must be set");
        }

        if (getProperties() != null) {
            getContext().setProperties(getProperties().asMap());
        }

        // set the type converter mode first
        if (getLazyLoadTypeConverters() != null) {
            getContext().setLazyLoadTypeConverters(getLazyLoadTypeConverters());
        }

        PackageScanClassResolver packageResolver = getBeanForType(PackageScanClassResolver.class);
        if (packageResolver != null) {
            LOG.info("Using custom PackageScanClassResolver: " + packageResolver);
            getContext().setPackageScanClassResolver(packageResolver);
        }
        ClassResolver classResolver = getBeanForType(ClassResolver.class);
        if (classResolver != null) {
            LOG.info("Using custom ClassResolver: " + classResolver);
            getContext().setClassResolver(classResolver);
        }
        FactoryFinderResolver factoryFinderResolver = getBeanForType(FactoryFinderResolver.class);
        if (factoryFinderResolver != null) {
            LOG.info("Using custom FactoryFinderResolver: " + factoryFinderResolver);
            getContext().setFactoryFinderResolver(factoryFinderResolver);
        }
        ExecutorServiceStrategy executorServiceStrategy = getBeanForType(ExecutorServiceStrategy.class);
        if (executorServiceStrategy != null) {
            LOG.info("Using custom ExecutorServiceStrategy: " + executorServiceStrategy);
            getContext().setExecutorServiceStrategy(executorServiceStrategy);
        }
        ProcessorFactory processorFactory = getBeanForType(ProcessorFactory.class);
        if (processorFactory != null) {
            LOG.info("Using custom ProcessorFactory: " + processorFactory);
            getContext().setProcessorFactory(processorFactory);
        }
        Debugger debugger = getBeanForType(Debugger.class);
        if (debugger != null) {
            LOG.info("Using custom Debugger: " + debugger);
            getContext().setDebugger(debugger);
        }
        UuidGenerator uuidGenerator = getBeanForType(UuidGenerator.class);
        if (uuidGenerator != null) {
            LOG.info("Using custom UuidGenerator: " + uuidGenerator);
            getContext().setUuidGenerator(uuidGenerator);
        }

        // set the custom registry if defined
        initCustomRegistry(getContext());

        // setup property placeholder so we got it as early as possible
        initPropertyPlaceholder();

        // setup JMX agent at first
        initJMXAgent();

        Tracer tracer = getBeanForType(Tracer.class);
        if (tracer != null) {
            // use formatter if there is a TraceFormatter bean defined
            TraceFormatter formatter = getBeanForType(TraceFormatter.class);
            if (formatter != null) {
                tracer.setFormatter(formatter);
            }
            LOG.info("Using custom Tracer: " + tracer);
            getContext().addInterceptStrategy(tracer);
        }
        HandleFault handleFault = getBeanForType(HandleFault.class);
        if (handleFault != null) {
            LOG.info("Using custom HandleFault: " + handleFault);
            getContext().addInterceptStrategy(handleFault);
        }
        Delayer delayer = getBeanForType(Delayer.class);
        if (delayer != null) {
            LOG.info("Using custom Delayer: " + delayer);
            getContext().addInterceptStrategy(delayer);
        }
        InflightRepository inflightRepository = getBeanForType(InflightRepository.class);
        // FIX: was "if (delayer != null)" — a copy-paste bug that made installing a custom
        // InflightRepository depend on an unrelated Delayer bean being present (and could
        // set a null repository when a Delayer existed without a repository).
        if (inflightRepository != null) {
            LOG.info("Using custom InflightRepository: " + inflightRepository);
            getContext().setInflightRepository(inflightRepository);
        }
        ManagementStrategy managementStrategy = getBeanForType(ManagementStrategy.class);
        if (managementStrategy != null) {
            LOG.info("Using custom ManagementStrategy: " + managementStrategy);
            getContext().setManagementStrategy(managementStrategy);
        }
        ManagementNamingStrategy managementNamingStrategy = getBeanForType(ManagementNamingStrategy.class);
        if (managementNamingStrategy != null) {
            LOG.info("Using custom ManagementNamingStrategy: " + managementNamingStrategy);
            getContext().getManagementStrategy().setManagementNamingStrategy(managementNamingStrategy);
        }
        EventFactory eventFactory = getBeanForType(EventFactory.class);
        if (eventFactory != null) {
            LOG.info("Using custom EventFactory: " + eventFactory);
            getContext().getManagementStrategy().setEventFactory(eventFactory);
        }

        // set the event notifier strategies if defined
        Map<String, EventNotifier> eventNotifiers = getContext().getRegistry().lookupByType(EventNotifier.class);
        if (eventNotifiers != null && !eventNotifiers.isEmpty()) {
            for (String id : eventNotifiers.keySet()) {
                EventNotifier notifier = eventNotifiers.get(id);
                // do not add if already added, for instance a tracer that is also an InterceptStrategy class
                if (!getContext().getManagementStrategy().getEventNotifiers().contains(notifier)) {
                    LOG.info("Using custom EventNotifier with id: " + id + " and implementation: " + notifier);
                    getContext().getManagementStrategy().addEventNotifier(notifier);
                }
            }
        }

        ShutdownStrategy shutdownStrategy = getBeanForType(ShutdownStrategy.class);
        if (shutdownStrategy != null) {
            LOG.info("Using custom ShutdownStrategy: " + shutdownStrategy);
            getContext().setShutdownStrategy(shutdownStrategy);
        }

        // add global interceptors
        Map<String, InterceptStrategy> interceptStrategies = getContext().getRegistry().lookupByType(InterceptStrategy.class);
        if (interceptStrategies != null && !interceptStrategies.isEmpty()) {
            for (String id : interceptStrategies.keySet()) {
                InterceptStrategy strategy = interceptStrategies.get(id);
                // do not add if already added, for instance a tracer that is also an InterceptStrategy class
                if (!getContext().getInterceptStrategies().contains(strategy)) {
                    LOG.info("Using custom InterceptStrategy with id: " + id + " and implementation: " + strategy);
                    getContext().addInterceptStrategy(strategy);
                }
            }
        }

        // set the lifecycle strategy if defined
        Map<String, LifecycleStrategy> lifecycleStrategies = getContext().getRegistry().lookupByType(LifecycleStrategy.class);
        if (lifecycleStrategies != null && !lifecycleStrategies.isEmpty()) {
            for (String id : lifecycleStrategies.keySet()) {
                LifecycleStrategy strategy = lifecycleStrategies.get(id);
                // do not add if already added, for instance a tracer that is also an InterceptStrategy class
                if (!getContext().getLifecycleStrategies().contains(strategy)) {
                    LOG.info("Using custom LifecycleStrategy with id: " + id + " and implementation: " + strategy);
                    getContext().addLifecycleStrategy(strategy);
                }
            }
        }

        // set the default thread pool profile if defined
        initThreadPoolProfiles(getContext());

        // Set the application context and camelContext for the beanPostProcessor
        initBeanPostProcessor(getContext());

        // init camel context
        initCamelContext(getContext());

        // must init route refs before we prepare the routes below
        initRouteRefs();

        // do special preparation for some concepts such as interceptors and policies
        // this is needed as JAXB does not build exactly the same model definition as Spring DSL would do
        // using route builders. So we have here a little custom code to fix the JAXB gaps
        prepareRoutes();

        // and add the routes
        getContext().addRouteDefinitions(getRoutes());

        if (LOG.isDebugEnabled()) {
            LOG.debug("Found JAXB created routes: " + getRoutes());
        }

        findRouteBuilders();
        installRoutes();
    }

    /**
     * Do special preparation for some concepts such as interceptors and policies
     * this is needed as JAXB does not build exactly the same model definition as Spring DSL would do
     * using route builders. So we have here a little custom code to fix the JAXB gaps
     */
    private void prepareRoutes() {
        for (RouteDefinition route : getRoutes()) {
            // leverage logic from route definition helper to prepare the route
            RouteDefinitionHelper.prepareRoute(route, getOnExceptions(), getIntercepts(), getInterceptFroms(),
                    getInterceptSendToEndpoints(), getOnCompletions());

            // mark the route as prepared now
            route.markPrepared();
        }
    }

    protected abstract void initCustomRegistry(T context);

    /**
     * Configures the JMX management strategy from the optional {@code camelJMXAgent}
     * definition: disables management entirely, or installs a {@link DefaultManagementAgent}
     * configured from the definition's (placeholder-resolvable) attributes.
     */
    private void initJMXAgent() throws Exception {
        CamelJMXAgentDefinition camelJMXAgent = getCamelJMXAgent();
        if (camelJMXAgent != null && camelJMXAgent.isAgentDisabled()) {
            LOG.info("JMXAgent disabled");
            // clear the existing lifecycle strategies define by the DefaultCamelContext constructor
            getContext().getLifecycleStrategies().clear();
            // no need to add a lifecycle strategy as we do not need one as JMX is disabled
            getContext().setManagementStrategy(new DefaultManagementStrategy());
        } else if (camelJMXAgent != null) {
            LOG.info("JMXAgent enabled: " + camelJMXAgent);
            DefaultManagementAgent agent = new DefaultManagementAgent(getContext());
            agent.setConnectorPort(CamelContextHelper.parseInteger(getContext(), camelJMXAgent.getConnectorPort()));
            agent.setCreateConnector(CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getCreateConnector()));
            agent.setMBeanObjectDomainName(CamelContextHelper.parseText(getContext(), camelJMXAgent.getMbeanObjectDomainName()));
            agent.setMBeanServerDefaultDomain(CamelContextHelper.parseText(getContext(), camelJMXAgent.getMbeanServerDefaultDomain()));
            agent.setRegistryPort(CamelContextHelper.parseInteger(getContext(), camelJMXAgent.getRegistryPort()));
            agent.setServiceUrlPath(CamelContextHelper.parseText(getContext(), camelJMXAgent.getServiceUrlPath()));
            agent.setUsePlatformMBeanServer(CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getUsePlatformMBeanServer()));
            agent.setOnlyRegisterProcessorWithCustomId(CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getOnlyRegisterProcessorWithCustomId()));

            ManagementStrategy managementStrategy = new ManagedManagementStrategy(agent);
            getContext().setManagementStrategy(managementStrategy);

            // clear the existing lifecycle strategies define by the DefaultCamelContext constructor
            getContext().getLifecycleStrategies().clear();
            getContext().addLifecycleStrategy(new DefaultManagementLifecycleStrategy(getContext()));
            // set additional configuration from camelJMXAgent
            boolean onlyId = agent.getOnlyRegisterProcessorWithCustomId() != null && agent.getOnlyRegisterProcessorWithCustomId();
            getContext().getManagementStrategy().onlyManageProcessorWithCustomId(onlyId);
            getContext().getManagementStrategy().setStatisticsLevel(camelJMXAgent.getStatisticsLevel());
        }
    }

    /**
     * Registers a {@link PropertiesComponent} under the name "properties" when a
     * property placeholder definition is configured, honoring optional custom
     * resolver/parser references.
     */
    private void initPropertyPlaceholder() throws Exception {
        if (getCamelPropertyPlaceholder() != null) {
            CamelPropertyPlaceholderDefinition def = getCamelPropertyPlaceholder();

            PropertiesComponent pc = new PropertiesComponent();
            pc.setLocation(def.getLocation());

            // if using a custom resolver
            if (ObjectHelper.isNotEmpty(def.getPropertiesResolverRef())) {
                PropertiesResolver resolver = CamelContextHelper.mandatoryLookup(getContext(), def.getPropertiesResolverRef(),
                        PropertiesResolver.class);
                pc.setPropertiesResolver(resolver);
            }
            // if using a custom parser
            if (ObjectHelper.isNotEmpty(def.getPropertiesParserRef())) {
                PropertiesParser parser = CamelContextHelper.mandatoryLookup(getContext(), def.getPropertiesParserRef(),
                        PropertiesParser.class);
                pc.setPropertiesParser(parser);
            }

            // register the properties component
            getContext().addComponent("properties", pc);
        }
    }

    /**
     * Resolves configured route refs and prepends their route definitions to this
     * container's routes (common/shared routes should start first).
     */
    private void initRouteRefs() throws Exception {
        // add route refs to existing routes
        if (getRouteRefs() != null) {
            for (RouteContextRefDefinition ref : getRouteRefs()) {
                List<RouteDefinition> defs = ref.lookupRoutes(getContext());
                for (RouteDefinition def : defs) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Adding route from " + ref + " -> " + def);
                    }
                    // add in top as they are most likely to be common/shared
                    // which you may want to start first
                    getRoutes().add(0, def);
                }
            }
        }
    }

    protected abstract <S> S getBeanForType(Class<S> clazz);

    public void destroy() throws Exception {
        getContext().stop();
    }

    // Properties
    // -------------------------------------------------------------------------
    public T getContext() {
        return getContext(true);
    }

    public abstract T getContext(boolean create);

    public abstract List<RouteDefinition> getRoutes();

    public abstract List<InterceptDefinition> getIntercepts();

    public abstract List<InterceptFromDefinition> getInterceptFroms();

    public abstract List<InterceptSendToEndpointDefinition> getInterceptSendToEndpoints();

    public abstract PropertiesDefinition getProperties();

    public abstract String[] getPackages();

    public abstract PackageScanDefinition getPackageScan();

    public abstract void setPackageScan(PackageScanDefinition packageScan);

    public abstract ContextScanDefinition getContextScan();

    public abstract void setContextScan(ContextScanDefinition contextScan);

    public abstract CamelPropertyPlaceholderDefinition getCamelPropertyPlaceholder();

    public abstract String getTrace();

    public abstract String getStreamCache();

    public abstract String getDelayer();

    public abstract String getHandleFault();

    public abstract String getAutoStartup();

    public abstract Boolean getLazyLoadTypeConverters();

    public abstract CamelJMXAgentDefinition getCamelJMXAgent();

    public abstract List<RouteBuilderDefinition> getBuilderRefs();

    public abstract List<RouteContextRefDefinition> getRouteRefs();

    public abstract String getErrorHandlerRef();

    public abstract DataFormatsDefinition getDataFormats();

    public abstract List<OnExceptionDefinition> getOnExceptions();

    public abstract List<OnCompletionDefinition> getOnCompletions();

    public abstract ShutdownRoute getShutdownRoute();

    public abstract ShutdownRunningTask getShutdownRunningTask();

    public abstract List<ThreadPoolProfileDefinition> getThreadPoolProfiles();

    public abstract String getDependsOn();

    // Implementation methods
    // -------------------------------------------------------------------------

    /**
     * Initializes the context
     *
     * @param ctx the context
     * @throws Exception is thrown if error occurred
     */
    protected void initCamelContext(T ctx) throws Exception {
        if (getStreamCache() != null) {
            ctx.setStreamCaching(CamelContextHelper.parseBoolean(getContext(), getStreamCache()));
        }
        if (getTrace() != null) {
            ctx.setTracing(CamelContextHelper.parseBoolean(getContext(), getTrace()));
        }
        if (getDelayer() != null) {
            ctx.setDelayer(CamelContextHelper.parseLong(getContext(), getDelayer()));
        }
        if (getHandleFault() != null) {
            ctx.setHandleFault(CamelContextHelper.parseBoolean(getContext(), getHandleFault()));
        }
        if (getErrorHandlerRef() != null) {
            ctx.setErrorHandlerBuilder(new ErrorHandlerBuilderRef(getErrorHandlerRef()));
        }
        if (getAutoStartup() != null) {
            ctx.setAutoStartup(CamelContextHelper.parseBoolean(getContext(), getAutoStartup()));
        }
        if (getShutdownRoute() != null) {
            ctx.setShutdownRoute(getShutdownRoute());
        }
        if (getShutdownRunningTask() != null) {
            ctx.setShutdownRunningTask(getShutdownRunningTask());
        }
        if (getDataFormats() != null) {
            ctx.setDataFormats(getDataFormats().asMap());
        }
    }

    /**
     * Registers thread pool profiles from the registry and from this definition,
     * validating that at most one is marked as the default profile.
     *
     * @throws IllegalArgumentException if more than one default profile is defined
     */
    private void initThreadPoolProfiles(T context) {
        Set<String> defaultIds = new HashSet<String>();

        // lookup and use custom profiles from the registry
        Map<String, ThreadPoolProfile> profiles = context.getRegistry().lookupByType(ThreadPoolProfile.class);
        if (profiles != null && !profiles.isEmpty()) {
            for (String id : profiles.keySet()) {
                ThreadPoolProfile profile = profiles.get(id);
                // do not add if already added, for instance a tracer that is also an InterceptStrategy class
                if (profile.isDefaultProfile()) {
                    LOG.info("Using custom default ThreadPoolProfile with id: " + id + " and implementation: " + profile);
                    context.getExecutorServiceStrategy().setDefaultThreadPoolProfile(profile);
                    defaultIds.add(id);
                } else {
                    context.getExecutorServiceStrategy().registerThreadPoolProfile(profile);
                }
            }
        }

        // use custom profiles defined in the CamelContext
        if (getThreadPoolProfiles() != null && !getThreadPoolProfiles().isEmpty()) {
            for (ThreadPoolProfileDefinition profile : getThreadPoolProfiles()) {
                if (profile.isDefaultProfile()) {
                    LOG.info("Using custom default ThreadPoolProfile with id: " + profile.getId() + " and implementation: " + profile);
                    context.getExecutorServiceStrategy().setDefaultThreadPoolProfile(profile);
                    defaultIds.add(profile.getId());
                } else {
                    context.getExecutorServiceStrategy().registerThreadPoolProfile(profile);
                }
            }
        }

        // validate at most one is defined
        if (defaultIds.size() > 1) {
            throw new IllegalArgumentException("Only exactly one default ThreadPoolProfile is allowed, was " + defaultIds.size() + " ids: " + defaultIds);
        }
    }

    protected abstract void initBeanPostProcessor(T context);

    /**
     * Strategy to install all available routes into the context
     */
    protected void installRoutes() throws Exception {
        List<RouteBuilder> builders = new ArrayList<RouteBuilder>();

        // lets add route builders added from references
        if (getBuilderRefs() != null) {
            for (RouteBuilderDefinition builderRef : getBuilderRefs()) {
                RouteBuilder builder = builderRef.createRouteBuilder(getContext());
                if (builder != null) {
                    builders.add(builder);
                } else {
                    // support to get the route here
                    RoutesBuilder routes = builderRef.createRoutes(getContext());
                    if (routes != null) {
                        this.builders.add(routes);
                    } else {
                        // Throw the exception that we can't find any build here
                        throw new CamelException("Cannot find any routes with this RouteBuilder reference: " + builderRef);
                    }
                }
            }
        }

        // install already configured routes
        for (RoutesBuilder routeBuilder : this.builders) {
            getContext().addRoutes(routeBuilder);
        }

        // install builders
        for (RouteBuilder builder : builders) {
            // Inject the annotated resource
            postProcessBeforeInit(builder);
            getContext().addRoutes(builder);
        }
    }

    protected abstract void postProcessBeforeInit(RouteBuilder builder);

    /**
     * Strategy method to try find {@link org.apache.camel.builder.RouteBuilder} instances on the classpath
     */
    protected void findRouteBuilders() throws Exception {
        // package scan
        addPackageElementContentsToScanDefinition();
        PackageScanDefinition packageScanDef = getPackageScan();
        if (packageScanDef != null && packageScanDef.getPackages().size() > 0) {
            // use package scan filter
            PatternBasedPackageScanFilter filter = new PatternBasedPackageScanFilter();
            // support property placeholders in include and exclude
            for (String include : packageScanDef.getIncludes()) {
                include = getContext().resolvePropertyPlaceholders(include);
                filter.addIncludePattern(include);
            }
            for (String exclude : packageScanDef.getExcludes()) {
                exclude = getContext().resolvePropertyPlaceholders(exclude);
                filter.addExcludePattern(exclude);
            }

            String[] normalized = normalizePackages(getContext(), packageScanDef.getPackages());
            findRouteBuildersByPackageScan(normalized, filter, builders);
        }

        // context scan
        ContextScanDefinition contextScanDef = getContextScan();
        if (contextScanDef != null) {
            // use package scan filter
            PatternBasedPackageScanFilter filter = new PatternBasedPackageScanFilter();
            // support property placeholders in include and exclude
            for (String include : contextScanDef.getIncludes()) {
                include = getContext().resolvePropertyPlaceholders(include);
                filter.addIncludePattern(include);
            }
            for (String exclude : contextScanDef.getExcludes()) {
                exclude = getContext().resolvePropertyPlaceholders(exclude);
                filter.addExcludePattern(exclude);
            }

            findRouteBuildersByContextScan(filter, builders);
        }
    }

    protected abstract void findRouteBuildersByPackageScan(String[] packages, PackageScanFilter filter,
            List<RoutesBuilder> builders) throws Exception;

    protected abstract void findRouteBuildersByContextScan(PackageScanFilter filter,
            List<RoutesBuilder> builders) throws Exception;

    /**
     * Copies packages declared via the legacy {@code packages} attribute into the
     * package scan definition, creating one if necessary.
     */
    private void addPackageElementContentsToScanDefinition() {
        PackageScanDefinition packageScanDef = getPackageScan();

        if (getPackages() != null && getPackages().length > 0) {
            if (packageScanDef == null) {
                packageScanDef = new PackageScanDefinition();
                setPackageScan(packageScanDef);
            }

            for (String pkg : getPackages()) {
                packageScanDef.getPackages().add(pkg);
            }
        }
    }

    /**
     * Resolves property placeholders and normalizes class names in the given
     * package names, dropping any that end up empty.
     */
    private String[] normalizePackages(T context, List<String> unnormalized) throws Exception {
        List<String> packages = new ArrayList<String>();
        for (String name : unnormalized) {
            // it may use property placeholders
            name = context.resolvePropertyPlaceholders(name);
            name = ObjectHelper.normalizeClassName(name);
            if (ObjectHelper.isNotEmpty(name)) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Using package: " + name + " to scan for RouteBuilder classes");
                }
                packages.add(name);
            }
        }
        return packages.toArray(new String[packages.size()]);
    }
}
// Copyright 2019 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package net.starlark.java.eval; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.errorprone.annotations.CheckReturnValue; import com.google.errorprone.annotations.FormatMethod; import java.io.IOException; import java.io.OutputStream; import java.lang.reflect.Method; import java.math.BigInteger; import java.time.Duration; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; import net.starlark.java.annot.StarlarkBuiltin; import net.starlark.java.annot.StarlarkInterfaceUtils; import net.starlark.java.annot.StarlarkMethod; import net.starlark.java.spelling.SpellChecker; import net.starlark.java.syntax.Expression; import net.starlark.java.syntax.FileOptions; import net.starlark.java.syntax.ParserInput; import net.starlark.java.syntax.Program; import net.starlark.java.syntax.StarlarkFile; import net.starlark.java.syntax.SyntaxError; /** * The Starlark class defines the most important entry points, constants, and functions needed by * all clients of the Starlark interpreter. */ public final class Starlark { private Starlark() {} // uninstantiable /** The Starlark None value. 
*/ public static final NoneType NONE = NoneType.NONE; /** * A sentinel value passed to optional parameters of StarlarkMethod-annotated methods to indicate * that no argument value was supplied. */ public static final Object UNBOUND = new UnboundMarker(); @Immutable private static final class UnboundMarker implements StarlarkValue { private UnboundMarker() {} @Override public String toString() { return "<unbound>"; } @Override public boolean isImmutable() { return true; } @Override public void repr(Printer printer) { printer.append("<unbound>"); } } /** * The universal bindings predeclared in every Starlark file, such as None, True, len, and range. */ public static final ImmutableMap<String, Object> UNIVERSE = makeUniverse(); private static ImmutableMap<String, Object> makeUniverse() { ImmutableMap.Builder<String, Object> env = ImmutableMap.builder(); env // .put("False", false) .put("True", true) .put("None", NONE); addMethods(env, new MethodLibrary()); return env.build(); } /** * Reports whether the argument is a legal Starlark value: a string, boolean, or StarlarkValue. */ public static boolean valid(Object x) { return x instanceof StarlarkValue || x instanceof String || x instanceof Boolean; } /** * Returns {@code x} if it is a {@link #valid} Starlark value, otherwise throws * IllegalArgumentException. */ public static <T> T checkValid(T x) { if (!valid(x)) { throw new IllegalArgumentException("invalid Starlark value: " + x.getClass()); } return x; } /** Reports whether {@code x} is Java null or Starlark None. */ public static boolean isNullOrNone(Object x) { return x == null || x == NONE; } /** Reports whether a Starlark value is assumed to be deeply immutable. */ // TODO(adonovan): eliminate the concept of querying for immutability. It is currently used for // only one purpose, the precondition for adding an element to a Depset, but Depsets should check // hashability, like Dicts. 
(Similarly, querying for hashability should go: just attempt to hash a // value, and be prepared for it to fail.) In practice, a value may be immutable, either // inherently (e.g. string) or because it has become frozen, but we don't need to query for it. // Just attempt a mutation and be prepared for it to fail. // It is inefficient and potentially inconsistent to ask before doing. // // The main obstacle is that although depsets disallow (say) lists as keys even when frozen, // they permit a tuple of lists, or a struct containing lists, and many users exploit this. public static boolean isImmutable(Object x) { // NB: This is used as the basis for accepting objects in Depsets, // as well as for accepting objects as keys for Starlark dicts. if (x instanceof String || x instanceof Boolean) { return true; } else if (x instanceof StarlarkValue) { return ((StarlarkValue) x).isImmutable(); } else { throw new IllegalArgumentException("invalid Starlark value: " + x.getClass()); } } /** * Converts a Java value {@code x} to a Starlark one, if x is not already a valid Starlark value. * An Integer, Long, or BigInteger is converted to a Starlark int, a Java List or Map is converted * to a Starlark list or dict, respectively, and null becomes {@link #NONE}. Any other * non-Starlark value causes the function to throw IllegalArgumentException. * * <p>This function is applied to the results of StarlarkMethod-annotated Java methods. 
*/ public static Object fromJava(Object x, @Nullable Mutability mutability) { if (x == null) { return NONE; } else if (valid(x)) { return x; } else if (x instanceof Number) { if (x instanceof Integer) { return StarlarkInt.of((Integer) x); } else if (x instanceof Long) { return StarlarkInt.of((Long) x); } else if (x instanceof BigInteger) { return StarlarkInt.of((BigInteger) x); } } else if (x instanceof List) { return StarlarkList.copyOf(mutability, (List<?>) x); } else if (x instanceof Map) { return Dict.copyOf(mutability, (Map<?, ?>) x); } throw new IllegalArgumentException("cannot expose internal type to Starlark: " + x.getClass()); } /** * Returns the truth value of a valid Starlark value, as if by the Starlark expression {@code * bool(x)}. */ public static boolean truth(Object x) { if (x instanceof Boolean) { return (Boolean) x; } else if (x instanceof StarlarkValue) { return ((StarlarkValue) x).truth(); } else if (x instanceof String) { return !((String) x).isEmpty(); } else { throw new IllegalArgumentException("invalid Starlark value: " + x.getClass()); } } /** * Checks whether the Freezable Starlark value is frozen or temporarily immutable due to active * iterators. * * @throws EvalException if the value is not mutable. */ public static void checkMutable(Mutability.Freezable x) throws EvalException { if (x.mutability().isFrozen()) { throw errorf("trying to mutate a frozen %s value", type(x)); } if (x.updateIteratorCount(0)) { throw errorf("%s value is temporarily immutable due to active for-loop iteration", type(x)); } } /** * Returns an iterable view of {@code x} if it is an iterable Starlark value; throws EvalException * otherwise. * * <p>Whereas the interpreter temporarily freezes the iterable value by bracketing {@code for} * loops and comprehensions in calls to {@link Freezable#updateIteratorCount}, iteration using * this method does not freeze the value. Callers should exercise care not to mutate the * underlying object during iteration. 
*/ public static Iterable<?> toIterable(Object x) throws EvalException { if (x instanceof StarlarkIterable) { return (Iterable<?>) x; } throw errorf("type '%s' is not iterable", type(x)); } /** * Returns a new array containing the elements of Starlark iterable value {@code x}. A Starlark * value is iterable if it implements {@link StarlarkIterable}. */ public static Object[] toArray(Object x) throws EvalException { // Specialize Sequence and Dict to avoid allocation and/or indirection. if (x instanceof Sequence) { return ((Sequence<?>) x).toArray(); } else if (x instanceof Dict) { return ((Dict<?, ?>) x).keySet().toArray(); } else { return Iterables.toArray(toIterable(x), Object.class); } } /** * Returns the length of a Starlark string, sequence (such as a list or tuple), dict, or other * iterable, as if by the Starlark expression {@code len(x)}, or -1 if the value is valid but has * no length. */ public static int len(Object x) { if (x instanceof String) { return ((String) x).length(); } else if (x instanceof Sequence) { return ((Sequence) x).size(); } else if (x instanceof Dict) { return ((Dict) x).size(); } else if (x instanceof StarlarkIterable) { // Iterables.size runs in constant time if x implements Collection. return Iterables.size((Iterable<?>) x); } else { checkValid(x); return -1; // valid but not a sequence } } /** Returns the name of the type of a value as if by the Starlark expression {@code type(x)}. */ public static String type(Object x) { return classType(x.getClass()); } /** * Returns the name of the type of instances of class c. * * <p>This function accepts any class, not just those of legal Starlark values, and may be used * for reporting error messages involving arbitrary Java classes, for example at the interface * between Starlark and Java. */ public static String classType(Class<?> c) { // Check for "direct hits" first to avoid needing to scan for annotations. 
if (c.equals(String.class)) { return "string"; } else if (StarlarkInt.class.isAssignableFrom(c)) { return "int"; } else if (c.equals(Integer.class)) { // Integer is not a legal Starlark value, but it is used for parameter types // in built-in functions; StarlarkBuiltin.fastcall does a range check // and reboxing. Use of this type means "signed 32-bit int value", // but that's a lot for an error message. return "int"; } else if (c.equals(Boolean.class)) { return "bool"; } // Shortcut for the most common types. // These cases can be handled by `getStarlarkBuiltin` // but `getStarlarkBuiltin` is quite expensive. if (c.equals(StarlarkList.class)) { return "list"; } else if (c.equals(Tuple.class)) { return "tuple"; } else if (c.equals(Dict.class)) { return "dict"; } else if (c.equals(NoneType.class)) { return "NoneType"; } else if (c.equals(StarlarkFunction.class)) { return "function"; } else if (c.equals(RangeList.class)) { return "range"; } else if (c.equals(UnboundMarker.class)) { return "unbound"; } StarlarkBuiltin module = StarlarkInterfaceUtils.getStarlarkBuiltin(c); if (module != null) { return module.name(); } else if (StarlarkCallable.class.isAssignableFrom(c)) { // All callable values have historically been lumped together as "function". // TODO(adonovan): built-in types that don't use StarlarkModule should report // their own type string, but this is a breaking change as users often // use type(x)=="function" for Starlark and built-in functions. return "function"; } else if (c.equals(Object.class)) { // "Unknown" is another unfortunate choice. // Object.class does mean "unknown" when talking about the type parameter // of a collection (List<Object>), but it also means "any" when used // as an argument to Sequence.cast, and more generally it means "value". return "unknown"; } else if (List.class.isAssignableFrom(c)) { // Any class of java.util.List that isn't a Sequence. 
return "List"; } else if (Map.class.isAssignableFrom(c)) { // Any class of java.util.Map that isn't a Dict. return "Map"; } else { String simpleName = c.getSimpleName(); return simpleName.isEmpty() ? c.getName() : simpleName; } } /** Returns the string form of a value as if by the Starlark expression {@code str(x)}. */ public static String str(Object x) { return new Printer().str(x).toString(); } /** Returns the string form of a value as if by the Starlark expression {@code repr(x)}. */ public static String repr(Object x) { return new Printer().repr(x).toString(); } /** Returns a string formatted as if by the Starlark expression {@code pattern % arguments}. */ public static String format(String pattern, Object... arguments) { Printer pr = new Printer(); Printer.format(pr, pattern, arguments); return pr.toString(); } /** Returns a string formatted as if by the Starlark expression {@code pattern % arguments}. */ public static String formatWithList(String pattern, List<?> arguments) { Printer pr = new Printer(); Printer.formatWithList(pr, pattern, arguments); return pr.toString(); } /** Returns a slice of a sequence as if by the Starlark operation {@code x[start:stop:step]}. */ public static Object slice( Mutability mu, Object x, Object startObj, Object stopObj, Object stepObj) throws EvalException { int n; if (x instanceof String) { n = ((String) x).length(); } else if (x instanceof Sequence) { n = ((Sequence) x).size(); } else { throw errorf("invalid slice operand: %s", type(x)); } int start; int stop; int step; // step if (stepObj == NONE) { step = 1; } else { step = toInt(stepObj, "slice step"); if (step == 0) { throw errorf("slice step cannot be zero"); } } // start, stop if (step > 0) { // positive stride: default indices are [0:n]. 
if (startObj == NONE) { start = 0; } else { start = EvalUtils.toIndex(toInt(startObj, "start index"), n); } if (stopObj == NONE) { stop = n; } else { stop = EvalUtils.toIndex(toInt(stopObj, "stop index"), n); } if (stop < start) { stop = start; // => empty result } } else { // negative stride: default indices are effectively [n-1:-1], // though to get this effect using explicit indices requires // [n-1:-1-n:-1] because of the treatment of negative values. if (startObj == NONE) { start = n - 1; } else { start = toInt(startObj, "start index"); if (start < 0) { start += n; } if (start >= n) { start = n - 1; } } if (stopObj == NONE) { stop = -1; } else { stop = toInt(stopObj, "stop index"); if (stop < 0) { stop += n; } if (stop < -1) { stop = -1; } } if (start < stop) { start = stop; // => empty result } } // slice operation if (x instanceof String) { return StringModule.slice((String) x, start, stop, step); } else { return ((Sequence<?>) x).getSlice(mu, start, stop, step); } } /** * Returns the signed 32-bit value of a Starlark int. Throws an exception including {@code what} * if x is not a Starlark int or its value is not exactly representable as a Java int. * * @throws IllegalArgumentException if x is an Integer, which is not a Starlark value. */ public static int toInt(Object x, String what) throws EvalException { if (x instanceof StarlarkInt) { return ((StarlarkInt) x).toInt(what); } if (x instanceof Integer) { throw new IllegalArgumentException("Integer is not a legal Starlark value"); } throw errorf("got %s for %s, want int", type(x), what); } /** * Calls the function-like value {@code fn} in the specified thread, passing it the given * positional and named arguments, as if by the Starlark expression {@code fn(*args, **kwargs)}. * * <p>See also {@link #fastcall}. 
*/ public static Object call( StarlarkThread thread, Object fn, List<Object> args, Map<String, Object> kwargs) throws EvalException, InterruptedException { Object[] named = new Object[2 * kwargs.size()]; int i = 0; for (Map.Entry<String, Object> e : kwargs.entrySet()) { named[i++] = e.getKey(); named[i++] = e.getValue(); } return fastcall(thread, fn, args.toArray(), named); } /** * Calls the function-like value {@code fn} in the specified thread, passing it the given * positional and named arguments in the "fastcall" array representation. * * <p>The caller must not subsequently modify or even inspect the two arrays. * * <p>If the call throws a StackOverflowError or any instance of RuntimeException (other than * UncheckedEvalException), regardless of whether it originates in a user-defined built-in * function or a bug in the interpreter itself, the exception is wrapped by an * UncheckedEvalException whose message includes the Starlark stack. The original exception may be * retrieved using {@code getCause}. */ public static Object fastcall( StarlarkThread thread, Object fn, Object[] positional, Object[] named) throws EvalException, InterruptedException { StarlarkCallable callable; if (fn instanceof StarlarkCallable) { callable = (StarlarkCallable) fn; } else { // @StarlarkMethod(selfCall)? MethodDescriptor desc = CallUtils.getSelfCallMethodDescriptor(thread.getSemantics(), fn.getClass()); if (desc == null) { throw errorf("'%s' object is not callable", type(fn)); } callable = new BuiltinCallable(fn, desc.getName(), desc); } thread.push(callable); try { return callable.fastcall(thread, positional, named); } catch (UncheckedEvalException ex) { throw ex; // already wrapped } catch (RuntimeException | StackOverflowError ex) { throw new UncheckedEvalException(ex, thread.getCallStack()); } catch (EvalException ex) { // If this exception was newly thrown, set its stack. 
throw ex.ensureStack(thread); } finally { thread.pop(); } } /** * An UncheckedEvalException decorates an unchecked exception with its Starlark stack, to help * maintainers locate problematic source expressions. The original exception can be retrieved * using {@code getCause}. */ public static final class UncheckedEvalException extends RuntimeException { private final ImmutableList<StarlarkThread.CallStackEntry> stack; private UncheckedEvalException( Throwable cause, ImmutableList<StarlarkThread.CallStackEntry> stack) { super(cause); this.stack = stack; } /** Returns the stack of Starlark calls active at the moment of the error. */ public ImmutableList<StarlarkThread.CallStackEntry> getCallStack() { return stack; } @Override public String getMessage() { return String.format("%s (Starlark stack: %s)", super.getMessage(), stack); } } /** * Returns a new EvalException with no location and an error message produced by Java-style string * formatting ({@code String.format(format, args)}). Use {@code errorf("%s", msg)} to produce an * error message from a non-constant expression {@code msg}. */ @FormatMethod @CheckReturnValue // don't forget to throw it public static EvalException errorf(String format, Object... args) { return new EvalException(String.format(format, args)); } // --- methods related to attributes (fields and methods) --- /** * Reports whether the value {@code x} has a field or method of the given name, as if by the * Starlark expression {@code hasattr(x, name)}. */ public static boolean hasattr(StarlarkSemantics semantics, Object x, String name) throws EvalException { return (x instanceof ClassObject && ((ClassObject) x).getValue(name) != null) || CallUtils.getAnnotatedMethods(semantics, x.getClass()).containsKey(name); } /** * Returns the named field or method of value {@code x}, as if by the Starlark expression {@code * getattr(x, name, defaultValue)}. 
If the value has no such attribute, getattr returns {@code * defaultValue} if non-null, or throws an EvalException otherwise. */ public static Object getattr( Mutability mu, StarlarkSemantics semantics, Object x, String name, @Nullable Object defaultValue) throws EvalException, InterruptedException { // StarlarkMethod-annotated field or method? MethodDescriptor method = CallUtils.getAnnotatedMethods(semantics, x.getClass()).get(name); if (method != null) { if (method.isStructField()) { return method.callField(x, semantics, mu); } else { return new BuiltinCallable(x, name, method); } } // user-defined field? if (x instanceof ClassObject) { ClassObject obj = (ClassObject) x; Object field = obj.getValue(semantics, name); if (field != null) { return Starlark.checkValid(field); } if (defaultValue != null) { return defaultValue; } String error = obj.getErrorMessageForUnknownField(name); if (error != null) { throw Starlark.errorf("%s", error); } } else if (defaultValue != null) { return defaultValue; } throw Starlark.errorf( "'%s' value has no field or method '%s'%s", Starlark.type(x), name, SpellChecker.didYouMean(name, dir(mu, semantics, x))); } /** * Returns a new sorted list containing the names of the Starlark-accessible fields and methods of * the specified value, as if by the Starlark expression {@code dir(x)}. */ public static StarlarkList<String> dir(Mutability mu, StarlarkSemantics semantics, Object x) { // Order the fields alphabetically. Set<String> fields = new TreeSet<>(); if (x instanceof ClassObject) { fields.addAll(((ClassObject) x).getFieldNames()); } fields.addAll(CallUtils.getAnnotatedMethods(semantics, x.getClass()).keySet()); return StarlarkList.copyOf(mu, fields); } // --- methods related to StarlarkMethod-annotated classes --- /** * Returns the value of the named field of Starlark value {@code x}, as defined by a Java method * with a {@code StarlarkMethod(structField=true)} annotation. * * <p>Most callers should use {@link #getattr} instead. 
*/ public static Object getAnnotatedField(StarlarkSemantics semantics, Object x, String name) throws EvalException, InterruptedException { return CallUtils.getAnnotatedField(semantics, x, name); } /** * Returns the names of the fields of Starlark value {@code x}, as defined by Java methods with * {@code StarlarkMethod(structField=true)} annotations under the specified semantics. * * <p>Most callers should use {@link #dir} instead. */ public static ImmutableSet<String> getAnnotatedFieldNames(StarlarkSemantics semantics, Object x) { return CallUtils.getAnnotatedFieldNames(semantics, x); } /** * Returns a map of Java methods and corresponding StarlarkMethod annotations for each annotated * Java method of the specified class. Elements are ordered by Java method name, which is not * necessarily the same as the Starlark attribute name. The set of enabled methods is determined * by {@link StarlarkSemantics#DEFAULT}. Excludes the {@code selfCall} method, if any. * * <p>Most callers should use {@link #dir} and {@link #getattr} instead. */ // TODO(adonovan): move to StarlarkInterfaceUtils; it's a static property of the annotations. public static ImmutableMap<Method, StarlarkMethod> getMethodAnnotations(Class<?> clazz) { ImmutableMap.Builder<Method, StarlarkMethod> result = ImmutableMap.builder(); for (MethodDescriptor desc : CallUtils.getAnnotatedMethods(StarlarkSemantics.DEFAULT, clazz).values()) { result.put(desc.getMethod(), desc.getAnnotation()); } return result.build(); } /** * Returns the {@code StarlarkMethod(selfCall=true)}-annotated Java method of the specified Java * class that is called when Starlark calls an instance of that class like a function. It returns * null if no such method exists. */ @Nullable public static Method getSelfCallMethod(StarlarkSemantics semantics, Class<?> clazz) { return CallUtils.getSelfCallMethod(semantics, clazz); } /** Equivalent to {@code addMethods(env, v, StarlarkSemantics.DEFAULT)}. 
*/ public static void addMethods(ImmutableMap.Builder<String, Object> env, Object v) { addMethods(env, v, StarlarkSemantics.DEFAULT); } /** * Adds to the environment {@code env} all Starlark methods of value {@code v}, filtered by the * given semantics. Starlark methods are Java methods of {@code v} with a {@link StarlarkMethod} * annotation whose {@code structField} and {@code selfCall} flags are both false. * * @throws IllegalArgumentException if any method annotation's {@link StarlarkMethod#structField} * flag is true. */ public static void addMethods( ImmutableMap.Builder<String, Object> env, Object v, StarlarkSemantics semantics) { Class<?> cls = v.getClass(); // TODO(adonovan): rather than silently skip the selfCall method, reject it. for (Map.Entry<String, MethodDescriptor> e : CallUtils.getAnnotatedMethods(semantics, cls).entrySet()) { String name = e.getKey(); // We cannot accept fields, as they are inherently problematic: // what if the Java method call fails, or gets interrupted? if (e.getValue().isStructField()) { throw new IllegalArgumentException( String.format("addMethods(%s): method %s has structField=true", cls.getName(), name)); } // We use the 2-arg (desc=null) BuiltinCallable constructor instead of passing // the descriptor that CallUtils.getAnnotatedMethod would return, // because most calls to addMethods implicitly pass StarlarkSemantics.DEFAULT, // which is probably the wrong semantics for the later call. // // The effect is that the default semantics determine which method names are // statically available in the environment, but the thread's semantics determine // the dynamic behavior of the method call; this includes a run-time check for // whether the method was disabled by the semantics. env.put(name, new BuiltinCallable(v, name)); } } /** * Adds to the environment {@code env} the value {@code v}, under its annotated name. The class of * {@code v} must have or inherit a {@link StarlarkBuiltin} annotation. 
*/ public static void addModule(ImmutableMap.Builder<String, Object> env, Object v) { Class<?> cls = v.getClass(); StarlarkBuiltin annot = StarlarkInterfaceUtils.getStarlarkBuiltin(cls); if (annot == null) { throw new IllegalArgumentException(cls.getName() + " is not annotated with @StarlarkBuiltin"); } env.put(annot.name(), v); } /** * Parses the input as a file, resolves it in the specified module environment, compiles it, and * executes it in the specified thread. On success it returns None, unless the file's final * statement is an expression, in which case its value is returned. * * @throws SyntaxError.Exception if there were (static) scanner, parser, or resolver errors. * @throws EvalException if there was a (dynamic) evaluation error. * @throws InterruptedException if the Java thread was interrupted during evaluation. */ public static Object execFile( ParserInput input, FileOptions options, Module module, StarlarkThread thread) throws SyntaxError.Exception, EvalException, InterruptedException { StarlarkFile file = StarlarkFile.parse(input, options); Program prog = Program.compileFile(file, module); return execFileProgram(prog, module, thread); } /** Variant of {@link #execFile} that creates a module for the given predeclared environment. */ // TODO(adonovan): is this needed? public static Object execFile( ParserInput input, FileOptions options, Map<String, Object> predeclared, StarlarkThread thread) throws SyntaxError.Exception, EvalException, InterruptedException { Module module = Module.withPredeclared(thread.getSemantics(), predeclared); return execFile(input, options, module, thread); } /** * Executes a compiled Starlark file (as obtained from {@link Program#compileFile}) in the given * StarlarkThread. On success it returns None, unless the file's final statement is an expression, * in which case its value is returned. * * @throws EvalException if there was a (dynamic) evaluation error. 
* @throws InterruptedException if the Java thread was interrupted during evaluation. */ public static Object execFileProgram(Program prog, Module module, StarlarkThread thread) throws EvalException, InterruptedException { Tuple<Object> defaultValues = Tuple.empty(); StarlarkFunction toplevel = new StarlarkFunction(prog.getResolvedFunction(), defaultValues, module); return Starlark.fastcall(thread, toplevel, NOARGS, NOARGS); } private static final Object[] NOARGS = {}; /** * Parses the input as an expression, resolves it in the specified module environment, compiles * it, evaluates it, and returns its value. * * @throws SyntaxError.Exception if there were (static) scanner, parser, or resolver errors. * @throws EvalException if there was a (dynamic) evaluation error. * @throws InterruptedException if the Java thread was interrupted during evaluation. */ public static Object eval( ParserInput input, FileOptions options, Module module, StarlarkThread thread) throws SyntaxError.Exception, EvalException, InterruptedException { StarlarkFunction fn = newExprFunction(input, options, module); return Starlark.fastcall(thread, fn, NOARGS, NOARGS); } /** Variant of {@link #eval} that creates a module for the given predeclared environment. */ // TODO(adonovan): is this needed? public static Object eval( ParserInput input, FileOptions options, Map<String, Object> predeclared, StarlarkThread thread) throws SyntaxError.Exception, EvalException, InterruptedException { Module module = Module.withPredeclared(thread.getSemantics(), predeclared); return eval(input, options, module, thread); } /** * Parses the input as an expression, resolves it in the specified module environment, and returns * a callable no-argument Starlark function value that computes and returns the value of the * expression. * * @throws SyntaxError.Exception if there were scanner, parser, or resolver errors. 
*/ public static StarlarkFunction newExprFunction( ParserInput input, FileOptions options, Module module) throws SyntaxError.Exception { Expression expr = Expression.parse(input, options); Program prog = Program.compileExpr(expr, module, options); Tuple<Object> defaultValues = Tuple.empty(); return new StarlarkFunction(prog.getResolvedFunction(), defaultValues, module); } /** * Starts the CPU profiler with the specified sampling period, writing a pprof profile to {@code * out}. All running Starlark threads are profiled. May be called concurrent with Starlark * execution. * * @throws IllegalStateException exception if the Starlark profiler is already running or if the * operating system's profiling resources for this process are already in use. */ public static void startCpuProfile(OutputStream out, Duration period) { CpuProfiler.start(out, period); } /** * Stops the profiler and waits for the log to be written. Throws an unchecked exception if the * profiler was not already started by a prior call to {@link #startCpuProfile}. */ public static void stopCpuProfile() throws IOException { CpuProfiler.stop(); } }
/*
 * Copyright 2015 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.bgpio.types;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import org.jboss.netty.buffer.ChannelBuffer;
import org.onosproject.bgpio.exceptions.BgpParseException;
import org.onosproject.bgpio.util.Constants;
import org.onosproject.bgpio.util.Validation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.MoreObjects;

/**
 * Provides Implementation of AsPath mandatory BGP Path Attribute.
 *
 * <p>An AS_PATH attribute is a sequence of path segments; each segment has a
 * one-byte type, a one-byte AS count, and that many AS numbers. This class
 * collects the AS numbers of AS_SET and AS_SEQUENCE segments into two flat
 * lists (segment ordering/grouping beyond set-vs-sequence is not retained).
 *
 * <p>NOTE(review): AS numbers are read as 2-byte values ({@link #ASNUM_SIZE},
 * {@code readShort}); 4-byte AS numbers (RFC 6793 AS4_PATH) are not handled
 * here — confirm callers only feed 2-octet-AS peers.
 */
public class AsPath implements BgpValueType {

    /**
     * Enum to provide AS types.
     *
     * <p>Values follow the BGP path-segment type codes (RFC 4271 / RFC 5065).
     */
    public enum ASTYPE {
        AS_SET(1), AS_SEQUENCE(2), AS_CONFED_SEQUENCE(3), AS_CONFED_SET(4);

        // Wire-format path segment type code for this AS segment kind.
        int value;

        /**
         * Assign val with the value as the AS type.
         *
         * @param val AS type
         */
        ASTYPE(int val) {
            value = val;
        }

        /**
         * Returns value of AS type.
         *
         * @return AS type
         */
        public byte type() {
            return (byte) value;
        }
    }

    private static final Logger log = LoggerFactory.getLogger(AsPath.class);

    // Attribute type code of AS_PATH (RFC 4271 section 4.3).
    public static final byte ASPATH_TYPE = 2;
    // Path segment type codes recognised by read(); other segment types
    // (e.g. confederation segments) are only logged and skipped.
    public static final byte ASPATH_SET_TYPE = 1;
    public static final byte ASPATH_SEQ_TYPE = 2;
    // Size in bytes of one AS number on the wire (2-octet AS only).
    public static final byte ASNUM_SIZE = 2;

    // True only when this instance was built via the parameterised
    // constructor, i.e. the attribute was actually present/parsed.
    private boolean isAsPath = false;
    // AS numbers gathered from AS_SET segments (null if unparsed).
    private List<Short> aspathSet;
    // AS numbers gathered from AS_SEQUENCE segments (null if unparsed).
    private List<Short> aspathSeq;

    /**
     * Initialize Fields.
     *
     * <p>Creates an "absent" AS_PATH: both lists null and
     * {@link #isaspathSet()} reporting false.
     */
    public AsPath() {
        this.aspathSeq = null;
        this.aspathSet = null;
    }

    /**
     * Constructor to initialize parameters.
     *
     * @param aspathSet ASpath Set type
     * @param aspathSeq ASpath Sequence type
     */
    public AsPath(List<Short> aspathSet, List<Short> aspathSeq) {
        this.aspathSeq = aspathSeq;
        this.aspathSet = aspathSet;
        this.isAsPath = true;
    }

    /**
     * Reads from the channel buffer and parses AsPath.
     *
     * <p>Expects {@code cb} positioned at the attribute header. A copy of the
     * buffer is taken up front so that, on a flags error, the raw attribute
     * bytes (header + value) can be attached to the exception for the BGP
     * NOTIFICATION data field.
     *
     * @param cb ChannelBuffer
     * @return object of AsPath
     * @throws BgpParseException while parsing AsPath
     */
    public static AsPath read(ChannelBuffer cb) throws BgpParseException {
        List<Short> aspathSet = new ArrayList<>();
        List<Short> aspathSeq = new ArrayList<>();
        // Copy BEFORE the header is consumed, so 'data' below can include it.
        ChannelBuffer tempCb = cb.copy();
        Validation validation = Validation.parseAttributeHeader(cb);

        // Declared attribute length must not exceed the remaining bytes.
        if (cb.readableBytes() < validation.getLength()) {
            Validation.validateLen(BgpErrorType.UPDATE_MESSAGE_ERROR, BgpErrorType.ATTRIBUTE_LENGTH_ERROR,
                    validation.getLength());
        }
        //if fourth bit is set, length is read as short otherwise as byte , len includes type, length and value
        int len = validation.isShort() ? validation.getLength() + Constants.TYPE_AND_LEN_AS_SHORT : validation
                .getLength() + Constants.TYPE_AND_LEN_AS_BYTE;
        ChannelBuffer data = tempCb.readBytes(len);
        // AS_PATH must be well-known mandatory: optional bit clear is required;
        // this rejects the flag combination optional=1, transitive=0, partial=1.
        if (validation.getFirstBit() && !validation.getSecondBit() && validation.getThirdBit()) {
            throw new BgpParseException(BgpErrorType.UPDATE_MESSAGE_ERROR, BgpErrorType.ATTRIBUTE_FLAGS_ERROR, data);
        }

        // Slice off exactly the attribute value and walk its path segments.
        ChannelBuffer tempBuf = cb.readBytes(validation.getLength());
        while (tempBuf.readableBytes() > 0) {
            byte pathSegType = tempBuf.readByte();
            //no of ASes
            byte pathSegLen = tempBuf.readByte();
            int length = pathSegLen * ASNUM_SIZE;
            // Segment must fit in the remaining attribute value.
            if (tempBuf.readableBytes() < length) {
                Validation.validateLen(BgpErrorType.UPDATE_MESSAGE_ERROR, BgpErrorType.ATTRIBUTE_LENGTH_ERROR,
                        length);
            }
            ChannelBuffer aspathBuf = tempBuf.readBytes(length);
            while (aspathBuf.readableBytes() > 0) {
                short asNum;
                asNum = aspathBuf.readShort();
                switch (pathSegType) {
                case ASPATH_SET_TYPE:
                    aspathSet.add(asNum);
                    break;
                case ASPATH_SEQ_TYPE:
                    aspathSeq.add(asNum);
                    break;
                default:
                    // Confederation segment types (3/4) land here; their AS
                    // numbers are consumed by readShort() but discarded.
                    log.debug("Other type Not Supported:" + pathSegType);
                }
            }
        }
        return new AsPath(aspathSet, aspathSeq);
    }

    @Override
    public short getType() {
        return ASPATH_TYPE;
    }

    /**
     * Returns whether ASpath path attribute is present.
     *
     * @return whether ASpath path attribute is present
     */
    public boolean isaspathSet() {
        return this.isAsPath;
    }

    /**
     * Returns list of ASNum in ASpath Sequence.
     *
     * @return list of ASNum in ASpath Sequence
     */
    public List<Short> asPathSeq() {
        return this.aspathSeq;
    }

    /**
     * Returns list of ASNum in ASpath SET.
     *
     * @return list of ASNum in ASpath SET
     */
    public List<Short> asPathSet() {
        return this.aspathSet;
    }

    @Override
    public int hashCode() {
        return Objects.hash(aspathSet, aspathSeq);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Equality compares only the two AS lists; the isAsPath flag is
        // intentionally ignored (consistent with hashCode above).
        if (obj instanceof AsPath) {
            AsPath other = (AsPath) obj;
            return Objects.equals(aspathSet, other.aspathSet) && Objects.equals(aspathSeq, other.aspathSeq);
        }
        return false;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .omitNullValues()
                .add("aspathSet", aspathSet)
                .add("aspathSeq", aspathSeq)
                .toString();
    }

    @Override
    public int write(ChannelBuffer cb) {
        //Not required to Implement as of now
        return 0;
    }

    @Override
    public int compareTo(Object o) {
        // TODO Auto-generated method stub
        return 0;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.vectorized; import io.netty.buffer.ArrowBuf; import org.apache.arrow.vector.*; import org.apache.arrow.vector.complex.*; import org.apache.arrow.vector.holders.NullableVarCharHolder; import org.apache.spark.annotation.Evolving; import org.apache.spark.sql.execution.arrow.ArrowUtils; import org.apache.spark.sql.types.*; import org.apache.spark.unsafe.types.UTF8String; /** * A column vector backed by Apache Arrow. Currently calendar interval type and map type are not * supported. 
 */
@Evolving
public final class ArrowColumnVector extends ColumnVector {

  // Type-specific adapter wrapping the underlying Arrow ValueVector.
  private final ArrowVectorAccessor accessor;
  // Non-null only when the vector is a StructVector: one child column per struct field.
  private ArrowColumnVector[] childColumns;

  @Override
  public boolean hasNull() {
    return accessor.getNullCount() > 0;
  }

  @Override
  public int numNulls() {
    return accessor.getNullCount();
  }

  // Closes child columns (if any) before the backing vector, and drops the
  // references so a double-close cannot reach already-released children.
  @Override
  public void close() {
    if (childColumns != null) {
      for (int i = 0; i < childColumns.length; i++) {
        childColumns[i].close();
        childColumns[i] = null;
      }
      childColumns = null;
    }
    accessor.close();
  }

  @Override
  public boolean isNullAt(int rowId) {
    return accessor.isNullAt(rowId);
  }

  @Override
  public boolean getBoolean(int rowId) {
    return accessor.getBoolean(rowId);
  }

  @Override
  public byte getByte(int rowId) {
    return accessor.getByte(rowId);
  }

  @Override
  public short getShort(int rowId) {
    return accessor.getShort(rowId);
  }

  @Override
  public int getInt(int rowId) {
    return accessor.getInt(rowId);
  }

  @Override
  public long getLong(int rowId) {
    return accessor.getLong(rowId);
  }

  @Override
  public float getFloat(int rowId) {
    return accessor.getFloat(rowId);
  }

  @Override
  public double getDouble(int rowId) {
    return accessor.getDouble(rowId);
  }

  // Object-returning getters below return null for null slots rather than
  // delegating, so accessors only ever see non-null rows.
  @Override
  public Decimal getDecimal(int rowId, int precision, int scale) {
    if (isNullAt(rowId)) return null;
    return accessor.getDecimal(rowId, precision, scale);
  }

  @Override
  public UTF8String getUTF8String(int rowId) {
    if (isNullAt(rowId)) return null;
    return accessor.getUTF8String(rowId);
  }

  @Override
  public byte[] getBinary(int rowId) {
    if (isNullAt(rowId)) return null;
    return accessor.getBinary(rowId);
  }

  @Override
  public ColumnarArray getArray(int rowId) {
    if (isNullAt(rowId)) return null;
    return accessor.getArray(rowId);
  }

  // Map type is not supported by this vector implementation (see class javadoc).
  @Override
  public ColumnarMap getMap(int rowId) {
    throw new UnsupportedOperationException();
  }

  @Override
  public ArrowColumnVector getChild(int ordinal) { return childColumns[ordinal]; }

  /**
   * Wraps an Arrow {@link ValueVector}, selecting the accessor matching its
   * concrete type. Struct vectors additionally get one child ArrowColumnVector
   * per field; any unrecognized vector type is rejected.
   */
  public ArrowColumnVector(ValueVector vector) {
    super(ArrowUtils.fromArrowField(vector.getField()));

    if (vector instanceof BitVector) {
      accessor = new BooleanAccessor((BitVector) vector);
    } else if (vector instanceof TinyIntVector) {
      accessor = new ByteAccessor((TinyIntVector) vector);
    } else if (vector instanceof SmallIntVector) {
      accessor = new ShortAccessor((SmallIntVector) vector);
    } else if (vector instanceof IntVector) {
      accessor = new IntAccessor((IntVector) vector);
    } else if (vector instanceof BigIntVector) {
      accessor = new LongAccessor((BigIntVector) vector);
    } else if (vector instanceof Float4Vector) {
      accessor = new FloatAccessor((Float4Vector) vector);
    } else if (vector instanceof Float8Vector) {
      accessor = new DoubleAccessor((Float8Vector) vector);
    } else if (vector instanceof DecimalVector) {
      accessor = new DecimalAccessor((DecimalVector) vector);
    } else if (vector instanceof VarCharVector) {
      accessor = new StringAccessor((VarCharVector) vector);
    } else if (vector instanceof VarBinaryVector) {
      accessor = new BinaryAccessor((VarBinaryVector) vector);
    } else if (vector instanceof DateDayVector) {
      accessor = new DateAccessor((DateDayVector) vector);
    } else if (vector instanceof TimeStampMicroTZVector) {
      accessor = new TimestampAccessor((TimeStampMicroTZVector) vector);
    } else if (vector instanceof ListVector) {
      ListVector listVector = (ListVector) vector;
      accessor = new ArrayAccessor(listVector);
    } else if (vector instanceof StructVector) {
      StructVector structVector = (StructVector) vector;
      accessor = new StructAccessor(structVector);

      childColumns = new ArrowColumnVector[structVector.size()];
      for (int i = 0; i < childColumns.length; ++i) {
        childColumns[i] = new ArrowColumnVector(structVector.getVectorById(i));
      }
    } else {
      throw new UnsupportedOperationException();
    }
  }

  // Base accessor: defines null handling and lifecycle; every type-specific
  // getter defaults to UnsupportedOperationException and is overridden only by
  // the accessor for the matching Arrow vector type.
  private abstract static class ArrowVectorAccessor {

    private final ValueVector vector;

    ArrowVectorAccessor(ValueVector vector) {
      this.vector = vector;
    }

    // TODO: should be final after removing ArrayAccessor workaround
    boolean isNullAt(int rowId) {
      return vector.isNull(rowId);
    }

    final int getNullCount() {
      return vector.getNullCount();
    }

    final void close() {
      vector.close();
    }

    boolean getBoolean(int rowId) {
      throw new UnsupportedOperationException();
    }

    byte getByte(int rowId) {
      throw new UnsupportedOperationException();
    }

    short getShort(int rowId) {
      throw new UnsupportedOperationException();
    }

    int getInt(int rowId) {
      throw new UnsupportedOperationException();
    }

    long getLong(int rowId) {
      throw new UnsupportedOperationException();
    }

    float getFloat(int rowId) {
      throw new UnsupportedOperationException();
    }

    double getDouble(int rowId) {
      throw new UnsupportedOperationException();
    }

    Decimal getDecimal(int rowId, int precision, int scale) {
      throw new UnsupportedOperationException();
    }

    UTF8String getUTF8String(int rowId) {
      throw new UnsupportedOperationException();
    }

    byte[] getBinary(int rowId) {
      throw new UnsupportedOperationException();
    }

    ColumnarArray getArray(int rowId) {
      throw new UnsupportedOperationException();
    }
  }

  private static class BooleanAccessor extends ArrowVectorAccessor {

    private final BitVector accessor;

    BooleanAccessor(BitVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final boolean getBoolean(int rowId) {
      // Arrow stores booleans as bits; 1 means true.
      return accessor.get(rowId) == 1;
    }
  }

  private static class ByteAccessor extends ArrowVectorAccessor {

    private final TinyIntVector accessor;

    ByteAccessor(TinyIntVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final byte getByte(int rowId) {
      return accessor.get(rowId);
    }
  }

  private static class ShortAccessor extends ArrowVectorAccessor {

    private final SmallIntVector accessor;

    ShortAccessor(SmallIntVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final short getShort(int rowId) {
      return accessor.get(rowId);
    }
  }

  private static class IntAccessor extends ArrowVectorAccessor {

    private final IntVector accessor;

    IntAccessor(IntVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final int getInt(int rowId) {
      return accessor.get(rowId);
    }
  }

  private static class LongAccessor extends ArrowVectorAccessor {

    private final BigIntVector accessor;

    LongAccessor(BigIntVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final long getLong(int rowId) {
      return accessor.get(rowId);
    }
  }

  private static class FloatAccessor extends ArrowVectorAccessor {

    private final Float4Vector accessor;

    FloatAccessor(Float4Vector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final float getFloat(int rowId) {
      return accessor.get(rowId);
    }
  }

  private static class DoubleAccessor extends ArrowVectorAccessor {

    private final Float8Vector accessor;

    DoubleAccessor(Float8Vector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final double getDouble(int rowId) {
      return accessor.get(rowId);
    }
  }

  private static class DecimalAccessor extends ArrowVectorAccessor {

    private final DecimalVector accessor;

    DecimalAccessor(DecimalVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final Decimal getDecimal(int rowId, int precision, int scale) {
      if (isNullAt(rowId)) return null;
      return Decimal.apply(accessor.getObject(rowId), precision, scale);
    }
  }

  private static class StringAccessor extends ArrowVectorAccessor {

    private final VarCharVector accessor;
    // Reused holder avoids allocating per row; safe because accessors are
    // used from a single reader at a time.
    private final NullableVarCharHolder stringResult = new NullableVarCharHolder();

    StringAccessor(VarCharVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final UTF8String getUTF8String(int rowId) {
      accessor.get(rowId, stringResult);
      if (stringResult.isSet == 0) {
        return null;
      } else {
        // Builds the string directly off the Arrow buffer's native memory —
        // no copy; the returned UTF8String is only valid while the buffer is.
        return UTF8String.fromAddress(null,
          stringResult.buffer.memoryAddress() + stringResult.start,
          stringResult.end - stringResult.start);
      }
    }
  }

  private static class BinaryAccessor extends ArrowVectorAccessor {

    private final VarBinaryVector accessor;

    BinaryAccessor(VarBinaryVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final byte[] getBinary(int rowId) {
      return accessor.getObject(rowId);
    }
  }

  // Dates surface through getInt (days since epoch in Arrow's DateDay encoding).
  private static class DateAccessor extends ArrowVectorAccessor {

    private final DateDayVector accessor;

    DateAccessor(DateDayVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final int getInt(int rowId) {
      return accessor.get(rowId);
    }
  }

  // Timestamps surface through getLong (microsecond values from the TZ-aware vector).
  private static class TimestampAccessor extends ArrowVectorAccessor {

    private final TimeStampMicroTZVector accessor;

    TimestampAccessor(TimeStampMicroTZVector vector) {
      super(vector);
      this.accessor = vector;
    }

    @Override
    final long getLong(int rowId) {
      return accessor.get(rowId);
    }
  }

  private static class ArrayAccessor extends ArrowVectorAccessor {

    private final ListVector accessor;
    // Single column wrapping the list's flattened element vector; slices of it
    // are handed out per row via getArray.
    private final ArrowColumnVector arrayData;

    ArrayAccessor(ListVector vector) {
      super(vector);
      this.accessor = vector;
      this.arrayData = new ArrowColumnVector(vector.getDataVector());
    }

    @Override
    final boolean isNullAt(int rowId) {
      // TODO: Workaround if vector has all non-null values, see ARROW-1948
      if (accessor.getValueCount() > 0 && accessor.getValidityBuffer().capacity() == 0) {
        return false;
      } else {
        return super.isNullAt(rowId);
      }
    }

    @Override
    final ColumnarArray getArray(int rowId) {
      // Each row's element range is [start, end) in the flattened data vector,
      // read from the list's offset buffer.
      ArrowBuf offsets = accessor.getOffsetBuffer();
      int index = rowId * ListVector.OFFSET_WIDTH;
      int start = offsets.getInt(index);
      int end = offsets.getInt(index + ListVector.OFFSET_WIDTH);
      return new ColumnarArray(arrayData, start, end - start);
    }
  }

  /**
   * Any call to "get" method will throw UnsupportedOperationException.
   *
   * Access struct values in a ArrowColumnVector doesn't use this accessor. Instead, it uses
   * getStruct() method defined in the parent class. Any call to "get" method in this class is a
   * bug in the code.
   *
   */
  private static class StructAccessor extends ArrowVectorAccessor {

    StructAccessor(StructVector vector) {
      super(vector);
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.core.transaction.impl;

import javax.transaction.xa.Xid;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.Pair;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.io.IOCallback;
import org.apache.activemq.artemis.core.io.SequentialFile;
import org.apache.activemq.artemis.core.journal.Journal;
import org.apache.activemq.artemis.core.journal.JournalLoadInformation;
import org.apache.activemq.artemis.core.message.impl.MessageInternal;
import org.apache.activemq.artemis.core.paging.PageTransactionInfo;
import org.apache.activemq.artemis.core.paging.PagedMessage;
import org.apache.activemq.artemis.core.paging.PagingManager;
import org.apache.activemq.artemis.core.paging.PagingStore;
import org.apache.activemq.artemis.core.paging.cursor.PagePosition;
import org.apache.activemq.artemis.core.persistence.GroupingInfo;
import org.apache.activemq.artemis.core.persistence.OperationContext;
import org.apache.activemq.artemis.core.persistence.QueueBindingInfo;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.persistence.config.PersistedAddressSetting;
import org.apache.activemq.artemis.core.persistence.config.PersistedRoles;
import org.apache.activemq.artemis.core.persistence.impl.PageCountPending;
import org.apache.activemq.artemis.core.postoffice.Binding;
import org.apache.activemq.artemis.core.postoffice.PostOffice;
import org.apache.activemq.artemis.core.replication.ReplicationManager;
import org.apache.activemq.artemis.core.server.LargeServerMessage;
import org.apache.activemq.artemis.core.server.MessageReference;
import org.apache.activemq.artemis.core.server.RouteContextList;
import org.apache.activemq.artemis.core.server.ServerMessage;
import org.apache.activemq.artemis.core.server.group.impl.GroupBinding;
import org.apache.activemq.artemis.core.server.impl.JournalLoader;
import org.apache.activemq.artemis.core.transaction.ResourceManager;
import org.apache.activemq.artemis.core.transaction.Transaction;
import org.apache.activemq.artemis.core.transaction.TransactionOperation;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests TransactionImpl's timeout/rollback interaction: once a transaction has
 * timed out, commit must be rejected, and rollback must fire the registered
 * TransactionOperation callbacks exactly once.
 */
public class TransactionImplTest extends ActiveMQTestBase {

   /**
    * A timed-out transaction must refuse commit (throwing ActiveMQException on
    * every attempt) and a subsequent rollback must run afterRollback exactly
    * once — repeated commit failures must not trigger extra callbacks.
    */
   @Test
   public void testTimeoutAndThenCommitWithARollback() throws Exception {
      // Timeout of 10 seconds; checked against a time 60s in the future so the
      // transaction is already considered timed out.
      TransactionImpl tx = new TransactionImpl(newXID(), new FakeSM(), 10);
      Assert.assertTrue(tx.hasTimedOut(System.currentTimeMillis() + 60000, 10));

      // Counters observed through the operation callbacks below.
      final AtomicInteger commit = new AtomicInteger(0);
      final AtomicInteger rollback = new AtomicInteger(0);

      tx.addOperation(new TransactionOperation() {
         @Override
         public void beforePrepare(Transaction tx) throws Exception {

         }

         @Override
         public void afterPrepare(Transaction tx) {

         }

         @Override
         public void beforeCommit(Transaction tx) throws Exception {

         }

         @Override
         public void afterCommit(Transaction tx) {
            System.out.println("commit...");
            commit.incrementAndGet();
         }

         @Override
         public void beforeRollback(Transaction tx) throws Exception {

         }

         @Override
         public void afterRollback(Transaction tx) {
            System.out.println("rollback...");
            rollback.incrementAndGet();
         }

         @Override
         public List<MessageReference> getRelatedMessageReferences() {
            return null;
         }

         @Override
         public List<MessageReference> getListOnConsumer(long consumerID) {
            return null;
         }
      });

      // Both commit attempts must fail because the transaction timed out.
      for (int i = 0; i < 2; i++) {
         try {
            tx.commit();
            Assert.fail("Exception expected!");
         } catch (ActiveMQException expected) {
         }
      }

      // it should just be ignored!
      tx.rollback();

      Assert.assertEquals(0, commit.get());
      Assert.assertEquals(1, rollback.get());
   }

   /**
    * Rolling back a timed-out transaction, then marking it rollback-only and
    * rolling back again (as happens when another failure is detected in
    * parallel with the timeout), must still invoke afterRollback only once.
    */
   @Test
   public void testTimeoutThenRollbackWithRollback() throws Exception {
      TransactionImpl tx = new TransactionImpl(newXID(), new FakeSM(), 10);
      Assert.assertTrue(tx.hasTimedOut(System.currentTimeMillis() + 60000, 10));

      final AtomicInteger commit = new AtomicInteger(0);
      final AtomicInteger rollback = new AtomicInteger(0);

      tx.addOperation(new TransactionOperation() {
         @Override
         public void beforePrepare(Transaction tx) throws Exception {

         }

         @Override
         public void afterPrepare(Transaction tx) {

         }

         @Override
         public void beforeCommit(Transaction tx) throws Exception {

         }

         @Override
         public void afterCommit(Transaction tx) {
            System.out.println("commit...");
            commit.incrementAndGet();
         }

         @Override
         public void beforeRollback(Transaction tx) throws Exception {

         }

         @Override
         public void afterRollback(Transaction tx) {
            System.out.println("rollback...");
            rollback.incrementAndGet();
         }

         @Override
         public List<MessageReference> getRelatedMessageReferences() {
            return null;
         }

         @Override
         public List<MessageReference> getListOnConsumer(long consumerID) {
            return null;
         }
      });

      tx.rollback();

      // This is a case where another failure was detected (In parallel with the TX timeout for instance)
      tx.markAsRollbackOnly(new ActiveMQException("rollback only again"));
      tx.rollback();

      Assert.assertEquals(0, commit.get());
      Assert.assertEquals(1, rollback.get());
   }

   /**
    * No-op StorageManager stub: every method returns null/0/false and persists
    * nothing, except afterCompleteOperations which invokes the callback
    * immediately so TransactionImpl's completion path runs synchronously.
    */
   class FakeSM implements StorageManager {

      @Override
      public OperationContext getContext() {
         return null;
      }

      @Override
      public void lineUpContext() {

      }

      @Override
      public void criticalError(Throwable error) {
         error.printStackTrace();
      }

      @Override
      public OperationContext newContext(Executor executor) {
         return null;
      }

      @Override
      public OperationContext newSingleThreadContext() {
         return null;
      }

      @Override
      public void setContext(OperationContext context) {

      }

      @Override
      public void stop(boolean ioCriticalError) throws Exception {

      }

      @Override
      public void pageClosed(SimpleString storeName, int pageNumber) {

      }

      @Override
      public void pageDeleted(SimpleString storeName, int pageNumber) {

      }

      @Override
      public void pageWrite(PagedMessage message, int pageNumber) {

      }

      // Completes synchronously — the only behavioral method in this stub.
      @Override
      public void afterCompleteOperations(IOCallback run) {
         run.done();
      }

      @Override
      public boolean waitOnOperations(long timeout) throws Exception {
         return false;
      }

      @Override
      public void waitOnOperations() throws Exception {

      }

      @Override
      public void beforePageRead() throws Exception {

      }

      @Override
      public void afterPageRead() throws Exception {

      }

      @Override
      public ByteBuffer allocateDirectBuffer(int size) {
         return null;
      }

      @Override
      public void freeDirectBuffer(ByteBuffer buffer) {

      }

      @Override
      public void clearContext() {

      }

      @Override
      public void confirmPendingLargeMessageTX(Transaction transaction, long messageID, long recordID) throws Exception {

      }

      @Override
      public void confirmPendingLargeMessage(long recordID) throws Exception {

      }

      @Override
      public void storeMessage(ServerMessage message) throws Exception {

      }

      @Override
      public void storeReference(long queueID, long messageID, boolean last) throws Exception {

      }

      @Override
      public void deleteMessage(long messageID) throws Exception {

      }

      @Override
      public void storeAcknowledge(long queueID, long messageID) throws Exception {

      }

      @Override
      public void storeCursorAcknowledge(long queueID, PagePosition position) throws Exception {

      }

      @Override
      public void updateDeliveryCount(MessageReference ref) throws Exception {

      }

      @Override
      public void updateScheduledDeliveryTime(MessageReference ref) throws Exception {

      }

      @Override
      public void storeDuplicateID(SimpleString address, byte[] duplID, long recordID) throws Exception {

      }

      @Override
      public void deleteDuplicateID(long recordID) throws Exception {

      }

      @Override
      public void storeMessageTransactional(long txID, ServerMessage message) throws Exception {

      }

      @Override
      public void storeReferenceTransactional(long txID, long queueID, long messageID) throws Exception {

      }

      @Override
      public void storeAcknowledgeTransactional(long txID, long queueID, long messageID) throws Exception {

      }

      @Override
      public void storeCursorAcknowledgeTransactional(long txID, long queueID, PagePosition position) throws Exception {

      }

      @Override
      public void deleteCursorAcknowledgeTransactional(long txID, long ackID) throws Exception {

      }

      @Override
      public void deleteCursorAcknowledge(long ackID) throws Exception {

      }

      @Override
      public void storePageCompleteTransactional(long txID, long queueID, PagePosition position) throws Exception {

      }

      @Override
      public void deletePageComplete(long ackID) throws Exception {

      }

      @Override
      public void updateScheduledDeliveryTimeTransactional(long txID, MessageReference ref) throws Exception {

      }

      @Override
      public void storeDuplicateIDTransactional(long txID, SimpleString address, byte[] duplID, long recordID) throws Exception {

      }

      @Override
      public void updateDuplicateIDTransactional(long txID, SimpleString address, byte[] duplID, long recordID) throws Exception {

      }

      @Override
      public void deleteDuplicateIDTransactional(long txID, long recordID) throws Exception {

      }

      @Override
      public LargeServerMessage createLargeMessage() {
         return null;
      }

      @Override
      public LargeServerMessage createLargeMessage(long id, MessageInternal message) throws Exception {
         return null;
      }

      @Override
      public SequentialFile createFileForLargeMessage(long messageID, LargeMessageExtension extension) {
         return null;
      }

      @Override
      public void prepare(long txID, Xid xid) throws Exception {

      }

      @Override
      public void commit(long txID) throws Exception {

      }

      @Override
      public void commit(long txID, boolean lineUpContext) throws Exception {

      }

      @Override
      public void rollback(long txID) throws Exception {

      }

      @Override
      public void rollbackBindings(long txID) throws Exception {

      }

      @Override
      public void commitBindings(long txID) throws Exception {

      }

      @Override
      public void storePageTransaction(long txID, PageTransactionInfo pageTransaction) throws Exception {

      }

      @Override
      public void updatePageTransaction(long txID, PageTransactionInfo pageTransaction, int depage) throws Exception {

      }

      @Override
      public void updatePageTransaction(PageTransactionInfo pageTransaction, int depage) throws Exception {

      }

      @Override
      public void deletePageTransactional(long recordID) throws Exception {

      }

      @Override
      public JournalLoadInformation loadMessageJournal(PostOffice postOffice,
                                                      PagingManager pagingManager,
                                                      ResourceManager resourceManager,
                                                      Map<Long, QueueBindingInfo> queueInfos,
                                                      Map<SimpleString, List<Pair<byte[], Long>>> duplicateIDMap,
                                                      Set<Pair<Long, Long>> pendingLargeMessages,
                                                      List<PageCountPending> pendingNonTXPageCounter,
                                                      JournalLoader journalLoader) throws Exception {
         return null;
      }

      @Override
      public long storeHeuristicCompletion(Xid xid, boolean isCommit) throws Exception {
         return 0;
      }

      @Override
      public void deleteHeuristicCompletion(long id) throws Exception {

      }

      @Override
      public void addQueueBinding(long tx, Binding binding) throws Exception {

      }

      @Override
      public void deleteQueueBinding(long tx, long queueBindingID) throws Exception {

      }

      @Override
      public JournalLoadInformation loadBindingJournal(List<QueueBindingInfo> queueBindingInfos,
                                                      List<GroupingInfo> groupingInfos) throws Exception {
         return null;
      }

      @Override
      public void addGrouping(GroupBinding groupBinding) throws Exception {

      }

      @Override
      public void deleteGrouping(long tx, GroupBinding groupBinding) throws Exception {

      }

      @Override
      public void storeAddressSetting(PersistedAddressSetting addressSetting) throws Exception {

      }

      @Override
      public void deleteAddressSetting(SimpleString addressMatch) throws Exception {

      }

      @Override
      public List<PersistedAddressSetting> recoverAddressSettings() throws Exception {
         return null;
      }

      @Override
      public void storeSecurityRoles(PersistedRoles persistedRoles) throws Exception {

      }

      @Override
      public void deleteSecurityRoles(SimpleString addressMatch) throws Exception {

      }

      @Override
      public List<PersistedRoles> recoverPersistedRoles() throws Exception {
         return null;
      }

      @Override
      public long storePageCounter(long txID, long queueID, long value) throws Exception {
         return 0;
      }

      @Override
      public long storePendingCounter(long queueID, long pageID, int inc) throws Exception {
         return 0;
      }

      @Override
      public void deleteIncrementRecord(long txID, long recordID) throws Exception {

      }

      @Override
      public void deletePageCounter(long txID, long recordID) throws Exception {

      }

      @Override
      public void deletePendingPageCounter(long txID, long recordID) throws Exception {

      }

      @Override
      public long storePageCounterInc(long txID, long queueID, int add) throws Exception {
         return 0;
      }

      @Override
      public long storePageCounterInc(long queueID, int add) throws Exception {
         return 0;
      }

      @Override
      public Journal getBindingsJournal() {
         return null;
      }

      @Override
      public Journal getMessageJournal() {
         return null;
      }

      @Override
      public void startReplication(ReplicationManager replicationManager,
                                   PagingManager pagingManager,
                                   String nodeID,
                                   boolean autoFailBack,
                                   long initialReplicationSyncTimeout) throws Exception {

      }

      @Override
      public boolean addToPage(PagingStore store,
                               ServerMessage msg,
                               Transaction tx,
                               RouteContextList listCtx) throws Exception {
         return false;
      }

      @Override
      public void stopReplication() {

      }

      @Override
      public void addBytesToLargeMessage(SequentialFile appendFile, long messageID, byte[] bytes) throws Exception {

      }

      @Override
      public void storeID(long journalID, long id) throws Exception {

      }

      @Override
      public void deleteID(long journalD) throws Exception {

      }

      @Override
      public void readLock() {

      }

      @Override
      public void readUnLock() {

      }

      @Override
      public void persistIdGenerator() {

      }

      @Override
      public void start() throws Exception {

      }

      @Override
      public void stop() throws Exception {

      }

      @Override
      public boolean isStarted() {
         return false;
      }

      @Override
      public long generateID() {
         return 0;
      }

      @Override
      public long getCurrentID() {
         return 0;
      }
   }
}
/* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.inferred.freebuilder.processor.property; import static org.inferred.freebuilder.processor.BuilderMethods.addAllMethod; import static org.inferred.freebuilder.processor.BuilderMethods.addMethod; import static org.inferred.freebuilder.processor.BuilderMethods.clearMethod; import static org.inferred.freebuilder.processor.BuilderMethods.getter; import static org.inferred.freebuilder.processor.BuilderMethods.mutator; import static org.inferred.freebuilder.processor.BuilderMethods.removeMethod; import static org.inferred.freebuilder.processor.model.ModelUtils.erasesToAnyOf; import static org.inferred.freebuilder.processor.model.ModelUtils.maybeDeclared; import static org.inferred.freebuilder.processor.model.ModelUtils.maybeUnbox; import static org.inferred.freebuilder.processor.model.ModelUtils.needsSafeVarargs; import static org.inferred.freebuilder.processor.model.ModelUtils.overrides; import static org.inferred.freebuilder.processor.model.ModelUtils.upperBound; import static org.inferred.freebuilder.processor.property.MergeAction.appendingToCollections; import static org.inferred.freebuilder.processor.source.FunctionalType.consumer; import static org.inferred.freebuilder.processor.source.FunctionalType.functionalTypeAcceptedByMethod; import static org.inferred.freebuilder.processor.source.feature.GuavaLibrary.GUAVA; import com.google.common.collect.ImmutableSet; import 
org.inferred.freebuilder.processor.Datatype; import org.inferred.freebuilder.processor.Declarations; import org.inferred.freebuilder.processor.excerpt.CheckedSet; import org.inferred.freebuilder.processor.source.Excerpt; import org.inferred.freebuilder.processor.source.Excerpts; import org.inferred.freebuilder.processor.source.FunctionalType; import org.inferred.freebuilder.processor.source.LazyName; import org.inferred.freebuilder.processor.source.SourceBuilder; import org.inferred.freebuilder.processor.source.Type; import org.inferred.freebuilder.processor.source.ValueType; import org.inferred.freebuilder.processor.source.Variable; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.Spliterator; import java.util.stream.BaseStream; import javax.lang.model.element.TypeElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; import javax.lang.model.util.Types; /** * {@link PropertyCodeGenerator} providing fluent methods for {@link Set} properties. 
*/ class SetProperty extends PropertyCodeGenerator { static class Factory implements PropertyCodeGenerator.Factory { @Override public Optional<SetProperty> create(Config config) { DeclaredType type = maybeDeclared(config.getProperty().getType()).orElse(null); if (!erasesToAnyOf(type, Set.class, ImmutableSet.class)) { return Optional.empty(); } TypeMirror elementType = upperBound(config.getElements(), type.getTypeArguments().get(0)); Optional<TypeMirror> unboxedType = maybeUnbox(elementType, config.getTypes()); boolean needsSafeVarargs = needsSafeVarargs(unboxedType.orElse(elementType)); boolean overridesAddMethod = hasAddMethodOverride(config, unboxedType.orElse(elementType)); boolean overridesVarargsAddMethod = hasVarargsAddMethodOverride(config, unboxedType.orElse(elementType)); FunctionalType mutatorType = functionalTypeAcceptedByMethod( config.getBuilder(), mutator(config.getProperty()), consumer(wildcardSuperSet(elementType, config.getElements(), config.getTypes())), config.getElements(), config.getTypes()); return Optional.of(new SetProperty( config.getDatatype(), config.getProperty(), elementType, unboxedType, mutatorType, needsSafeVarargs, overridesAddMethod, overridesVarargsAddMethod)); } private static boolean hasAddMethodOverride(Config config, TypeMirror elementType) { return overrides( config.getBuilder(), config.getTypes(), addMethod(config.getProperty()), elementType); } private static boolean hasVarargsAddMethodOverride(Config config, TypeMirror elementType) { return overrides( config.getBuilder(), config.getTypes(), addMethod(config.getProperty()), config.getTypes().getArrayType(elementType)); } private static TypeMirror wildcardSuperSet( TypeMirror elementType, Elements elements, Types types) { TypeElement setType = elements.getTypeElement(Set.class.getName()); return types.getWildcardType(null, types.getDeclaredType(setType, elementType)); } } private final TypeMirror elementType; private final Optional<TypeMirror> unboxedType; private final 
FunctionalType mutatorType; // modifiers for this field sit above this chunk; mutator functional-interface shape
  private final boolean needsSafeVarargs;
  private final boolean overridesAddMethod;
  private final boolean overridesVarargsAddMethod;

  // Captures everything needed to emit the builder accessors for one Set-typed property.
  SetProperty(
      Datatype datatype,
      Property property,
      TypeMirror elementType,
      Optional<TypeMirror> unboxedType,
      FunctionalType mutatorType,
      boolean needsSafeVarargs,
      boolean overridesAddMethod,
      boolean overridesVarargsAddMethod) {
    super(datatype, property);
    this.elementType = elementType;
    this.unboxedType = unboxedType;
    this.mutatorType = mutatorType;
    this.needsSafeVarargs = needsSafeVarargs;
    this.overridesAddMethod = overridesAddMethod;
    this.overridesVarargsAddMethod = overridesVarargsAddMethod;
  }

  // Emits the value-type field: ImmutableSet when Guava is on the classpath, plain Set otherwise.
  @Override
  public void addValueFieldDeclaration(SourceBuilder code) {
    code.addLine("private final %s<%s> %s;",
        code.feature(GUAVA).isAvailable() ? ImmutableSet.class : Set.class,
        elementType,
        property.getField());
  }

  // Emits the builder field. With Guava the field starts as ImmutableSet.of() and is
  // copy-on-write (see the "instanceof ImmutableSet" checks below); without Guava it is
  // a plain mutable LinkedHashSet.
  @Override
  public void addBuilderFieldDeclaration(SourceBuilder code) {
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine("private %s<%s> %s = %s.of();",
          Set.class, elementType, property.getField(), ImmutableSet.class);
    } else {
      code.addLine("private final %1$s<%2$s> %3$s = new %1$s<>();",
          LinkedHashSet.class, elementType, property.getField());
    }
  }

  // Emits every accessor the builder exposes for this property, in declaration order.
  @Override
  public void addBuilderFieldAccessors(SourceBuilder code) {
    addAdd(code);
    addVarargsAdd(code);
    addSpliteratorAddAll(code);
    addStreamAddAll(code);
    addIterableAddAll(code);
    addRemove(code);
    addMutator(code);
    addClear(code);
    addGetter(code);
  }

  // Emits add(element). For boxed element types a Objects.requireNonNull guard is generated.
  private void addAdd(SourceBuilder code) {
    code.addLine("")
        .addLine("/**")
        .addLine(" * Adds {@code element} to the set to be returned from %s.",
            datatype.getType().javadocNoArgMethodLink(property.getGetterName()))
        .addLine(" * If the set already contains {@code element}, then {@code %s}",
            addMethod(property))
        .addLine(" * has no effect (only the previously added element is retained).")
        .addLine(" *")
        .addLine(" * @return this {@code %s} object", datatype.getBuilder().getSimpleName());
    if (!unboxedType.isPresent()) {
      code.addLine(" * @throws NullPointerException if {@code element} is null");
    }
    code.addLine(" */")
        .addLine("public %s %s(%s element) {",
            datatype.getBuilder(), addMethod(property), unboxedType.orElse(elementType));
    if (code.feature(GUAVA).isAvailable()) {
      // Copy-on-write: thaw the shared ImmutableSet into a LinkedHashSet before mutating.
      code.addLine(" if (%s instanceof %s) {", property.getField(), ImmutableSet.class)
          .addLine(" %1$s = new %2$s<>(%1$s);", property.getField(), LinkedHashSet.class)
          .addLine(" }");
    }
    if (unboxedType.isPresent()) {
      code.addLine(" %s.add(element);", property.getField());
    } else {
      code.addLine(" %s.add(%s.requireNonNull(element));", property.getField(), Objects.class);
    }
    code.addLine(" return (%s) this;", datatype.getBuilder())
        .addLine("}");
  }

  // Emits the varargs add(elements...). @SafeVarargs/@SuppressWarnings handling depends on
  // whether the user already overrides the varargs method (final cannot then be added).
  private void addVarargsAdd(SourceBuilder code) {
    code.addLine("")
        .addLine("/**")
        .addLine(" * Adds each element of {@code elements} to the set to be returned from")
        .addLine(" * %s, ignoring duplicate elements",
            datatype.getType().javadocNoArgMethodLink(property.getGetterName()))
        .addLine(" * (only the first duplicate element is added).")
        .addLine(" *")
        .addLine(" * @return this {@code %s} object", datatype.getBuilder().getSimpleName());
    if (!unboxedType.isPresent()) {
      code.addLine(" * @throws NullPointerException if {@code elements} is null or contains a")
          .addLine(" * null element");
    }
    code.addLine(" */");
    if (needsSafeVarargs) {
      if (!overridesVarargsAddMethod) {
        code.addLine("@%s", SafeVarargs.class)
            .addLine("@%s({\"varargs\"})", SuppressWarnings.class);
      } else {
        code.addLine("@%s({\"unchecked\", \"varargs\"})", SuppressWarnings.class);
      }
    }
    code.add("public ");
    if (needsSafeVarargs && !overridesVarargsAddMethod) {
      // @SafeVarargs is only legal on final (or static/private) methods.
      code.add("final ");
    }
    code.add("%s %s(%s... elements) {\n",
        datatype.getBuilder(), addMethod(property), unboxedType.orElse(elementType));
    Optional<Class<?>> arrayUtils =
        code.feature(GUAVA).arrayUtils(unboxedType.orElse(elementType));
    if (arrayUtils.isPresent()) {
      code.addLine(" return %s(%s.asList(elements));", addAllMethod(property), arrayUtils.get());
    } else {
      // Primitive type, Guava not available
      code.addLine(" for (%s element : elements) {", elementType)
          .addLine(" %s(element);", addMethod(property))
          .addLine(" }")
          .addLine(" return (%s) this;", datatype.getBuilder());
    }
    code.addLine("}");
  }

  // Emits addAll(Spliterator) — delegates element-by-element to add(element).
  private void addSpliteratorAddAll(SourceBuilder code) {
    addJavadocForAddAll(code);
    code.addLine("public %s %s(%s<? extends %s> elements) {",
            datatype.getBuilder(), addAllMethod(property), Spliterator.class, elementType)
        .addLine(" elements.forEachRemaining(this::%s);", addMethod(property))
        .addLine(" return (%s) this;", datatype.getBuilder())
        .addLine("}");
  }

  // Emits addAll(BaseStream) — delegates to the Spliterator overload.
  private void addStreamAddAll(SourceBuilder code) {
    addJavadocForAddAll(code);
    code.addLine("public %s %s(%s<? extends %s, ?> elements) {",
            datatype.getBuilder(), addAllMethod(property), BaseStream.class, elementType)
        .addLine(" return %s(elements.spliterator());", addAllMethod(property))
        .addLine("}");
  }

  // Emits addAll(Iterable) — delegates element-by-element to add(element).
  private void addIterableAddAll(SourceBuilder code) {
    addJavadocForAddAll(code);
    addAccessorAnnotations(code);
    code.addLine("public %s %s(%s<? extends %s> elements) {",
            datatype.getBuilder(), addAllMethod(property), Iterable.class, elementType)
        .addLine(" elements.forEach(this::%s);", addMethod(property))
        .addLine(" return (%s) this;", datatype.getBuilder())
        .addLine("}");
  }

  // Shared javadoc for the three addAll overloads above.
  private void addJavadocForAddAll(SourceBuilder code) {
    code.addLine("")
        .addLine("/**")
        .addLine(" * Adds each element of {@code elements} to the set to be returned from")
        .addLine(" * %s, ignoring duplicate elements",
            datatype.getType().javadocNoArgMethodLink(property.getGetterName()))
        .addLine(" * (only the first duplicate element is added).")
        .addLine(" *")
        .addLine(" * @return this {@code %s} object", datatype.getBuilder().getSimpleName())
        .addLine(" * @throws NullPointerException if {@code elements} is null or contains a")
        .addLine(" * null element")
        .addLine(" */");
  }

  // Emits remove(element); mirrors addAdd, including the copy-on-write thaw.
  private void addRemove(SourceBuilder code) {
    code.addLine("")
        .addLine("/**")
        .addLine(" * Removes {@code element} from the set to be returned from %s.",
            datatype.getType().javadocNoArgMethodLink(property.getGetterName()))
        .addLine(" * Does nothing if {@code element} is not a member of the set.")
        .addLine(" *")
        .addLine(" * @return this {@code %s} object", datatype.getBuilder().getSimpleName());
    if (!unboxedType.isPresent()) {
      code.addLine(" * @throws NullPointerException if {@code element} is null");
    }
    code.addLine(" */")
        .addLine("public %s %s(%s element) {",
            datatype.getBuilder(), removeMethod(property), unboxedType.orElse(elementType));
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine(" if (%s instanceof %s) {", property.getField(), ImmutableSet.class)
          .addLine(" %1$s = new %2$s<>(%1$s);", property.getField(), LinkedHashSet.class)
          .addLine(" }");
    }
    if (unboxedType.isPresent()) {
      code.addLine(" %s.remove(element);", property.getField());
    } else {
      code.addLine(" %s.remove(%s.requireNonNull(element));", property.getField(), Objects.class);
    }
    code.addLine(" return (%s) this;", datatype.getBuilder())
        .addLine("}");
  }

  // Emits mutateX(consumer). When the user overrides add(), the consumer receives a
  // CheckedSet wrapper so additions still route through the override.
  private void addMutator(SourceBuilder code) {
    code.addLine("")
        .addLine("/**")
        .addLine(" * Applies {@code mutator} to the set to be returned from %s.",
            datatype.getType().javadocNoArgMethodLink(property.getGetterName()))
        .addLine(" *")
        .addLine(" * <p>This method mutates the set in-place. {@code mutator} is a void")
        .addLine(" * consumer, so any value returned from a lambda will be ignored. Take care")
        .addLine(" * not to call pure functions, like %s.",
            Type.from(Collection.class).javadocNoArgMethodLink("stream"))
        .addLine(" *")
        .addLine(" * @return this {@code Builder} object")
        .addLine(" * @throws NullPointerException if {@code mutator} is null")
        .addLine(" */")
        .addLine("public %s %s(%s mutator) {",
            datatype.getBuilder(), mutator(property), mutatorType.getFunctionalInterface());
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine(" if (%s instanceof %s) {", property.getField(), ImmutableSet.class)
          .addLine(" %1$s = new %2$s<>(%1$s);", property.getField(), LinkedHashSet.class)
          .addLine(" }");
    }
    if (overridesAddMethod) {
      code.addLine(" mutator.%s(new %s<%s>(%s, this::%s));",
          mutatorType.getMethodName(),
          CheckedSet.TYPE,
          elementType,
          property.getField(),
          addMethod(property));
    } else {
      code.addLine(" // If %s is overridden, this method will be updated to delegate to it",
              addMethod(property))
          .addLine(" mutator.%s(%s);", mutatorType.getMethodName(), property.getField());
    }
    code.addLine(" return (%s) this;", datatype.getBuilder())
        .addLine("}");
  }

  // Emits clearX(). With Guava, an untouched ImmutableSet is simply reset to of()
  // instead of being thawed and cleared.
  private void addClear(SourceBuilder code) {
    code.addLine("")
        .addLine("/**")
        .addLine(" * Clears the set to be returned from %s.",
            datatype.getType().javadocNoArgMethodLink(property.getGetterName()))
        .addLine(" *")
        .addLine(" * @return this {@code %s} object", datatype.getBuilder().getSimpleName())
        .addLine(" */")
        .addLine("public %s %s() {", datatype.getBuilder(), clearMethod(property));
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine("if (%s instanceof %s) {", property.getField(), ImmutableSet.class)
          .addLine(" %s = %s.of();", property.getField(), ImmutableSet.class)
          .addLine("} else {");
    }
    code.addLine("%s.clear();", property.getField());
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine("}");
    }
    code.addLine(" return (%s) this;", datatype.getBuilder())
        .addLine("}");
  }

  // Emits the getter, which returns a live unmodifiable view; the field is thawed first
  // so later builder mutations are visible through the view.
  private void addGetter(SourceBuilder code) {
    code.addLine("")
        .addLine("/**")
        .addLine(" * Returns an unmodifiable view of the set that will be returned by")
        .addLine(" * %s.", datatype.getType().javadocNoArgMethodLink(property.getGetterName()))
        .addLine(" * Changes to this builder will be reflected in the view.")
        .addLine(" */")
        .addLine("public %s<%s> %s() {", Set.class, elementType, getter(property));
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine(" if (%s instanceof %s) {", property.getField(), ImmutableSet.class)
          .addLine(" %1$s = new %2$s<>(%1$s);", property.getField(), LinkedHashSet.class)
          .addLine(" }");
    }
    code.addLine(" return %s.unmodifiableSet(%s);", Collections.class, property.getField())
        .addLine("}");
  }

  // Emits the assignment into the final value-type field, via ImmutableSet.copyOf or the
  // generated immutableSet(...) helper (ImmutableSetMethod) when Guava is absent.
  @Override
  public void addFinalFieldAssignment(SourceBuilder code, Excerpt finalField, String builder) {
    Excerpt immutableSetMethod;
    if (code.feature(GUAVA).isAvailable()) {
      immutableSetMethod = Excerpts.add("%s.copyOf", ImmutableSet.class);
    } else {
      immutableSetMethod = ImmutableSetMethod.REFERENCE;
    }
    code.addLine("%s = %s(%s);", finalField, immutableSetMethod,
        property.getField().on(builder));
  }

  // Emits toBuilder support: share the (possibly immutable) set with Guava, copy otherwise.
  @Override
  public void addAssignToBuilder(SourceBuilder code, Variable builder) {
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine("%s = %s;", property.getField().on(builder), property.getField());
    } else {
      code.addLine("%s.addAll(%s);", property.getField().on(builder), property.getField());
    }
  }

  // Emits mergeFrom(value). With Guava, an empty builder field can adopt the value's
  // ImmutableSet by reference instead of re-adding every element.
  @Override
  public void addMergeFromValue(SourceBuilder code, String value) {
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine("if (%s instanceof %s && %s == %s.<%s>of()) {",
              value,
              datatype.getValueType().getQualifiedName(),
              property.getField(),
              ImmutableSet.class,
              elementType)
          .addLine(" %s = %s.copyOf(%s.%s());",
              property.getField(), ImmutableSet.class, value, property.getGetterName())
          .addLine("} else {");
    }
    code.addLine("%s(%s.%s());", addAllMethod(property), value, property.getGetterName());
    if (code.feature(GUAVA).isAvailable()) {
      code.addLine("}");
    }
  }

  // Emits mergeFrom(builder): bulk-add the other builder's elements.
  @Override
  public void addMergeFromBuilder(SourceBuilder code, String builder) {
    Excerpt base = Declarations.upcastToGeneratedBuilder(code, datatype, builder);
    code.addLine("%s(%s);", addAllMethod(property), property.getField().on(base));
  }

  @Override
  public Set<MergeAction> getMergeActions() {
    return ImmutableSet.of(appendingToCollections());
  }

  @Override
  public void addSetFromResult(SourceBuilder code, Excerpt builder, Excerpt variable) {
    code.addLine("%s.%s(%s);", builder, addAllMethod(property), variable);
  }

  @Override
  public void addClearField(SourceBuilder code) {
    code.addLine("%s();", clearMethod(property));
  }

  // Generates a private static immutableSet(Set) helper used when Guava is unavailable:
  // sizes 0/1 map to Collections.emptySet/singleton, otherwise an unmodifiable copy.
  private static class ImmutableSetMethod extends ValueType implements Excerpt {
    static final LazyName REFERENCE = LazyName.of("immutableSet", new ImmutableSetMethod());

    private ImmutableSetMethod() {}

    @Override
    public void addTo(SourceBuilder code) {
      code.addLine("")
          .addLine("private static <E> %1$s<E> %2$s(%1$s<E> elements) {", Set.class, REFERENCE)
          .addLine(" switch (elements.size()) {")
          .addLine(" case 0:")
          .addLine(" return %s.emptySet();", Collections.class)
          .addLine(" case 1:")
          .addLine(" return %s.singleton(elements.iterator().next());", Collections.class)
          .addLine(" default:")
          .addLine(" return %s.unmodifiableSet(new %s<>(elements));",
              Collections.class, LinkedHashSet.class)
          .addLine(" }")
          .addLine("}");
    }

    @Override
    protected void addFields(FieldReceiver fields) {}
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.query.cq.internal.command;

import java.io.IOException;
import java.util.Set;

import org.apache.logging.log4j.Logger;

import org.apache.geode.cache.operations.ExecuteCQOperationContext;
import org.apache.geode.cache.query.CqException;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.cq.internal.CqServiceImpl;
import org.apache.geode.cache.query.cq.internal.ServerCQImpl;
import org.apache.geode.cache.query.internal.DefaultQuery;
import org.apache.geode.cache.query.internal.DefaultQueryService;
import org.apache.geode.cache.query.internal.cq.CqServiceProvider;
import org.apache.geode.distributed.internal.DistributionStats;
import org.apache.geode.internal.cache.tier.Acceptor;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.Command;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.tier.sockets.CacheClientNotifier;
import org.apache.geode.internal.cache.tier.sockets.CacheClientProxy;
import org.apache.geode.internal.cache.tier.sockets.CacheServerStats;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.cache.vmotion.VMotionObserver;
import org.apache.geode.internal.cache.vmotion.VMotionObserverHolder;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.logging.internal.log4j.api.LogService;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;

/**
 * Server-side command that registers (and optionally executes) a client's continuous
 * query (CQ). Singleton, obtained via {@link #getCommand()}.
 *
 * @since GemFire 6.1
 */
public class ExecuteCQ61 extends BaseCQCommand {
  protected static final Logger logger = LogService.getLogger();

  // Stateless command object; a single shared instance serves all connections.
  private static final ExecuteCQ61 singleton = new ExecuteCQ61();

  public static Command getCommand() {
    return singleton;
  }

  private ExecuteCQ61() {
    // nothing
  }

  /**
   * Handles an EXECUTECQ / EXECUTECQ_WITH_IR message: parses the CQ name, query string,
   * state and durability from the message parts, authorizes the request, registers the CQ
   * with the CQ service, optionally runs the query for initial results, and replies to the
   * client (chunked response).
   */
  @Override
  public void cmdExecute(final Message clientMessage, final ServerConnection serverConnection,
      final SecurityService securityService, long start) throws IOException, InterruptedException {
    Acceptor acceptor = serverConnection.getAcceptor();
    CachedRegionHelper crHelper = serverConnection.getCachedRegionHelper();
    ClientProxyMembershipID id = serverConnection.getProxyID();
    CacheServerStats stats = serverConnection.getCacheServerStats();

    serverConnection.setAsTrue(REQUIRES_RESPONSE);
    serverConnection.setAsTrue(REQUIRES_CHUNKED_RESPONSE);

    // Retrieve the data from the message parts
    String cqName = clientMessage.getPart(0).getString();
    String cqQueryString = clientMessage.getPart(1).getString();
    int cqState = clientMessage.getPart(2).getInt();

    Part isDurablePart = clientMessage.getPart(3);
    byte[] isDurableByte = isDurablePart.getSerializedForm();
    // Durable iff the part is present and its first byte is non-zero.
    boolean isDurable = !(isDurableByte == null || isDurableByte[0] == 0);

    // region data policy — always carried in the last part of the message
    Part regionDataPolicyPart = clientMessage.getPart(clientMessage.getNumberOfParts() - 1);
    byte[] regionDataPolicyPartBytes = regionDataPolicyPart.getSerializedForm();
    if (logger.isDebugEnabled()) {
      logger.debug("{}: Received {} request from {} CqName: {} queryString: {}",
          serverConnection.getName(), MessageType.getString(clientMessage.getMessageType()),
          serverConnection.getSocketString(), cqName, cqQueryString);
    }

    // Check if the Server is running in NotifyBySubscription=true mode.
    CacheClientNotifier ccn = acceptor.getCacheClientNotifier();
    if (ccn != null) {
      CacheClientProxy proxy = ccn.getClientProxy(id);
      if (proxy != null && !proxy.isNotifyBySubscription()) {
        // This should have been taken care at the client.
        String err =
            "Server notifyBySubscription mode is set to false. CQ execution is not supported in this mode.";
        sendCqResponse(MessageType.CQDATAERROR_MSG_TYPE, err, clientMessage.getTransactionId(),
            null, serverConnection);
        return;
      }
    }

    DefaultQueryService qService;
    CqServiceImpl cqServiceForExec;
    Query query;
    Set cqRegionNames;
    ExecuteCQOperationContext executeCQContext = null;
    ServerCQImpl cqQuery;

    try {
      qService = (DefaultQueryService) crHelper.getCache().getLocalQueryService();

      // Authorization check
      AuthorizeRequest authzRequest = serverConnection.getAuthzRequest();
      query = qService.newQuery(cqQueryString);
      cqRegionNames = ((DefaultQuery) query).getRegionsInQuery(null);
      if (authzRequest != null) {
        executeCQContext = authzRequest.executeCQAuthorize(cqName, cqQueryString, cqRegionNames);
        String newCqQueryString = executeCQContext.getQuery();
        // The authorization callback may rewrite the query; if so, re-parse it.
        if (!cqQueryString.equals(newCqQueryString)) {
          query = qService.newQuery(newCqQueryString);
          cqQueryString = newCqQueryString;
          cqRegionNames = executeCQContext.getRegionNames();
          if (cqRegionNames == null) {
            cqRegionNames = ((DefaultQuery) query).getRegionsInQuery(null);
          }
        }
      }

      // auth check to see if user can create CQ or not
      ((DefaultQuery) query).getRegionsInQuery(null).forEach((regionName) -> securityService
          .authorize(Resource.DATA, Operation.READ, regionName));

      // test hook to trigger vMotion during CQ registration
      if (CqServiceProvider.VMOTION_DURING_CQ_REGISTRATION_FLAG) {
        VMotionObserver vmo = VMotionObserverHolder.getInstance();
        vmo.vMotionBeforeCQRegistration();
      }

      cqServiceForExec = (CqServiceImpl) qService.getCqService();
      // registering cq with serverConnection so that when CCP will require auth info it can access
      // that
      // registering cq auth before as possibility that you may get event
      serverConnection.setCq(cqName, isDurable);
      cqQuery = (ServerCQImpl) cqServiceForExec.executeCq(cqName, cqQueryString, cqState, id, ccn,
          isDurable, true, regionDataPolicyPartBytes[0], null);
    } catch (CqException cqe) {
      // Registration failed: report the CqException and undo the setCq above.
      sendCqResponse(MessageType.CQ_EXCEPTION_TYPE, "", clientMessage.getTransactionId(), cqe,
          serverConnection);
      serverConnection.removeCq(cqName, isDurable);
      return;
    } catch (Exception e) {
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.removeCq(cqName, isDurable);
      return;
    }

    boolean sendResults = false;

    if (clientMessage.getMessageType() == MessageType.EXECUTECQ_WITH_IR_MSG_TYPE) {
      sendResults = true;
    }

    // Execute the query only if it is execute with initial results or
    // if it is a non PR query with execute query and maintain keys flags set
    // NOTE(review): precedence is sendResults || (EXECUTE_QUERY_DURING_INIT
    // && MAINTAIN_KEYS && !isPR()) — && binds tighter than ||.
    boolean successQuery = false;
    if (sendResults || CqServiceImpl.EXECUTE_QUERY_DURING_INIT && CqServiceProvider.MAINTAIN_KEYS
        && !cqQuery.isPR()) {
      // Execute the query and send the result-set to client.
      try {
        if (query == null) {
          query = qService.newQuery(cqQueryString);
          cqRegionNames = ((DefaultQuery) query).getRegionsInQuery(null);
        }
        ((DefaultQuery) query).setIsCqQuery(true);
        successQuery = processQuery(clientMessage, query, cqQueryString, cqRegionNames, start,
            cqQuery, executeCQContext, serverConnection, sendResults, securityService);

        // Update the CQ statistics.
        cqQuery.getVsdStats().setCqInitialResultsTime(DistributionStats.getStatTime() - start);
        stats.incProcessExecuteCqWithIRTime(DistributionStats.getStatTime() - start);
        // logger.fine("Time spent in execute with initial results :" +
        // DistributionStats.getStatTime() + ", " + oldstart);
      } finally {
        // To handle any exception.
        // If failure to execute the query, close the CQ.
        if (!successQuery) {
          try {
            cqServiceForExec.closeCq(cqName, id);
          } catch (Exception ignored) {
            // Ignore.
          }
        }
      }
    } else {
      // Don't execute query for cq.execute and
      // if it is a PR query with execute query and maintain keys flags not set
      cqQuery.cqResultKeysInitialized = true;
      successQuery = true;
    }

    if (!sendResults && successQuery) {
      // Send OK to client
      sendCqResponse(MessageType.REPLY, "cq created successfully.",
          clientMessage.getTransactionId(), null, serverConnection);

      long start2 = DistributionStats.getStatTime();
      stats.incProcessCreateCqTime(start2 - start);
    }
    serverConnection.setAsTrue(RESPONDED);
  }
}
/*
 * This file is part of Flow Engine, licensed under the MIT License (MIT).
 *
 * Copyright (c) 2013 Spout LLC <http://www.spout.org/>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.flowpowered.api.material;

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;

import com.flowpowered.api.Server;
import com.flowpowered.api.material.block.BlockFullState;
import com.flowpowered.api.util.SyncedStringMap;
import com.flowpowered.commons.store.BinaryFileStore;
import com.flowpowered.commons.store.MemoryStore;
import com.flowpowered.math.GenericMath;

/**
 * Statically handles all server-side registered materials.
 */
public abstract class MaterialRegistry {
    // Display-name -> material map; keys are normalized by formatName(...).
    private final static ConcurrentHashMap<String, Material> nameLookup = new ConcurrentHashMap<>(1000);
    // Ids fit in an unsigned 16-bit value.
    private final static int MAX_SIZE = 1 << 16;
    // Per-id slot; each holds the material and its sub-materials indexed by data value.
    @SuppressWarnings ({"unchecked", "rawtypes"})
    private final static AtomicReference<Material[]>[] materialLookup = new AtomicReference[MAX_SIZE];
    private static boolean setup = false;
    private static SyncedStringMap materialRegistry;
    // Shared placeholder marking an unregistered id slot.
    private final static Material[] NULL_MATERIAL_ARRAY = new Material[] {null};

    static {
        // Pre-fill every slot with the placeholder so compareAndSet-based
        // registration can detect double registration.
        for (int i = 0; i < materialLookup.length; i++) {
            materialLookup[i] = new AtomicReference<>();
            materialLookup[i].set(NULL_MATERIAL_ARRAY);
        }
    }

    /**
     * Sets up the registry for a server, backing the id map with a
     * {@code materials.dat} file in the world folder (loaded if it exists).
     */
    public static void setupServer(Server server) {
        if (setup) {
            throw new IllegalStateException("Can not setup material registry twice!");
        }
        Path serverItemMap = server.getWorldManager().getWorldFolder().resolve("materials.dat");
        BinaryFileStore store = new BinaryFileStore(serverItemMap);
        materialRegistry = SyncedStringMap.create(null, store, 1, Short.MAX_VALUE, Material.class.getName());
        if (Files.exists(serverItemMap)) {
            store.load();
        }
        setup = true;
    }

    /**
     * Sets up the registry for a client, backing the id map with an in-memory store.
     */
    public static void setupClient() {
        if (setup) {
            throw new IllegalStateException("Can not setup material registry twice!");
        }
        materialRegistry = SyncedStringMap.create(null, new MemoryStore<>(), 1, Short.MAX_VALUE, Material.class.getName());
        setup = true;
    }

    /**
     * Checks whether the MaterialRegistry has been set up. If this returns {@code true}, further attempts to set up the registry will result in {@link IllegalStateException}.
     *
     * @return whether the MaterialRegistry has been set up
     */
    public static boolean isSetup() {
        return setup;
    }

    /**
     * Registers the material in the material lookup service
     *
     * @param material to register
     * @return id of the material registered
     */
    protected static int register(Material material) {
        if (material.isSubMaterial()) {
            // Sub-materials share their parent's id; only the name lookup gets an entry.
            material.getParentMaterial().registerSubMaterial(material);
            nameLookup.put(formatName(material.getDisplayName()), material);
            return material.getParentMaterial().getId();
        } else {
            int id = materialRegistry.register(material.getName());
            Material[] subArray = new Material[] {material};
            // CAS from the placeholder guards against two materials claiming one id.
            if (!materialLookup[id].compareAndSet(NULL_MATERIAL_ARRAY, subArray)) {
                throw new IllegalArgumentException(materialLookup[id].get() + " is already mapped to id: " + material.getId() + "!");
            }
            nameLookup.put(formatName(material.getDisplayName()), material);
            return id;
        }
    }

    protected static AtomicReference<Material[]> getSubMaterialReference(short id) {
        return materialLookup[id];
    }

    /**
     * Registers the material in the material lookup service
     *
     * @param material to register
     * @return id of the material registered.
     */
    protected static int register(Material material, int id) {
        materialRegistry.register(material.getName(), id);
        Material[] subArray = new Material[] {material};
        if (!materialLookup[id].compareAndSet(NULL_MATERIAL_ARRAY, subArray)) {
            throw new IllegalArgumentException(materialLookup[id].get()[0] + " is already mapped to id: " + material.getId() + "!");
        }
        nameLookup.put(formatName(material.getName()), material);
        return id;
    }

    /**
     * Gets the material from the given id
     *
     * @param id to get
     * @return material or null if none found
     */
    public static Material get(short id) {
        if (id < 0 || id >= materialLookup.length) {
            return null;
        }
        return materialLookup[id].get()[0];
    }

    /**
     * Gets the material from the given id and data
     *
     * @param id to get
     * @param data to get
     * @return material or null if none found
     */
    public static Material get(short id, short data) {
        if (id < 0 || id >= materialLookup.length) {
            return null;
        }
        Material[] parent = materialLookup[id].get();
        if (parent[0] == null) {
            return null;
        }
        // Mask off data bits the base material does not use before indexing.
        data &= parent[0].getDataMask();
        return materialLookup[id].get()[data];
    }

    /**
     * Gets the material for the given BlockFullState
     *
     * @param state the full state of the block
     * @return Material of the BlockFullState
     */
    public static Material get(BlockFullState state) {
        return get(state.getPacked());
    }

    /**
     * Gets the material for the given packed full state
     *
     * @param packedState the packed state of the block
     * @return Material of the id
     */
    public static BlockMaterial get(int packedState) {
        short id = BlockFullState.getId(packedState);
        if (id < 0 || id >= materialLookup.length) {
            return null;
        }
        Material[] material = materialLookup[id].get();
        if (material[0] == null) {
            return null;
        }
        return (BlockMaterial) material[BlockFullState.getData(packedState) & (material[0].getDataMask())];
    }

    /**
     * Returns all current materials in the game
     *
     * @return an array of all materials
     */
    public static Material[] values() {
        //TODO: This is wrong, need to count # of registered materials
HashSet<Material> set = new HashSet<>(1000);
        for (AtomicReference<Material[]> aMaterialLookup : materialLookup) {
            // Unregistered ids still hold the shared {null} placeholder array; skip
            // them so the returned array no longer contains a spurious null element
            // (the old "aMaterialLookup.get() != null" guard was always true).
            Material base = aMaterialLookup.get()[0];
            if (base != null) {
                set.add(base);
            }
        }
        return set.toArray(new Material[0]);
    }

    /**
     * Gets the associated material with its name. Case-insensitive.
     *
     * @param name to lookup
     * @return material, or null if none found
     */
    public static Material get(String name) {
        return nameLookup.get(formatName(name));
    }

    /**
     * Normalizes a material name into the key used for {@code nameLookup}.
     *
     * Trims surrounding whitespace, replaces spaces with underscores, and lowercases
     * the result. (The previous javadoc claimed the opposite conversion — this method
     * builds a lookup key, not a human-legible display name.)
     *
     * @param matName the raw material name
     * @return normalized lookup key
     */
    private static String formatName(String matName) {
        return matName.trim().replaceAll(" ", "_").toLowerCase();
    }

    /**
     * Gets the minimum data mask required to account for all sub-materials of the material
     *
     * @param m the material
     * @return the minimum data mask
     * @throws IllegalStateException if the resolved root material has non-zero data
     */
    public static short getMinimumDatamask(Material m) {
        Material root = m;
        while (root.isSubMaterial()) {
            // BUG FIX: was "root = m.getParentMaterial()", which never advances past
            // m's immediate parent and loops forever whenever that parent is itself
            // a sub-material. Walk up from the current root instead.
            root = root.getParentMaterial();
        }
        if (root.getData() != 0) {
            throw new IllegalStateException("Root materials must have data set to zero");
        }
        Material[] subMaterials = root.getSubMaterials();
        short minimumMask = 0;
        // OR together all sub-material data values to find the bits actually in use.
        for (Material sm : subMaterials) {
            minimumMask |= sm.getData() & 0xFFFF;
        }
        if (m.hasLSBDataMask()) {
            // Round up to a contiguous low-bit mask (e.g. 0b101 -> 0b111).
            minimumMask = (short) (GenericMath.roundUpPow2(minimumMask + 1) - 1);
        }
        return minimumMask;
    }
}
/*
 * Copyright 2016 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.schemaorg.core;

import com.google.common.collect.ImmutableList;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;

/** Interface of <a href="http://schema.org/MedicalSign">http://schema.org/MedicalSign</a>. */
public interface MedicalSign extends MedicalSignOrSymptom {

  /**
   * Builder interface of <a
   * href="http://schema.org/MedicalSign">http://schema.org/MedicalSign</a>.
   */
  public interface Builder extends MedicalSignOrSymptom.Builder {

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext context);

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);

    @Override
    Builder setJsonLdId(@Nullable String value);

    @Override
    Builder setJsonLdReverse(String property, Thing obj);

    @Override
    Builder setJsonLdReverse(String property, Thing.Builder builder);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(URL value);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(String value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(Text value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(String value);

    /** Add a value to property cause. */
    Builder addCause(MedicalCause value);

    /** Add a value to property cause. */
    Builder addCause(MedicalCause.Builder value);

    /** Add a value to property cause. */
    Builder addCause(String value);

    /** Add a value to property code. */
    Builder addCode(MedicalCode value);

    /** Add a value to property code. */
    Builder addCode(MedicalCode.Builder value);

    /** Add a value to property code. */
    Builder addCode(String value);

    /** Add a value to property description. */
    Builder addDescription(Text value);

    /** Add a value to property description. */
    Builder addDescription(String value);

    /** Add a value to property guideline. */
    Builder addGuideline(MedicalGuideline value);

    /** Add a value to property guideline. */
    Builder addGuideline(MedicalGuideline.Builder value);

    /** Add a value to property guideline. */
    Builder addGuideline(String value);

    /** Add a value to property identifyingExam. */
    Builder addIdentifyingExam(PhysicalExam value);

    /** Add a value to property identifyingExam. */
    Builder addIdentifyingExam(String value);

    /** Add a value to property identifyingTest. */
    Builder addIdentifyingTest(MedicalTest value);

    /** Add a value to property identifyingTest. */
    Builder addIdentifyingTest(MedicalTest.Builder value);

    /** Add a value to property identifyingTest. */
    Builder addIdentifyingTest(String value);

    /** Add a value to property image. */
    Builder addImage(ImageObject value);

    /** Add a value to property image. */
    Builder addImage(ImageObject.Builder value);

    /** Add a value to property image. */
    Builder addImage(URL value);

    /** Add a value to property image. */
    Builder addImage(String value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork.Builder value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(URL value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(String value);

    /** Add a value to property medicineSystem. */
    Builder addMedicineSystem(MedicineSystem value);

    /** Add a value to property medicineSystem. */
    Builder addMedicineSystem(String value);

    /** Add a value to property name. */
    Builder addName(Text value);

    /** Add a value to property name. */
    Builder addName(String value);

    /** Add a value to property possibleTreatment. */
    Builder addPossibleTreatment(MedicalTherapy value);

    /** Add a value to property possibleTreatment. */
    Builder addPossibleTreatment(MedicalTherapy.Builder value);

    /** Add a value to property possibleTreatment. */
    Builder addPossibleTreatment(String value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action.Builder value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(String value);

    /** Add a value to property recognizingAuthority. */
    Builder addRecognizingAuthority(Organization value);

    /** Add a value to property recognizingAuthority. */
    Builder addRecognizingAuthority(Organization.Builder value);

    /** Add a value to property recognizingAuthority. */
    Builder addRecognizingAuthority(String value);

    /** Add a value to property relevantSpecialty. */
    Builder addRelevantSpecialty(MedicalSpecialty value);

    /** Add a value to property relevantSpecialty. */
    Builder addRelevantSpecialty(String value);

    /** Add a value to property sameAs. */
    Builder addSameAs(URL value);

    /** Add a value to property sameAs. */
    Builder addSameAs(String value);

    /** Add a value to property study. */
    Builder addStudy(MedicalStudy value);

    /** Add a value to property study. */
    Builder addStudy(MedicalStudy.Builder value);

    /** Add a value to property study. */
    Builder addStudy(String value);

    /** Add a value to property url. */
    Builder addUrl(URL value);

    /** Add a value to property url. */
    Builder addUrl(String value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article.Builder value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(String value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification.Builder value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(String value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The value of the property.
     */
    Builder addProperty(String name, SchemaOrgType value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param builder The schema.org object builder for the property value.
     */
    Builder addProperty(String name, Thing.Builder builder);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The string value of the property.
     */
    Builder addProperty(String name, String value);

    /** Build a {@link MedicalSign} object. */
    MedicalSign build();
  }

  /**
   * Returns the value list of property identifyingExam. Empty list is returned if the property not
   * set in current object.
   */
  ImmutableList<SchemaOrgType> getIdentifyingExamList();

  /**
   * Returns the value list of property identifyingTest. Empty list is returned if the property not
   * set in current object.
   */
  ImmutableList<SchemaOrgType> getIdentifyingTestList();
}
/* * Copyright (c) 2015 Layer. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.layer.atlas; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Typeface; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.helper.ItemTouchHelper; import android.util.AttributeSet; import android.view.View; import com.layer.atlas.adapters.AtlasMessagesAdapter; import com.layer.atlas.messagetypes.AtlasCellFactory; import com.layer.atlas.messagetypes.MessageStyle; import com.layer.atlas.provider.ParticipantProvider; import com.layer.atlas.util.itemanimators.NoChangeAnimator; import com.layer.atlas.util.views.SwipeableItem; import com.layer.sdk.LayerClient; import com.layer.sdk.messaging.Conversation; import com.layer.sdk.messaging.Message; import com.layer.sdk.query.Predicate; import com.layer.sdk.query.Query; import com.layer.sdk.query.SortDescriptor; import com.squareup.picasso.Picasso; public class AtlasMessagesRecyclerView extends RecyclerView { private AtlasMessagesAdapter mAdapter; private LinearLayoutManager mLayoutManager; private ItemTouchHelper mSwipeItemTouchHelper; private MessageStyle mMessageStyle; public AtlasMessagesRecyclerView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); parseStyle(getContext(), attrs, defStyle); } public AtlasMessagesRecyclerView(Context context, AttributeSet attrs) { 
this(context, attrs, 0); } public AtlasMessagesRecyclerView(Context context) { super(context); } public AtlasMessagesRecyclerView init(LayerClient layerClient, ParticipantProvider participantProvider, Picasso picasso) { mLayoutManager = new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false); mLayoutManager.setStackFromEnd(true); setLayoutManager(mLayoutManager); // Create an adapter that auto-scrolls if we're already at the bottom mAdapter = new AtlasMessagesAdapter(getContext(), layerClient, participantProvider, picasso) .setRecyclerView(this) .setOnMessageAppendListener(new AtlasMessagesAdapter.OnMessageAppendListener() { @Override public void onMessageAppend(AtlasMessagesAdapter adapter, Message message) { autoScroll(); } }); mAdapter.setStyle(mMessageStyle); super.setAdapter(mAdapter); // Don't flash items when changing content setItemAnimator(new NoChangeAnimator()); addOnScrollListener(new OnScrollListener() { @Override public void onScrollStateChanged(RecyclerView recyclerView, int newState) { for (AtlasCellFactory factory : mAdapter.getCellFactories()) { factory.onScrollStateChanged(newState); } } }); return this; } @Override public void setAdapter(Adapter adapter) { throw new RuntimeException("AtlasMessagesRecyclerView sets its own Adapter"); } /** * Automatically refresh on resume */ @Override protected void onVisibilityChanged(View changedView, int visibility) { super.onVisibilityChanged(changedView, visibility); if (visibility != View.VISIBLE) return; refresh(); } public AtlasMessagesRecyclerView refresh() { if (mAdapter != null) mAdapter.refresh(); return this; } /** * Updates the underlying AtlasMessagesAdapter with a Query for Messages in the given * Conversation. * * @param conversation Conversation to display Messages for. * @return This AtlasMessagesRecyclerView. 
*/ public AtlasMessagesRecyclerView setConversation(Conversation conversation) { mAdapter.setQuery(Query.builder(Message.class) .predicate(new Predicate(Message.Property.CONVERSATION, Predicate.Operator.EQUAL_TO, conversation)) .sortDescriptor(new SortDescriptor(Message.Property.POSITION, SortDescriptor.Order.ASCENDING)) .build()).refresh(); return this; } public AtlasMessagesRecyclerView setOnMessageSwipeListener(SwipeableItem.OnSwipeListener<Message> listener) { if (mSwipeItemTouchHelper != null) { mSwipeItemTouchHelper.attachToRecyclerView(null); } if (listener == null) { mSwipeItemTouchHelper = null; } else { listener.setBaseAdapter((AtlasMessagesAdapter) getAdapter()); mSwipeItemTouchHelper = new ItemTouchHelper(listener); mSwipeItemTouchHelper.attachToRecyclerView(this); } return this; } /** * Convenience pass-through to this list's AtlasMessagesAdapter. * * @see AtlasMessagesAdapter#addCellFactories(AtlasCellFactory...) */ public AtlasMessagesRecyclerView addCellFactories(AtlasCellFactory... cellFactories) { mAdapter.addCellFactories(cellFactories); return this; } public AtlasMessagesRecyclerView setTextTypeface(Typeface myTypeface, Typeface otherTypeface) { mMessageStyle.setMyTextTypeface(myTypeface); mMessageStyle.setOtherTextTypeface(otherTypeface); return this; } /** * Convenience pass-through to this list's LinearLayoutManager. * * @see LinearLayoutManager#findLastVisibleItemPosition() */ private int findLastVisibleItemPosition() { return mLayoutManager.findLastVisibleItemPosition(); } /** * Convenience pass-through to this list's AtlasMessagesAdapter. * * @see AtlasMessagesAdapter#setFooterView(View) */ public AtlasMessagesRecyclerView setFooterView(View footerView) { mAdapter.setFooterView(footerView); autoScroll(); return this; } /** * Convenience pass-through to this list's AtlasMessagesAdapter. 
* * @see AtlasMessagesAdapter#getFooterView() */ public View getFooterView() { return mAdapter.getFooterView(); } /** * Scrolls if the user is at the end */ private void autoScroll() { int end = mAdapter.getItemCount() - 1; if (end <= 0) return; int visible = findLastVisibleItemPosition(); // -3 because -1 seems too finicky if (visible >= (end - 3)) scrollToPosition(end); } public void parseStyle(Context context, AttributeSet attrs, int defStyle) { TypedArray ta = context.getTheme().obtainStyledAttributes(attrs, R.styleable.AtlasMessagesRecyclerView, R.attr.AtlasMessagesRecyclerView, defStyle); MessageStyle.Builder messageStyleBuilder = new MessageStyle.Builder(); messageStyleBuilder.myTextColor(ta.getColor(R.styleable.AtlasMessagesRecyclerView_myTextColor, context.getResources().getColor(R.color.atlas_text_black))); int myTextStyle = ta.getInt(R.styleable.AtlasMessagesRecyclerView_myTextStyle, Typeface.NORMAL); messageStyleBuilder.myTextStyle(myTextStyle); String myTextTypefaceName = ta.getString(R.styleable.AtlasMessagesRecyclerView_myTextTypeface); messageStyleBuilder.myTextTypeface(myTextTypefaceName != null ? Typeface.create(myTextTypefaceName, myTextStyle) : null); messageStyleBuilder.myTextSize(ta.getDimensionPixelSize(R.styleable.AtlasMessagesRecyclerView_myTextSize, context.getResources().getDimensionPixelSize(R.dimen.atlas_text_size_message_item))); messageStyleBuilder.otherTextColor(ta.getColor(R.styleable.AtlasMessagesRecyclerView_theirTextColor, context.getResources().getColor(R.color.atlas_color_primary_blue))); int otherTextStyle = ta.getInt(R.styleable.AtlasMessagesRecyclerView_theirTextStyle, Typeface.NORMAL); messageStyleBuilder.otherTextStyle(otherTextStyle); String otherTextTypefaceName = ta.getString(R.styleable.AtlasMessagesRecyclerView_theirTextTypeface); messageStyleBuilder.otherTextTypeface(otherTextTypefaceName != null ? 
Typeface.create(otherTextTypefaceName, otherTextStyle) : null); messageStyleBuilder.otherTextSize(ta.getDimensionPixelSize(R.styleable.AtlasMessagesRecyclerView_theirTextSize, context.getResources().getDimensionPixelSize(R.dimen.atlas_text_size_message_item))); messageStyleBuilder.myBubbleColor(ta.getColor(R.styleable.AtlasMessagesRecyclerView_myBubbleColor, context.getResources().getColor(R.color.atlas_color_primary_blue))); messageStyleBuilder.otherBubbleColor(ta.getColor(R.styleable.AtlasMessagesRecyclerView_theirBubbleColor, context.getResources().getColor(R.color.atlas_color_primary_gray))); ta.recycle(); this.mMessageStyle = messageStyleBuilder.build(); } }
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide;

import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.ex.AnActionListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.BalloonBuilder;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.openapi.util.registry.RegistryValueListener;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.Alarm;
import com.intellij.util.IJSwingUtilities;
import com.intellij.util.ui.Html;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import javax.swing.text.*;
import javax.swing.text.html.HTML;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.AWTEventListener;
import java.awt.event.MouseEvent;

/**
 * Application-level replacement for Swing's {@link ToolTipManager}: listens to all AWT
 * mouse events and shows IDE-styled balloon tooltips ({@link IdeTooltip}) instead of the
 * default ones.  Enabled/disabled via the {@code ide.tooltip.callout} registry key
 * (when enabled, the Swing ToolTipManager is turned off — see {@link #processEnabled()}).
 * <p>
 * State machine: at most one tooltip is "current" (visible; {@code myCurrentTooltip} /
 * {@code myCurrentTipUi}) and at most one is "queued" (scheduled to show;
 * {@code myQueuedTooltip} / {@code myShowRequest}).  All delays are scheduled on a single
 * {@link Alarm}.  Not thread-safe; expected to run on the EDT like all AWT dispatching.
 */
public class IdeTooltipManager implements ApplicationComponent, AWTEventListener {
  public static final String IDE_TOOLTIP_PLACE = "IdeTooltip";

  public static final Color GRAPHITE_COLOR = new Color(100, 100, 100, 230);

  // Backed by registry key "ide.tooltip.callout"; gates all event processing.
  private RegistryValue myIsEnabled;

  // Component whose tooltip is currently showing.
  private Component myCurrentComponent;
  // Component whose tooltip is scheduled but not yet shown.
  private Component myQueuedComponent;

  // Balloon UI of the current tooltip; null when nothing is visible.
  private BalloonImpl myCurrentTipUi;
  private MouseEvent myCurrentEvent;
  private boolean myCurrentTipIsCentered;

  // Pending auto-hide action; nulling it out cancels the hide (see cancelAutoHide()).
  private Runnable myHideRunnable;

  private final JBPopupFactory myPopupFactory;

  // false right after a hide, so the next tooltip uses the shorter "reshow" delay.
  private boolean myShowDelay = true;

  private final Alarm myAlarm = new Alarm();

  // Last mouse position the current tooltip was shown for (component coordinates).
  private int myX;
  private int myY;

  private IdeTooltip myCurrentTooltip;
  // Pending show action; nulling it out cancels a queued show.
  private Runnable myShowRequest;
  private IdeTooltip myQueuedTooltip;

  public IdeTooltipManager(JBPopupFactory popupFactory) {
    myPopupFactory = popupFactory;
  }

  /**
   * Registers this manager as a global AWT mouse listener, reacts to registry toggles,
   * and hides the current tooltip before any action is performed.
   */
  @Override
  public void initComponent() {
    myIsEnabled = Registry.get("ide.tooltip.callout");
    myIsEnabled.addListener(new RegistryValueListener.Adapter() {
      @Override
      public void afterValueChanged(RegistryValue value) {
        processEnabled();
      }
    }, ApplicationManager.getApplication());

    Toolkit.getDefaultToolkit().addAWTEventListener(this, AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_MOTION_EVENT_MASK);

    ActionManager.getInstance().addAnActionListener(new AnActionListener.Adapter() {
      @Override
      public void beforeActionPerformed(AnAction action, DataContext dataContext, AnActionEvent event) {
        hideCurrent(null, action, event);
      }
    }, ApplicationManager.getApplication());

    processEnabled();
  }

  /**
   * Global mouse-event hook: decides, per event type, whether to queue a tooltip for the
   * hovered component or hide the current/queued one.
   */
  @Override
  public void eventDispatched(AWTEvent event) {
    if (!myIsEnabled.asBoolean()) return;

    MouseEvent me = (MouseEvent)event;
    Component c = me.getComponent();
    if (me.getID() == MouseEvent.MOUSE_ENTERED) {
      // Entering a new component: hide the old tooltip first; only show the new one
      // if the hide was not vetoed.
      boolean canShow = true;
      if (c != myCurrentComponent) {
        canShow = hideCurrent(me, null, null);
      }
      if (canShow) {
        maybeShowFor(c, me);
      }
    }
    else if (me.getID() == MouseEvent.MOUSE_EXITED) {
      if (c == myCurrentComponent || c == myQueuedComponent) {
        hideCurrent(me, null, null);
      }
    }
    else if (me.getID() == MouseEvent.MOUSE_MOVED) {
      if (c == myCurrentComponent || c == myQueuedComponent) {
        if (myCurrentTipUi != null && myCurrentTipUi.wasFadedIn()) {
          // Fully-shown balloon: moving the mouse re-queues the tooltip if it can be hidden.
          if (hideCurrent(me, null, null)) {
            maybeShowFor(c, me);
          }
        }
        else {
          if (!myCurrentTipIsCentered) {
            myX = me.getX();
            myY = me.getY();
            if (c instanceof JComponent && ((JComponent)c).getToolTipText(me) == null && (myQueuedTooltip == null || !myQueuedTooltip.isHint())) {
              hideCurrent(me, null, null);//There is no tooltip or hint here, let's proceed it as MOUSE_EXITED
            }
            else {
              maybeShowFor(c, me);
            }
          }
        }
      }
      else if (myCurrentComponent == null && myQueuedComponent == null) {
        maybeShowFor(c, me);
      }
    }
    else if (me.getID() == MouseEvent.MOUSE_PRESSED) {
      if (c == myCurrentComponent) {
        hideCurrent(me, null, null);
      }
    }
    else if (me.getID() == MouseEvent.MOUSE_DRAGGED) {
      hideCurrent(me, null, null);
    }
  }

  /**
   * Queues a tooltip for {@code c} if it is a JComponent in an active window with
   * non-blank tooltip text.  Computes callout shift from the CENTER_TOOLTIP client
   * properties, with a JTree-specific adjustment so the balloon does not cover the row
   * under the mouse.
   */
  private void maybeShowFor(Component c, MouseEvent me) {
    if (!(c instanceof JComponent)) return;

    JComponent comp = (JComponent)c;
    Window wnd = SwingUtilities.getWindowAncestor(comp);
    if (wnd == null) return;

    if (!wnd.isActive()) {
      if (JBPopupFactory.getInstance().isChildPopupFocused(wnd)) return;
    }

    String tooltipText = comp.getToolTipText(me);
    if (tooltipText == null || tooltipText.trim().isEmpty()) return;

    boolean centerDefault = Boolean.TRUE.equals(comp.getClientProperty(UIUtil.CENTER_TOOLTIP_DEFAULT));
    boolean centerStrict = Boolean.TRUE.equals(comp.getClientProperty(UIUtil.CENTER_TOOLTIP_STRICT));
    int shift = centerStrict ? 0 : centerDefault ? 4 : 0;

    // Balloon may appear exactly above useful content, such behavior is rather annoying.
    if (c instanceof JTree) {
      TreePath path = ((JTree)c).getClosestPathForLocation(me.getX(), me.getY());
      if (path != null) {
        Rectangle pathBounds = ((JTree)c).getPathBounds(path);
        if (pathBounds != null && pathBounds.y + 4 < me.getY()) {
          shift += me.getY() - pathBounds.y - 4;
        }
      }
    }

    queueShow(comp, me, centerStrict || centerDefault, shift, -shift, -shift);
  }

  /**
   * Builds an {@link IdeTooltip} from the component's AWT tooltip text (rendered via
   * {@link #initPane}) and schedules it with the normal show delay.
   */
  private void queueShow(final JComponent c, final MouseEvent me, final boolean toCenter, int shift, int posChangeX, int posChangeY) {
    String aText = String.valueOf(c.getToolTipText(me));
    final IdeTooltip tooltip = new IdeTooltip(c, me.getPoint(), null, /*new Object()*/c, aText) {
      @Override
      protected boolean beforeShow() {
        myCurrentEvent = me;

        if (!c.isShowing()) return false;

        // Re-query the text at show time — it may have changed or gone away.
        String text = c.getToolTipText(myCurrentEvent);
        if (text == null || text.trim().isEmpty()) return false;

        JLayeredPane layeredPane = IJSwingUtilities.findParentOfType(c, JLayeredPane.class);

        final JEditorPane pane = initPane(text, new HintHint(me).setAwtTooltip(true), layeredPane);
        final Wrapper wrapper = new Wrapper(pane);
        setTipComponent(wrapper);
        return true;
      }
    }.setToCenter(toCenter).setCalloutShift(shift).setPositionChangeShift(posChangeX, posChangeY).setLayer(Balloon.Layer.top);

    show(tooltip, false);
  }

  /** Shows (or queues) the tooltip with animation enabled. */
  public IdeTooltip show(final IdeTooltip tooltip, boolean now) {
    return show(tooltip, now, true);
  }

  /**
   * Hides the current tooltip (unless vetoed), records {@code tooltip} as queued, and
   * either shows it immediately ({@code now}) or after the show/reshow delay.
   *
   * @return the same tooltip, for chaining.
   */
  public IdeTooltip show(final IdeTooltip tooltip, boolean now, final boolean animationEnabled) {
    myAlarm.cancelAllRequests();

    hideCurrent(null, tooltip, null, null);

    myQueuedComponent = tooltip.getComponent();
    myQueuedTooltip = tooltip;

    myShowRequest = new Runnable() {
      @Override
      public void run() {
        // A null myShowRequest means this request was cancelled after scheduling.
        if (myShowRequest == null) {
          return;
        }

        if (myQueuedComponent != tooltip.getComponent() || !tooltip.getComponent().isShowing()) {
          hideCurrent(null, tooltip, null, null, animationEnabled);
          return;
        }

        if (tooltip.beforeShow()) {
          show(tooltip, null, animationEnabled);
        }
        else {
          hideCurrent(null, tooltip, null, null, animationEnabled);
        }
      }
    };

    if (now) {
      myShowRequest.run();
    }
    else {
      // Shorter reshow delay when a tooltip was hidden very recently (myShowDelay == false).
      myAlarm.addRequest(myShowRequest, myShowDelay ? tooltip.getShowDelay() : tooltip.getInitialReshowDelay());
    }

    return tooltip;
  }

  /**
   * Actually displays the balloon: computes the anchor point (optionally centered on the
   * component), builds the balloon UI with the tooltip's colors/insets, promotes the
   * tooltip from "queued" to "current", and arms the dismiss timer.
   */
  private void show(final IdeTooltip tooltip, @Nullable Runnable beforeShow, boolean animationEnabled) {
    boolean toCenterX;
    boolean toCenterY;

    boolean toCenter = tooltip.isToCenter();
    boolean small = false;
    if (!toCenter && tooltip.isToCenterIfSmall()) {
      // "Small" components (< 64px on an axis) get centered on that axis.
      Dimension size = tooltip.getComponent().getSize();
      toCenterX = size.width < 64;
      toCenterY = size.height < 64;
      toCenter = toCenterX || toCenterY;
      small = true;
    }
    else {
      toCenterX = true;
      toCenterY = true;
    }

    Point effectivePoint = tooltip.getPoint();
    if (toCenter) {
      Rectangle bounds = tooltip.getComponent().getBounds();
      effectivePoint.x = toCenterX ? bounds.width / 2 : effectivePoint.x;
      effectivePoint.y = toCenterY ? bounds.height / 2 : effectivePoint.y;
    }

    // Same component, balloon still alive: just reposition the existing balloon.
    if (myCurrentComponent == tooltip.getComponent() && myCurrentTipUi != null && !myCurrentTipUi.isDisposed()) {
      myCurrentTipUi.show(new RelativePoint(tooltip.getComponent(), effectivePoint), tooltip.getPreferredPosition());
      return;
    }

    // Same component, same point: nothing to do.
    if (myCurrentComponent == tooltip.getComponent() && effectivePoint.equals(new Point(myX, myY))) {
      return;
    }

    // Tooltip-specific colors win over the theme defaults.
    Color bg = tooltip.getTextBackground() != null ? tooltip.getTextBackground() : getTextBackground(true);
    Color fg = tooltip.getTextForeground() != null ? tooltip.getTextForeground() : getTextForeground(true);
    Color border = tooltip.getBorderColor() != null ? tooltip.getBorderColor() : getBorderColor(true);

    BalloonBuilder builder = myPopupFactory.createBalloonBuilder(tooltip.getTipComponent())
      .setFillColor(bg)
      .setBorderColor(border)
      .setBorderInsets(tooltip.getBorderInsets())
      .setAnimationCycle(animationEnabled ? Registry.intValue("ide.tooltip.animationCycle") : 0)
      .setShowCallout(true)
      .setCalloutShift(small && tooltip.getCalloutShift() == 0 ? 2 : tooltip.getCalloutShift())
      .setPositionChangeXShift(tooltip.getPositionChangeX())
      .setPositionChangeYShift(tooltip.getPositionChangeY())
      .setHideOnKeyOutside(!tooltip.isExplicitClose())
      .setHideOnAction(!tooltip.isExplicitClose())
      .setLayer(tooltip.getLayer());
    tooltip.getTipComponent().setForeground(fg);
    tooltip.getTipComponent().setBorder(new EmptyBorder(1, 3, 2, 3));
    tooltip.getTipComponent().setFont(tooltip.getFont() != null ? tooltip.getFont() : getTextFont(true));

    if (beforeShow != null) {
      beforeShow.run();
    }

    myCurrentTipUi = (BalloonImpl)builder.createBalloon();
    myCurrentTipUi.setAnimationEnabled(animationEnabled);
    tooltip.setUi(myCurrentTipUi);
    myCurrentComponent = tooltip.getComponent();
    myX = effectivePoint.x;
    myY = effectivePoint.y;
    myCurrentTipIsCentered = toCenter;
    myCurrentTooltip = tooltip;
    // Queued state is consumed: this tooltip is now the current one.
    myShowRequest = null;
    myQueuedComponent = null;
    myQueuedTooltip = null;

    myCurrentTipUi.show(new RelativePoint(tooltip.getComponent(), effectivePoint), tooltip.getPreferredPosition());
    myAlarm.addRequest(new Runnable() {
      @Override
      public void run() {
        if (myCurrentTooltip == tooltip && tooltip.canBeDismissedOnTimeout()) {
          hideCurrent(null, null, null);
        }
      }
    }, tooltip.getDismissDelay());
  }

  @SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
  public Color getTextForeground(boolean awtTooltip) {
    return UIUtil.getToolTipForeground();
  }

  @SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
  public Color getLinkForeground(boolean awtTooltip) {
    return JBColor.blue;
  }

  @SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
  public Color getTextBackground(boolean awtTooltip) {
    return UIUtil.getToolTipBackground();
  }

  /** Returns the list-bullet image path matching the current (Darcula or default) LaF. */
  @SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
  public String getUlImg(boolean awtTooltip) {
    AllIcons.General.Mdot.getIconWidth(); // keep icon reference
    return UIUtil.isUnderDarcula() ? "/general/mdot-white.png" : "/general/mdot.png";
  }

  @SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
  public Color getBorderColor(boolean awtTooltip) {
    return new JBColor(Gray._160, new Color(154, 154, 102));
  }

  @SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
  public boolean isOwnBorderAllowed(boolean awtTooltip) {
    return !awtTooltip;
  }

  @SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
  public boolean isOpaqueAllowed(boolean awtTooltip) {
    return !awtTooltip;
  }

  @SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
  public Font getTextFont(boolean awtTooltip) {
    return UIManager.getFont("ToolTip.font");
  }

  /** True if a tooltip is currently visible. */
  public boolean hasCurrent() {
    return myCurrentTooltip != null;
  }

  /**
   * Requests the current tooltip be hidden.
   *
   * @return false if the hide was vetoed (e.g. the mouse is inside a click-blocking balloon).
   */
  public boolean hideCurrent(@Nullable MouseEvent me) {
    return hideCurrent(me, null, null, null);
  }

  private boolean hideCurrent(@Nullable MouseEvent me, @Nullable AnAction action, @Nullable AnActionEvent event) {
    return hideCurrent(me, null, action, event, myCurrentTipUi != null && myCurrentTipUi.isAnimationEnabled());
  }

  private boolean hideCurrent(@Nullable MouseEvent me, @Nullable IdeTooltip tooltipToShow, @Nullable AnAction action, @Nullable AnActionEvent event) {
    return hideCurrent(me, tooltipToShow, action, event, myCurrentTipUi != null && myCurrentTipUi.isAnimationEnabled());
  }

  /**
   * Core hide logic.  May veto the hide (returning false) when the mouse is over the
   * balloon, when the tooltip forbids auto-hide for this event, or when the tooltip about
   * to be shown is the same non-hint tooltip.  Otherwise hides now, or — for pure mouse
   * moves — after the "autoDismissDeadZone" delay so the user can reach the balloon.
   */
  private boolean hideCurrent(@Nullable MouseEvent me,
                              @Nullable IdeTooltip tooltipToShow,
                              @Nullable AnAction action,
                              @Nullable AnActionEvent event,
                              final boolean animationEnabled) {
    if (myCurrentTooltip != null && me != null && myCurrentTooltip.isInside(RelativePoint.fromScreen(me.getLocationOnScreen()))) {
      if (me.getButton() == MouseEvent.NOBUTTON || myCurrentTipUi == null || myCurrentTipUi.isBlockClicks()) {
        return false;
      }
    }

    // Cancel any queued show regardless of whether the visible tooltip survives.
    myShowRequest = null;
    myQueuedComponent = null;
    myQueuedTooltip = null;

    if (myCurrentTooltip == null) return true;

    if (myCurrentTipUi != null) {
      RelativePoint target = me != null ? new RelativePoint(me) : null;
      boolean isInside = target != null && myCurrentTipUi.isInside(target);
      boolean isMovingForward = target != null && myCurrentTipUi.isMovingForward(target);
      boolean canAutoHide = myCurrentTooltip.canAutohideOn(new TooltipEvent(me, isInside || isMovingForward, action, event));
      boolean implicitMouseMove = me != null &&
                                  (me.getID() == MouseEvent.MOUSE_MOVED ||
                                   me.getID() == MouseEvent.MOUSE_EXITED ||
                                   me.getID() == MouseEvent.MOUSE_ENTERED);
      if (!canAutoHide
          || (myCurrentTooltip.isExplicitClose() && implicitMouseMove)
          || (tooltipToShow != null && !tooltipToShow.isHint() && Comparing.equal(myCurrentTooltip, tooltipToShow))) {
        // Veto: also cancel a pending delayed hide so the tooltip stays.
        if (myHideRunnable != null) {
          myHideRunnable = null;
        }
        return false;
      }
    }

    myHideRunnable = new Runnable() {
      @Override
      public void run() {
        // Still armed? (cancelAutoHide() nulls this field to abort.)
        if (myHideRunnable != null) {
          hideCurrentNow(animationEnabled);
          myHideRunnable = null;
        }
      }
    };

    if (me != null && me.getButton() == MouseEvent.NOBUTTON) {
      // Mouse move without a button: give the user a grace period to reach the balloon.
      myAlarm.addRequest(myHideRunnable, Registry.intValue("ide.tooltip.autoDismissDeadZone"));
    }
    else {
      myHideRunnable.run();
      myHideRunnable = null;
    }

    return true;
  }

  /**
   * Unconditionally hides the balloon and resets all current/queued state.  Also disables
   * the normal show delay for "ide.tooltip.reshowDelay" ms so an immediately-following
   * tooltip appears faster.
   */
  public void hideCurrentNow(boolean animationEnabled) {
    if (myCurrentTipUi != null) {
      myCurrentTipUi.setAnimationEnabled(animationEnabled);
      myCurrentTipUi.hide();
      myCurrentTooltip.onHidden();
      myShowDelay = false;
      myAlarm.addRequest(new Runnable() {
        @Override
        public void run() {
          myShowDelay = true;
        }
      }, Registry.intValue("ide.tooltip.reshowDelay"));
    }

    myShowRequest = null;
    myCurrentTooltip = null;
    myCurrentTipUi = null;
    myCurrentComponent = null;
    myQueuedComponent = null;
    myQueuedTooltip = null;
    myCurrentEvent = null;
    myCurrentTipIsCentered = false;
    myX = -1;
    myY = -1;
  }

  // When IDE tooltips are enabled, the stock Swing ToolTipManager must be disabled
  // (and vice versa) so exactly one of them handles tooltips.
  private void processEnabled() {
    if (myIsEnabled.asBoolean()) {
      ToolTipManager.sharedInstance().setEnabled(false);
    }
    else {
      ToolTipManager.sharedInstance().setEnabled(true);
    }
  }

  @Override
  public void disposeComponent() {
  }

  public static IdeTooltipManager getInstance() {
    return ApplicationManager.getApplication().getComponent(IdeTooltipManager.class);
  }

  /** Hides {@code tooltip} if it is the current or queued one; a null argument hides unconditionally. */
  public void hide(@Nullable IdeTooltip tooltip) {
    if (myCurrentTooltip == tooltip || tooltip == null || tooltip == myQueuedTooltip) {
      hideCurrent(null, null, null);
    }
  }

  /** Cancels a pending delayed auto-hide (the alarm still fires, but the runnable no-ops). */
  public void cancelAutoHide() {
    myHideRunnable = null;
  }

  public static JEditorPane initPane(@NonNls String text, final HintHint hintHint, @Nullable final JLayeredPane layeredPane) {
    return initPane(new Html(text), hintHint, layeredPane);
  }

  /**
   * Builds a read-only HTML {@link JEditorPane} for tooltip content.  The pane wraps its
   * text to at most 80% of the layered pane's (or screen's) width by computing a fixed-width
   * preferred size once, and renders {@code <hr>} with {@link CustomHrView}.
   */
  public static JEditorPane initPane(@NonNls Html html, final HintHint hintHint, @Nullable final JLayeredPane layeredPane) {
    final Ref<Dimension> prefSize = new Ref<Dimension>(null);
    @NonNls String text = HintUtil.prepareHintText(html, hintHint);

    final boolean[] prefSizeWasComputed = {false};
    final JEditorPane pane = new JEditorPane() {
      @Override
      public Dimension getPreferredSize() {
        if (!prefSizeWasComputed[0] && hintHint.isAwtTooltip()) {
          JLayeredPane lp = layeredPane;
          if (lp == null) {
            JRootPane rootPane = UIUtil.getRootPane(this);
            if (rootPane != null) {
              lp = rootPane.getLayeredPane();
            }
          }

          Dimension size;
          if (lp != null) {
            size = lp.getSize();
            prefSizeWasComputed[0] = true;
          }
          else {
            size = ScreenUtil.getScreenRectangle(0, 0).getSize();
          }
          int fitWidth = (int)(size.width * 0.8);
          Dimension prefSizeOriginal = super.getPreferredSize();
          if (prefSizeOriginal.width > fitWidth) {
            // Too wide: fix the width and let the height grow to fit the wrapped text.
            setSize(new Dimension(fitWidth, Integer.MAX_VALUE));
            Dimension fixedWidthSize = super.getPreferredSize();
            Dimension minSize = super.getMinimumSize();
            prefSize.set(new Dimension(fitWidth > minSize.width ? fitWidth : minSize.width, fixedWidthSize.height));
          }
          else {
            prefSize.set(new Dimension(prefSizeOriginal));
          }
        }

        Dimension s = prefSize.get() != null ? new Dimension(prefSize.get()) : super.getPreferredSize();
        Border b = getBorder();
        if (b != null) {
          JBInsets.addTo(s, b.getBorderInsets(this));
        }
        return s;
      }

      @Override
      public void setPreferredSize(Dimension preferredSize) {
        super.setPreferredSize(preferredSize);
        prefSize.set(preferredSize);
      }
    };

    final HTMLEditorKit.HTMLFactory factory = new HTMLEditorKit.HTMLFactory() {
      @Override
      public View create(Element elem) {
        AttributeSet attrs = elem.getAttributes();
        Object elementName = attrs.getAttribute(AbstractDocument.ElementNameAttribute);
        Object o = elementName != null ? null : attrs.getAttribute(StyleConstants.NameAttribute);
        if (o instanceof HTML.Tag) {
          HTML.Tag kind = (HTML.Tag)o;
          if (kind == HTML.Tag.HR) {
            return new CustomHrView(elem, hintHint.getTextForeground());
          }
        }
        return super.create(elem);
      }
    };

    HTMLEditorKit kit = new HTMLEditorKit() {
      @Override
      public ViewFactory getViewFactory() {
        return factory;
      }
    };
    pane.setEditorKit(kit);
    pane.setText(text);

    pane.setCaretPosition(0);
    pane.setEditable(false);

    if (hintHint.isOwnBorderAllowed()) {
      setBorder(pane);
      setColors(pane);
    }
    else {
      pane.setBorder(null);
    }

    if (!hintHint.isAwtTooltip()) {
      prefSizeWasComputed[0] = true;
    }

    final boolean opaque = hintHint.isOpaqueAllowed();
    pane.setOpaque(opaque);
    if (UIUtil.isUnderNimbusLookAndFeel() && !opaque) {
      pane.setBackground(UIUtil.TRANSPARENT_COLOR);
    }
    else {
      pane.setBackground(hintHint.getTextBackground());
    }

    return pane;
  }

  public static void setColors(JComponent pane) {
    pane.setForeground(JBColor.foreground());
    pane.setBackground(HintUtil.INFORMATION_COLOR);
    pane.setOpaque(true);
  }

  public static void setBorder(JComponent pane) {
    pane.setBorder(
      BorderFactory.createCompoundBorder(BorderFactory.createLineBorder(Color.black), BorderFactory.createEmptyBorder(0, 5, 0, 5)));
  }

  @NotNull
  @Override
  public String getComponentName() {
    return "IDE Tooltip Manager";
  }

  public boolean isQueuedToShow(IdeTooltip tooltip) {
    return Comparing.equal(myQueuedTooltip, tooltip);
  }
}
/* * * * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * */ package org.apache.tinkerpop.gremlin.process.traversal.step.filter; import org.apache.tinkerpop.gremlin.process.traversal.Pop; import org.apache.tinkerpop.gremlin.process.traversal.Step; import org.apache.tinkerpop.gremlin.process.traversal.Traversal; import org.apache.tinkerpop.gremlin.process.traversal.Traverser; import org.apache.tinkerpop.gremlin.process.traversal.step.Scoping; import org.apache.tinkerpop.gremlin.process.traversal.step.TraversalParent; import org.apache.tinkerpop.gremlin.process.traversal.step.map.MapStep; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.ProfileStep; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.StartStep; import org.apache.tinkerpop.gremlin.process.traversal.strategy.decoration.ConjunctionStrategy; import org.apache.tinkerpop.gremlin.process.traversal.traverser.TraverserRequirement; import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalHelper; import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalUtil; import org.apache.tinkerpop.gremlin.structure.util.StringFactory; import java.util.Collections; import java.util.HashSet; import java.util.List; 
import java.util.Set; /** * @author Marko A. Rodriguez (http://markorodriguez.com) */ public final class WhereTraversalStep<S> extends FilterStep<S> implements TraversalParent, Scoping { protected Traversal.Admin<?, ?> whereTraversal; protected final Set<String> scopeKeys = new HashSet<>(); public WhereTraversalStep(final Traversal.Admin traversal, final Traversal<?, ?> whereTraversal) { super(traversal); this.whereTraversal = whereTraversal.asAdmin(); this.configureStartAndEndSteps(this.whereTraversal); if (this.scopeKeys.isEmpty()) throw new IllegalArgumentException("A where()-traversal must have at least a start or end label (i.e. variable): " + whereTraversal); this.whereTraversal = this.integrateChild(this.whereTraversal); } private void configureStartAndEndSteps(final Traversal.Admin<?, ?> whereTraversal) { ConjunctionStrategy.instance().apply(whereTraversal); //// START STEP to WhereStartStep final Step<?, ?> startStep = whereTraversal.getStartStep(); if (startStep instanceof ConjunctionStep || startStep instanceof NotStep) { // for conjunction- and not-steps ((TraversalParent) startStep).getLocalChildren().forEach(this::configureStartAndEndSteps); } else if (StartStep.isVariableStartStep(startStep)) { // as("a").out()... 
traversals final String label = startStep.getLabels().iterator().next(); this.scopeKeys.add(label); TraversalHelper.replaceStep(startStep, new WhereStartStep(whereTraversal, label), whereTraversal); } else if (!whereTraversal.getEndStep().getLabels().isEmpty()) { // ...out().as("a") traversals TraversalHelper.insertBeforeStep(new WhereStartStep(whereTraversal, null), (Step) startStep, whereTraversal); } //// END STEP to WhereEndStep final Step<?, ?> endStep = whereTraversal.getEndStep(); if (!endStep.getLabels().isEmpty()) { if (endStep.getLabels().size() > 1) throw new IllegalArgumentException("The end step of a where()-traversal can only have one label: " + endStep); final String label = endStep.getLabels().iterator().next(); this.scopeKeys.add(label); endStep.removeLabel(label); whereTraversal.addStep(new WhereEndStep(whereTraversal, label)); } } @Override protected boolean filter(final Traverser.Admin<S> traverser) { return TraversalUtil.test((Traverser.Admin) traverser, this.whereTraversal); } @Override public List<Traversal.Admin<?, ?>> getLocalChildren() { return null == this.whereTraversal ? Collections.emptyList() : Collections.singletonList(this.whereTraversal); } @Override public String toString() { return StringFactory.stepString(this, this.whereTraversal); } @Override public Set<String> getScopeKeys() { return Collections.unmodifiableSet(this.scopeKeys); } @Override public WhereTraversalStep<S> clone() { final WhereTraversalStep<S> clone = (WhereTraversalStep<S>) super.clone(); clone.whereTraversal = clone.integrateChild(this.whereTraversal.clone()); return clone; } @Override public int hashCode() { return super.hashCode() ^ this.whereTraversal.hashCode(); } @Override public Set<TraverserRequirement> getRequirements() { return TraversalHelper.getLabels(TraversalHelper.getRootTraversal(this.traversal)).stream().filter(this.scopeKeys::contains).findAny().isPresent() ? 
TYPICAL_GLOBAL_REQUIREMENTS : TYPICAL_LOCAL_REQUIREMENTS; } ////////////////////////////// public static class WhereStartStep<S> extends MapStep<S, Object> implements Scoping { private String selectKey; public WhereStartStep(final Traversal.Admin traversal, final String selectKey) { super(traversal); this.selectKey = selectKey; } @Override protected Object map(final Traverser.Admin<S> traverser) { if (this.getTraversal().getEndStep() instanceof WhereEndStep) ((WhereEndStep) this.getTraversal().getEndStep()).processStartTraverser(traverser); else if (this.getTraversal().getEndStep() instanceof ProfileStep && this.getTraversal().getEndStep().getPreviousStep() instanceof WhereEndStep) // TOTAL SUCKY HACK! ((WhereEndStep) this.getTraversal().getEndStep().getPreviousStep()).processStartTraverser(traverser); return null == this.selectKey ? traverser.get() : this.getScopeValue(Pop.last, this.selectKey, traverser); } @Override public String toString() { return StringFactory.stepString(this, this.selectKey); } @Override public int hashCode() { return super.hashCode() ^ (null == this.selectKey ? "null".hashCode() : this.selectKey.hashCode()); } public void removeScopeKey() { this.selectKey = null; } @Override public Set<String> getScopeKeys() { return null == this.selectKey ? 
Collections.emptySet() : Collections.singleton(this.selectKey); } } public static class WhereEndStep extends FilterStep<Object> implements Scoping { private final String matchKey; private Object matchValue = null; public WhereEndStep(final Traversal.Admin traversal, final String matchKey) { super(traversal); this.matchKey = matchKey; } public void processStartTraverser(final Traverser.Admin traverser) { if (null != this.matchKey) this.matchValue = this.getScopeValue(Pop.last, this.matchKey, traverser); } @Override protected boolean filter(final Traverser.Admin<Object> traverser) { return null == this.matchKey || traverser.get().equals(this.matchValue); } @Override public String toString() { return StringFactory.stepString(this, this.matchKey); } @Override public int hashCode() { return super.hashCode() ^ (null == this.matchKey ? "null".hashCode() : this.matchKey.hashCode()); } @Override public Set<String> getScopeKeys() { return null == this.matchKey ? Collections.emptySet() : Collections.singleton(this.matchKey); } } ////////////////////////////// }
/* * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.imagepipeline.animated.factory; import android.graphics.Bitmap; import android.graphics.Rect; import com.facebook.common.references.CloseableReference; import com.facebook.common.references.ResourceReleaser; import com.facebook.common.soloader.SoLoaderShim; import com.facebook.imageformat.ImageFormat; import com.facebook.imagepipeline.animated.base.AnimatedDrawableBackend; import com.facebook.imagepipeline.animated.base.AnimatedImageResult; import com.facebook.imagepipeline.animated.factory.AnimatedImageFactoryImpl; import com.facebook.imagepipeline.animated.impl.AnimatedDrawableBackendProvider; import com.facebook.imagepipeline.animated.impl.AnimatedImageCompositor; import com.facebook.imagepipeline.bitmaps.PlatformBitmapFactory; import com.facebook.imagepipeline.common.ImageDecodeOptions; import com.facebook.imagepipeline.image.CloseableAnimatedImage; import com.facebook.imagepipeline.image.EncodedImage; import com.facebook.imagepipeline.memory.PooledByteBuffer; import com.facebook.imagepipeline.testing.MockBitmapFactory; import com.facebook.imagepipeline.testing.TrivialPooledByteBuffer; import com.facebook.animated.webp.WebPImage; import org.junit.Rule; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.modules.junit4.rule.PowerMockRule; import org.robolectric.RobolectricTestRunner; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareOnlyThisForTest; import static org.junit.Assert.*; import static org.mockito.Mockito.*; /** * Tests for {@link AnimatedImageFactory} */ 
@RunWith(RobolectricTestRunner.class) @PrepareOnlyThisForTest({ WebPImage.class, AnimatedImageFactoryImpl.class, AnimatedImageCompositor.class}) @PowerMockIgnore({ "org.mockito.*", "org.robolectric.*", "android.*" }) public class AnimatedImageFactoryWebPImplTest { private static final Bitmap.Config DEFAULT_BITMAP_CONFIG = Bitmap.Config.ARGB_8888; @Rule public PowerMockRule rule = new PowerMockRule(); static { SoLoaderShim.setInTestMode(); } private static ResourceReleaser<PooledByteBuffer> FAKE_RESOURCE_RELEASER = new ResourceReleaser<PooledByteBuffer>() { @Override public void release(PooledByteBuffer value) { } }; private static ResourceReleaser<Bitmap> FAKE_BITMAP_RESOURCE_RELEASER = new ResourceReleaser<Bitmap>() { @Override public void release(Bitmap value) { } }; private AnimatedDrawableBackendProvider mMockAnimatedDrawableBackendProvider; private PlatformBitmapFactory mMockBitmapFactory; private AnimatedImageFactory mAnimatedImageFactory; private WebPImage mWebPImageMock; @Before public void setup() { PowerMockito.mockStatic(WebPImage.class); mWebPImageMock = mock(WebPImage.class); mMockAnimatedDrawableBackendProvider = mock(AnimatedDrawableBackendProvider.class); mMockBitmapFactory = mock(PlatformBitmapFactory.class); mAnimatedImageFactory = new AnimatedImageFactoryImpl( mMockAnimatedDrawableBackendProvider, mMockBitmapFactory); ((AnimatedImageFactoryImpl) mAnimatedImageFactory).sWebpAnimatedImageDecoder = mWebPImageMock; } @Test public void testCreateDefaults() { WebPImage mockWebPImage = mock(WebPImage.class); // Expect a call to WebPImage.create TrivialPooledByteBuffer byteBuffer = createByteBuffer(); when(mWebPImageMock.decode(byteBuffer.getNativePtr(), byteBuffer.size())) .thenReturn(mockWebPImage); EncodedImage encodedImage = new EncodedImage( CloseableReference.of(byteBuffer, FAKE_RESOURCE_RELEASER)); encodedImage.setImageFormat(ImageFormat.UNKNOWN); CloseableAnimatedImage closeableImage = (CloseableAnimatedImage) mAnimatedImageFactory.decodeWebP( 
encodedImage, ImageDecodeOptions.defaults(), DEFAULT_BITMAP_CONFIG); // Verify we got the right result AnimatedImageResult imageResult = closeableImage.getImageResult(); assertSame(mockWebPImage, imageResult.getImage()); assertNull(imageResult.getPreviewBitmap()); assertFalse(imageResult.hasDecodedFrame(0)); // Should not have interacted with these. verifyZeroInteractions(mMockAnimatedDrawableBackendProvider); verifyZeroInteractions(mMockBitmapFactory); } @Test public void testCreateWithPreviewBitmap() throws Exception { WebPImage mockWebPImage = mock(WebPImage.class); Bitmap mockBitmap = MockBitmapFactory.create(50, 50, DEFAULT_BITMAP_CONFIG); // Expect a call to WebPImage.create TrivialPooledByteBuffer byteBuffer = createByteBuffer(); when(mWebPImageMock.decode(byteBuffer.getNativePtr(), byteBuffer.size())) .thenReturn(mockWebPImage); when(mockWebPImage.getWidth()).thenReturn(50); when(mockWebPImage.getHeight()).thenReturn(50); // For decoding preview frame, expect some calls. final AnimatedDrawableBackend mockAnimatedDrawableBackend = createAnimatedDrawableBackendMock(1); when(mMockAnimatedDrawableBackendProvider.get( any(AnimatedImageResult.class), isNull(Rect.class))) .thenReturn(mockAnimatedDrawableBackend); when(mMockBitmapFactory.createBitmap(50, 50, DEFAULT_BITMAP_CONFIG)) .thenReturn(CloseableReference.of(mockBitmap, FAKE_BITMAP_RESOURCE_RELEASER)); AnimatedImageCompositor mockCompositor = mock(AnimatedImageCompositor.class); PowerMockito.whenNew(AnimatedImageCompositor.class) .withAnyArguments() .thenReturn(mockCompositor); ImageDecodeOptions imageDecodeOptions = ImageDecodeOptions.newBuilder() .setDecodePreviewFrame(true) .build(); EncodedImage encodedImage = new EncodedImage( CloseableReference.of(byteBuffer, FAKE_RESOURCE_RELEASER)); encodedImage.setImageFormat(ImageFormat.UNKNOWN); CloseableAnimatedImage closeableImage = (CloseableAnimatedImage) mAnimatedImageFactory.decodeWebP( encodedImage, imageDecodeOptions, DEFAULT_BITMAP_CONFIG); // Verify we 
got the right result AnimatedImageResult imageResult = closeableImage.getImageResult(); assertSame(mockWebPImage, imageResult.getImage()); assertNotNull(imageResult.getPreviewBitmap()); assertFalse(imageResult.hasDecodedFrame(0)); // Should not have interacted with these. verify(mMockAnimatedDrawableBackendProvider).get( any(AnimatedImageResult.class), isNull(Rect.class)); verifyNoMoreInteractions(mMockAnimatedDrawableBackendProvider); verify(mMockBitmapFactory).createBitmap(50, 50, DEFAULT_BITMAP_CONFIG); verifyNoMoreInteractions(mMockBitmapFactory); verify(mockCompositor).renderFrame(0, mockBitmap); } @Test public void testCreateWithDecodeAlFrames() throws Exception { WebPImage mockWebPImage = mock(WebPImage.class); Bitmap mockBitmap1 = MockBitmapFactory.create(50, 50, DEFAULT_BITMAP_CONFIG); Bitmap mockBitmap2 = MockBitmapFactory.create(50, 50, DEFAULT_BITMAP_CONFIG); // Expect a call to WebPImage.create TrivialPooledByteBuffer byteBuffer = createByteBuffer(); when(mWebPImageMock.decode(byteBuffer.getNativePtr(), byteBuffer.size())) .thenReturn(mockWebPImage); when(mockWebPImage.getWidth()).thenReturn(50); when(mockWebPImage.getHeight()).thenReturn(50); // For decoding preview frame, expect some calls. 
final AnimatedDrawableBackend mockAnimatedDrawableBackend = createAnimatedDrawableBackendMock(2); when( mMockAnimatedDrawableBackendProvider.get( any(AnimatedImageResult.class), isNull(Rect.class))) .thenReturn(mockAnimatedDrawableBackend); when(mMockBitmapFactory.createBitmap(50, 50, DEFAULT_BITMAP_CONFIG)) .thenReturn(CloseableReference.of(mockBitmap1, FAKE_BITMAP_RESOURCE_RELEASER)) .thenReturn(CloseableReference.of(mockBitmap2, FAKE_BITMAP_RESOURCE_RELEASER)); AnimatedImageCompositor mockCompositor = mock(AnimatedImageCompositor.class); PowerMockito.whenNew(AnimatedImageCompositor.class) .withAnyArguments() .thenReturn(mockCompositor); ImageDecodeOptions imageDecodeOptions = ImageDecodeOptions.newBuilder() .setDecodePreviewFrame(true) .setDecodeAllFrames(true) .build(); EncodedImage encodedImage = new EncodedImage( CloseableReference.of(byteBuffer, FAKE_RESOURCE_RELEASER)); encodedImage.setImageFormat(ImageFormat.UNKNOWN); CloseableAnimatedImage closeableImage = (CloseableAnimatedImage) mAnimatedImageFactory.decodeWebP( encodedImage, imageDecodeOptions, DEFAULT_BITMAP_CONFIG); // Verify we got the right result AnimatedImageResult imageResult = closeableImage.getImageResult(); assertSame(mockWebPImage, imageResult.getImage()); assertNotNull(imageResult.getDecodedFrame(0)); assertNotNull(imageResult.getDecodedFrame(1)); assertNotNull(imageResult.getPreviewBitmap()); // Should not have interacted with these. 
verify(mMockAnimatedDrawableBackendProvider).get( any(AnimatedImageResult.class), isNull(Rect.class)); verifyNoMoreInteractions(mMockAnimatedDrawableBackendProvider); verify(mMockBitmapFactory, times(2)).createBitmap(50, 50, DEFAULT_BITMAP_CONFIG); verifyNoMoreInteractions(mMockBitmapFactory); verify(mockCompositor).renderFrame(0, mockBitmap1); verify(mockCompositor).renderFrame(1, mockBitmap2); } private TrivialPooledByteBuffer createByteBuffer() { byte[] buf = new byte[16]; return new TrivialPooledByteBuffer(buf); } /** * Creates the mock for the AnimatedDrawableBackend with the number of frame * @param frameCount The number of frame to mock */ private AnimatedDrawableBackend createAnimatedDrawableBackendMock(final int frameCount) { // For decoding preview frame, expect some calls. final AnimatedDrawableBackend mockAnimatedDrawableBackend = mock(AnimatedDrawableBackend.class); when(mockAnimatedDrawableBackend.getFrameCount()).thenReturn(frameCount); when(mockAnimatedDrawableBackend.getWidth()).thenReturn(50); when(mockAnimatedDrawableBackend.getHeight()).thenReturn(50); return mockAnimatedDrawableBackend; } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.util; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.apache.camel.CamelContext; import org.apache.camel.CamelExecutionException; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.ExchangePattern; import org.apache.camel.InvalidPayloadException; import org.apache.camel.Message; import org.apache.camel.NoSuchBeanException; import org.apache.camel.NoSuchEndpointException; import org.apache.camel.NoSuchHeaderException; import org.apache.camel.NoSuchPropertyException; import org.apache.camel.NoTypeConversionAvailableException; import org.apache.camel.TypeConverter; import org.apache.camel.spi.UnitOfWork; /** * Some helper methods for working with {@link Exchange} objects * * @version */ public final class ExchangeHelper { /** * Utility classes should not have a public constructor. 
*/ private ExchangeHelper() { } /** * Extracts the Exchange.BINDING of the given type or null if not present * * @param exchange the message exchange * @param type the expected binding type * @return the binding object of the given type or null if it could not be found or converted */ public static <T> T getBinding(Exchange exchange, Class<T> type) { return exchange != null ? exchange.getProperty(Exchange.BINDING, type) : null; } /** * Attempts to resolve the endpoint for the given value * * @param exchange the message exchange being processed * @param value the value which can be an {@link Endpoint} or an object * which provides a String representation of an endpoint via * {@link #toString()} * @return the endpoint * @throws NoSuchEndpointException if the endpoint cannot be resolved */ public static Endpoint resolveEndpoint(Exchange exchange, Object value) throws NoSuchEndpointException { Endpoint endpoint; if (value instanceof Endpoint) { endpoint = (Endpoint) value; } else { String uri = value.toString().trim(); endpoint = CamelContextHelper.getMandatoryEndpoint(exchange.getContext(), uri); } return endpoint; } public static <T> T getMandatoryProperty(Exchange exchange, String propertyName, Class<T> type) throws NoSuchPropertyException { T result = exchange.getProperty(propertyName, type); if (result != null) { return result; } throw new NoSuchPropertyException(exchange, propertyName, type); } public static <T> T getMandatoryHeader(Exchange exchange, String propertyName, Class<T> type) throws NoSuchHeaderException { T answer = exchange.getIn().getHeader(propertyName, type); if (answer == null) { throw new NoSuchHeaderException(exchange, propertyName, type); } return answer; } /** * Returns the mandatory inbound message body of the correct type or throws * an exception if it is not present */ public static Object getMandatoryInBody(Exchange exchange) throws InvalidPayloadException { return exchange.getIn().getMandatoryBody(); } /** * Returns the mandatory inbound 
message body of the correct type or throws * an exception if it is not present */ public static <T> T getMandatoryInBody(Exchange exchange, Class<T> type) throws InvalidPayloadException { return exchange.getIn().getMandatoryBody(type); } /** * Returns the mandatory outbound message body of the correct type or throws * an exception if it is not present */ public static Object getMandatoryOutBody(Exchange exchange) throws InvalidPayloadException { return exchange.getOut().getMandatoryBody(); } /** * Returns the mandatory outbound message body of the correct type or throws * an exception if it is not present */ public static <T> T getMandatoryOutBody(Exchange exchange, Class<T> type) throws InvalidPayloadException { return exchange.getOut().getMandatoryBody(type); } /** * Converts the value to the given expected type or throws an exception */ public static <T> T convertToMandatoryType(Exchange exchange, Class<T> type, Object value) throws NoTypeConversionAvailableException { CamelContext camelContext = exchange.getContext(); ObjectHelper.notNull(camelContext, "CamelContext of Exchange"); TypeConverter converter = camelContext.getTypeConverter(); if (converter != null) { return converter.mandatoryConvertTo(type, exchange, value); } throw new NoTypeConversionAvailableException(value, type); } /** * Converts the value to the given expected type returning null if it could * not be converted */ public static <T> T convertToType(Exchange exchange, Class<T> type, Object value) { CamelContext camelContext = exchange.getContext(); ObjectHelper.notNull(camelContext, "CamelContext of Exchange"); TypeConverter converter = camelContext.getTypeConverter(); if (converter != null) { return converter.convertTo(type, exchange, value); } return null; } /** * Creates a new instance and copies from the current message exchange so that it can be * forwarded to another destination as a new instance. 
Unlike regular copy this operation * will not share the same {@link org.apache.camel.spi.UnitOfWork} so its should be used * for async messaging, where the original and copied exchange are independent. * * @param exchange original copy of the exchange * @param handover whether the on completion callbacks should be handed over to the new copy. */ public static Exchange createCorrelatedCopy(Exchange exchange, boolean handover) { String id = exchange.getExchangeId(); Exchange copy = exchange.copy(); // do not share the unit of work copy.setUnitOfWork(null); // hand over on completion to the copy if we got any UnitOfWork uow = exchange.getUnitOfWork(); if (handover && uow != null) { uow.handoverSynchronization(copy); } // set a correlation id so we can track back the original exchange copy.setProperty(Exchange.CORRELATION_ID, id); return copy; } /** * Creates a new instance and copies from the current message exchange so that it can be * forwarded to another destination as a new instance. * * @param exchange original copy of the exchange * @param preserveExchangeId whether or not the exchange id should be preserved * @return the copy */ public static Exchange createCopy(Exchange exchange, boolean preserveExchangeId) { Exchange copy = exchange.copy(); if (preserveExchangeId) { copy.setExchangeId(exchange.getExchangeId()); } return copy; } /** * Copies the results of a message exchange from the source exchange to the result exchange * which will copy the out and fault message contents and the exception * * @param result the result exchange which will have the output and error state added * @param source the source exchange which is not modified */ public static void copyResults(Exchange result, Exchange source) { // -------------------------------------------------------------------- // TODO: merge logic with that of copyResultsPreservePattern() // -------------------------------------------------------------------- if (result != source) { 
result.setException(source.getException()); if (source.hasOut()) { result.getOut().copyFrom(source.getOut()); } else if (result.getPattern() == ExchangePattern.InOptionalOut) { // special case where the result is InOptionalOut and with no OUT response // so we should return null to indicate this fact result.setOut(null); } else { // no results so lets copy the last input // as the final processor on a pipeline might not // have created any OUT; such as a mock:endpoint // so lets assume the last IN is the OUT if (result.getPattern().isOutCapable()) { // only set OUT if its OUT capable result.getOut().copyFrom(source.getIn()); } else { // if not replace IN instead to keep the MEP result.getIn().copyFrom(source.getIn()); // clear any existing OUT as the result is on the IN if (result.hasOut()) { result.setOut(null); } } } if (source.hasProperties()) { result.getProperties().putAll(source.getProperties()); } } } /** * Copies the <code>source</code> exchange to <code>target</code> exchange * preserving the {@link ExchangePattern} of <code>target</code>. * * @param source source exchange. * @param result target exchange. */ public static void copyResultsPreservePattern(Exchange result, Exchange source) { // -------------------------------------------------------------------- // TODO: merge logic with that of copyResults() // -------------------------------------------------------------------- if (source == result) { // no need to copy return; } // copy in message result.getIn().copyFrom(source.getIn()); // copy out message if (source.hasOut()) { // exchange pattern sensitive Message resultMessage = source.getOut().isFault() ? result.getOut() : getResultMessage(result); resultMessage.copyFrom(source.getOut()); } // copy exception result.setException(source.getException()); // copy properties if (source.hasProperties()) { result.getProperties().putAll(source.getProperties()); } } /** * Returns the message where to write results in an * exchange-pattern-sensitive way. 
* * @param exchange message exchange. * @return result message. */ public static Message getResultMessage(Exchange exchange) { if (exchange.getPattern().isOutCapable()) { return exchange.getOut(); } else { return exchange.getIn(); } } /** * Returns true if the given exchange pattern (if defined) can support OUT messages * * @param exchange the exchange to interrogate * @return true if the exchange is defined as an {@link ExchangePattern} which supports * OUT messages */ public static boolean isOutCapable(Exchange exchange) { ExchangePattern pattern = exchange.getPattern(); return pattern != null && pattern.isOutCapable(); } /** * Creates a new instance of the given type from the injector */ public static <T> T newInstance(Exchange exchange, Class<T> type) { return exchange.getContext().getInjector().newInstance(type); } /** * Creates a Map of the variables which are made available to a script or template * * @param exchange the exchange to make available * @return a Map populated with the require variables */ public static Map<String, Object> createVariableMap(Exchange exchange) { Map<String, Object> answer = new HashMap<String, Object>(); populateVariableMap(exchange, answer); return answer; } /** * Populates the Map with the variables which are made available to a script or template * * @param exchange the exchange to make available * @param map the map to populate */ public static void populateVariableMap(Exchange exchange, Map<String, Object> map) { map.put("exchange", exchange); Message in = exchange.getIn(); map.put("in", in); map.put("request", in); map.put("headers", in.getHeaders()); map.put("body", in.getBody()); if (isOutCapable(exchange)) { // if we are out capable then set out and response as well // however only grab OUT if it exists, otherwise reuse IN // this prevents side effects to alter the Exchange if we force creating an OUT message Message msg = exchange.hasOut() ? 
exchange.getOut() : exchange.getIn(); map.put("out", msg); map.put("response", msg); } map.put("camelContext", exchange.getContext()); } /** * Returns the MIME content type on the input message or null if one is not defined */ public static String getContentType(Exchange exchange) { return MessageHelper.getContentType(exchange.getIn()); } /** * Returns the MIME content encoding on the input message or null if one is not defined */ public static String getContentEncoding(Exchange exchange) { return MessageHelper.getContentEncoding(exchange.getIn()); } /** * Performs a lookup in the registry of the mandatory bean name and throws an exception if it could not be found */ public static Object lookupMandatoryBean(Exchange exchange, String name) { Object value = lookupBean(exchange, name); if (value == null) { throw new NoSuchBeanException(name); } return value; } /** * Performs a lookup in the registry of the mandatory bean name and throws an exception if it could not be found */ public static <T> T lookupMandatoryBean(Exchange exchange, String name, Class<T> type) { T value = lookupBean(exchange, name, type); if (value == null) { throw new NoSuchBeanException(name); } return value; } /** * Performs a lookup in the registry of the bean name */ public static Object lookupBean(Exchange exchange, String name) { return exchange.getContext().getRegistry().lookup(name); } /** * Performs a lookup in the registry of the bean name and type */ public static <T> T lookupBean(Exchange exchange, String name, Class<T> type) { return exchange.getContext().getRegistry().lookup(name, type); } /** * Returns the first exchange in the given collection of exchanges which has the same exchange ID as the one given * or null if none could be found */ public static Exchange getExchangeById(Iterable<Exchange> exchanges, String exchangeId) { for (Exchange exchange : exchanges) { String id = exchange.getExchangeId(); if (id != null && id.equals(exchangeId)) { return exchange; } } return null; } /** 
 * Prepares the exchanges for aggregation.
 * <p/>
 * This implementation will copy the OUT body to the IN body so when you do
 * aggregation the body is <b>only</b> in the IN body to avoid confusing end users.
 *
 * @param oldExchange the old exchange
 * @param newExchange the new exchange
 */
public static void prepareAggregation(Exchange oldExchange, Exchange newExchange) {
    // move body/header from OUT to IN
    if (oldExchange != null) {
        if (oldExchange.hasOut()) {
            oldExchange.setIn(oldExchange.getOut());
            oldExchange.setOut(null);
        }
    }

    if (newExchange != null) {
        if (newExchange.hasOut()) {
            newExchange.setIn(newExchange.getOut());
            newExchange.setOut(null);
        }
    }
}

/**
 * Tests whether the exchange has been marked as failure handled
 * (the {@link Exchange#FAILURE_HANDLED} property is <tt>true</tt>).
 *
 * @param exchange the exchange
 * @return <tt>true</tt> if failure handled, <tt>false</tt> otherwise
 */
public static boolean isFailureHandled(Exchange exchange) {
    return exchange.getProperty(Exchange.FAILURE_HANDLED, false, Boolean.class);
}

/**
 * Marks the exchange as failure handled: sets the {@link Exchange#FAILURE_HANDLED}
 * property and clears any exception on the exchange.
 *
 * @param exchange the exchange
 */
public static void setFailureHandled(Exchange exchange) {
    exchange.setProperty(Exchange.FAILURE_HANDLED, Boolean.TRUE);
    // clear exception since its failure handled
    exchange.setException(null);
}

/**
 * Tests whether redelivery has been exhausted for the exchange
 * (the {@link Exchange#REDELIVERY_EXHAUSTED} property is <tt>true</tt>).
 *
 * @param exchange the exchange
 * @return <tt>true</tt> if redelivery is exhausted, <tt>false</tt> otherwise
 */
public static boolean isRedeliveryExhausted(Exchange exchange) {
    return exchange.getProperty(Exchange.REDELIVERY_EXHAUSTED, false, Boolean.class);
}

/**
 * Tests whether the exchange carries an {@link InterruptedException}
 * (possibly wrapped) as its exception.
 *
 * @param exchange the exchange
 * @return <tt>true</tt> if the exchange was interrupted, <tt>false</tt> otherwise
 */
public static boolean isInterrupted(Exchange exchange) {
    return exchange.getException(InterruptedException.class) != null;
}

/**
 * Extracts the body from the given exchange.
 * <p/>
 * If the exchange pattern is provided it will try to honor it and retrieve the body
 * from either IN or OUT according to the pattern.
 *
 * @param exchange the exchange
 * @param pattern exchange pattern if given, can be <tt>null</tt>
 * @return the result body, can be <tt>null</tt>.
 * @throws CamelExecutionException is thrown if the processing of the exchange failed
 */
public static Object extractResultBody(Exchange exchange, ExchangePattern pattern) {
    Object answer = null;
    if (exchange != null) {
        // rethrow if there was an exception during execution
        if (exchange.getException() != null) {
            throw ObjectHelper.wrapCamelExecutionException(exchange, exchange.getException());
        }

        // result could have a fault message
        if (hasFaultMessage(exchange)) {
            return exchange.getOut().getBody();
        }

        // okay no fault then return the response according to the pattern
        // try to honor pattern if provided
        boolean notOut = pattern != null && !pattern.isOutCapable();
        boolean hasOut = exchange.hasOut();
        if (hasOut && !notOut) {
            // we have a response in out and the pattern is out capable
            answer = exchange.getOut().getBody();
        } else if (!hasOut && exchange.getPattern() == ExchangePattern.InOptionalOut) {
            // special case where the result is InOptionalOut and with no OUT response
            // so we should return null to indicate this fact
            answer = null;
        } else {
            // use IN as the response
            answer = exchange.getIn().getBody();
        }
    }
    return answer;
}

/**
 * Tests whether the exchange has a fault message set and that its not null.
 *
 * @param exchange the exchange
 * @return <tt>true</tt> if fault message exists
 */
public static boolean hasFaultMessage(Exchange exchange) {
    return exchange.hasOut() && exchange.getOut().isFault() && exchange.getOut().getBody() != null;
}

/**
 * Tests whether the exchange has already been handled by the error handler
 *
 * @param exchange the exchange
 * @return <tt>true</tt> if handled already by error handler, <tt>false</tt> otherwise
 */
public static boolean hasExceptionBeenHandledByErrorHandler(Exchange exchange) {
    return Boolean.TRUE.equals(exchange.getProperty(Exchange.ERRORHANDLER_HANDLED));
}

/**
 * Extracts the body from the given future, that represents a handle to an asynchronous exchange.
 * <p/>
 * Will wait until the future task is complete.
 *
 * @param context the camel context
 * @param future the future handle
 * @param type the expected body response type
 * @return the result body, can be <tt>null</tt>.
 * @throws CamelExecutionException is thrown if the processing of the exchange failed
 */
public static <T> T extractFutureBody(CamelContext context, Future<Object> future, Class<T> type) {
    try {
        return doExtractFutureBody(context, future.get(), type);
    } catch (InterruptedException e) {
        throw ObjectHelper.wrapRuntimeCamelException(e);
    } catch (ExecutionException e) {
        // execution failed due to an exception so rethrow the cause
        throw ObjectHelper.wrapCamelExecutionException(null, e.getCause());
    } finally {
        // its harmless to cancel if task is already completed
        // and in any case we do not want to get hold of the task a 2nd time
        // and its recommended to cancel according to Brian Goetz in his Java Concurrency in Practice book
        future.cancel(true);
    }
}

/**
 * Extracts the body from the given future, that represents a handle to an asynchronous exchange.
 * <p/>
 * Will wait for the future task to complete, but waiting at most the timeout value.
 *
 * @param context the camel context
 * @param future the future handle
 * @param timeout timeout value
 * @param unit timeout unit
 * @param type the expected body response type
 * @return the result body, can be <tt>null</tt>.
 * @throws CamelExecutionException is thrown if the processing of the exchange failed
 * @throws java.util.concurrent.TimeoutException
 *                                is thrown if a timeout triggered
 */
public static <T> T extractFutureBody(CamelContext context, Future<Object> future, long timeout, TimeUnit unit, Class<T> type) throws TimeoutException {
    try {
        // a timeout of 0 or below means wait without a bound
        if (timeout > 0) {
            return doExtractFutureBody(context, future.get(timeout, unit), type);
        } else {
            return doExtractFutureBody(context, future.get(), type);
        }
    } catch (InterruptedException e) {
        // execution failed due interruption so rethrow the cause
        throw ObjectHelper.wrapCamelExecutionException(null, e);
    } catch (ExecutionException e) {
        // execution failed due to an exception so rethrow the cause
        throw ObjectHelper.wrapCamelExecutionException(null, e.getCause());
    } finally {
        // its harmless to cancel if task is already completed
        // and in any case we do not want to get hold of the task a 2nd time
        // and its recommended to cancel according to Brian Goetz in his Java Concurrency in Practice book
        future.cancel(true);
    }
}

/**
 * Converts the raw result of an asynchronous exchange into the requested type.
 * If the result is itself an {@link Exchange}, its result body is extracted
 * (honoring the exchange pattern) before conversion.
 */
private static <T> T doExtractFutureBody(CamelContext context, Object result, Class<T> type) {
    if (result == null) {
        return null;
    }
    if (type.isAssignableFrom(result.getClass())) {
        return type.cast(result);
    }
    if (result instanceof Exchange) {
        Exchange exchange = (Exchange) result;
        Object answer = ExchangeHelper.extractResultBody(exchange, exchange.getPattern());
        return context.getTypeConverter().convertTo(type, answer);
    }
    return context.getTypeConverter().convertTo(type, result);
}

/**
 * Creates an exception message with the provided details.
 * <p/>
 * All fields are optional so you can pass in only an exception, or just a message etc. or any combination.
 *
 * @param message the message
 * @param exchange the exchange
 * @param cause the caused exception
 * @return an error message (without stacktrace from exception)
 */
public static String createExceptionMessage(String message, Exchange exchange, Throwable cause) {
    StringBuilder sb = new StringBuilder();
    if (message != null) {
        sb.append(message);
    }
    if (exchange != null) {
        if (sb.length() > 0) {
            sb.append(". ");
        }
        sb.append(exchange);
    }
    if (cause != null) {
        if (sb.length() > 0) {
            sb.append(". ");
        }
        sb.append("Caused by: [" + cause.getClass().getName() + " - " + cause.getMessage() + "]");
    }
    return sb.toString().trim();
}

/**
 * Strategy to prepare results before next iterator or when we are complete,
 * which is done by copying OUT to IN, so there is only an IN as input
 * for the next iteration.
 *
 * @param exchange the exchange to prepare
 */
public static void prepareOutToIn(Exchange exchange) {
    // we are routing using pipes and filters so we need to manually copy OUT to IN
    if (exchange.hasOut()) {
        exchange.getIn().copyFrom(exchange.getOut());
        exchange.setOut(null);
    }
}

}
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.content.browser;

import android.content.Context;
import android.os.StrictMode;

import androidx.annotation.IntDef;
import androidx.annotation.VisibleForTesting;

import org.chromium.base.BuildInfo;
import org.chromium.base.ContextUtils;
import org.chromium.base.Log;
import org.chromium.base.ThreadUtils;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.annotations.NativeMethods;
import org.chromium.base.library_loader.LibraryLoader;
import org.chromium.base.library_loader.LibraryProcessType;
import org.chromium.base.library_loader.LoaderErrors;
import org.chromium.base.library_loader.ProcessInitException;
import org.chromium.base.metrics.ScopedSysTraceEvent;
import org.chromium.base.task.PostTask;
import org.chromium.content.app.ContentMain;
import org.chromium.content.browser.ServicificationStartupUma.ServicificationStartup;
import org.chromium.content_public.browser.BrowserStartupController;
import org.chromium.content_public.browser.UiThreadTaskTraits;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.List;

/**
 * Implementation of {@link BrowserStartupController}.
 * This is a singleton, and stores a reference to the application context.
 * <p>
 * All state is UI-thread confined: mutating methods assert they run on the UI
 * thread, and native startup results are delivered back via the
 * {@code @CalledByNative} static hooks below.
 */
@JNINamespace("content")
public class BrowserStartupControllerImpl implements BrowserStartupController {
    private static final String TAG = "BrowserStartup";

    // Helper constants for {@link #executeEnqueuedCallbacks(int, boolean)}.
    @VisibleForTesting
    static final int STARTUP_SUCCESS = -1;
    @VisibleForTesting
    static final int STARTUP_FAILURE = 1;

    @IntDef({BrowserStartType.FULL_BROWSER, BrowserStartType.MINIMAL_BROWSER})
    @Retention(RetentionPolicy.SOURCE)
    public @interface BrowserStartType {
        int FULL_BROWSER = 0;
        int MINIMAL_BROWSER = 1;
    }

    private static BrowserStartupControllerImpl sInstance;

    private static boolean sShouldStartGpuProcessOnBrowserStartup;

    // Called by native code once full browser startup finishes (result <= 0 means success).
    @VisibleForTesting
    @CalledByNative
    static void browserStartupComplete(int result) {
        if (sInstance != null) {
            sInstance.executeEnqueuedCallbacks(result);
        }
    }

    // Called by native code once the minimal browser environment is up.
    @CalledByNative
    static void minimalBrowserStartupComplete() {
        if (sInstance != null) {
            sInstance.minimalBrowserStarted();
        }
    }

    @CalledByNative
    static boolean shouldStartGpuProcessOnBrowserStartup() {
        return sShouldStartGpuProcessOnBrowserStartup;
    }

    // A list of callbacks that should be called when the async startup of the browser process is
    // complete.
    private final List<StartupCallback> mAsyncStartupCallbacks;

    // A list of callbacks that should be called after a minimal browser environment is initialized.
    // These callbacks will be called once all the ongoing requests to start a minimal or full
    // browser process are completed. For example, if there is no outstanding request to start full
    // browser process, the callbacks will be executed once the minimal browser starts. On the other
    // hand, the callbacks will be deferred until full browser starts.
    private final List<StartupCallback> mMinimalBrowserStartedCallbacks;

    // Whether the async startup of the browser process has started.
    private boolean mHasStartedInitializingBrowserProcess;

    // Ensures prepareToStartBrowserProcess() logic happens only once.
    private boolean mPrepareToStartCompleted;

    private boolean mHasCalledContentStart;

    // Whether the async startup of the browser process is complete.
    private boolean mFullBrowserStartupDone;

    // This field is set after startup has been completed based on whether the startup was a success
    // or not. It is used when later requests to startup come in that happen after the initial set
    // of enqueued callbacks have been executed.
    private boolean mStartupSuccess;

    // Tests may inject a method to be run instead of calling ContentMain() in order for them to
    // initialize the C++ system via another means.
    private Runnable mContentMainCallbackForTests;

    // Browser start up type. If the type is |BROWSER_START_TYPE_MINIMAL|, start up
    // will be paused after the minimal environment is setup. Additional request to launch the full
    // browser process is needed to fully complete the startup process. Callbacks will be executed
    // once the browser is fully started, or when the minimal environment is setup and there are no
    // outstanding requests to start the full browser.
    @BrowserStartType
    private int mCurrentBrowserStartType = BrowserStartType.FULL_BROWSER;

    // If the app is only started with a minimal browser, whether it needs to launch full browser
    // functionalities now.
    private boolean mLaunchFullBrowserAfterMinimalBrowserStart;

    // Whether the minimal browser environment is set up.
    private boolean mMinimalBrowserStarted;

    private TracingControllerAndroidImpl mTracingController;

    BrowserStartupControllerImpl() {
        mAsyncStartupCallbacks = new ArrayList<>();
        mMinimalBrowserStartedCallbacks = new ArrayList<>();
        if (BuildInfo.isDebugAndroid()) {
            // Only set up the tracing broadcast receiver on debug builds of the OS. Normal tracing
            // should use the DevTools API.
            PostTask.postTask(UiThreadTaskTraits.DEFAULT, new Runnable() {
                @Override
                public void run() {
                    addStartupCompletedObserver(new StartupCallback() {
                        @Override
                        public void onSuccess() {
                            assert mTracingController == null;
                            Context context = ContextUtils.getApplicationContext();
                            mTracingController = new TracingControllerAndroidImpl(context);
                            mTracingController.registerReceiver(context);
                        }

                        @Override
                        public void onFailure() {
                            // Startup failed.
                        }
                    });
                }
            });
        }
    }

    /**
     * Get BrowserStartupController instance, create a new one if no existing.
     *
     * @return BrowserStartupController instance.
     */
    public static BrowserStartupController getInstance() {
        assert ThreadUtils.runningOnUiThread() : "Tried to start the browser on the wrong thread.";
        ThreadUtils.assertOnUiThread();
        if (sInstance == null) {
            sInstance = new BrowserStartupControllerImpl();
        }
        return sInstance;
    }

    @VisibleForTesting
    public static void overrideInstanceForTest(BrowserStartupController controller) {
        sInstance = (BrowserStartupControllerImpl) controller;
    }

    @Override
    public void startBrowserProcessesAsync(@LibraryProcessType int libraryProcessType,
            boolean startGpuProcess, boolean startMinimalBrowser,
            final StartupCallback callback) {
        assert !LibraryLoader.isBrowserProcessStartupBlockedForTesting();
        assertProcessTypeSupported(libraryProcessType);
        assert ThreadUtils.runningOnUiThread() : "Tried to start the browser on the wrong thread.";
        ServicificationStartupUma.getInstance().record(ServicificationStartupUma.getStartupMode(
                mFullBrowserStartupDone, mMinimalBrowserStarted, startMinimalBrowser));

        if (mFullBrowserStartupDone || (startMinimalBrowser && mMinimalBrowserStarted)) {
            // Browser process initialization has already been completed, so we can immediately post
            // the callback.
            postStartupCompleted(callback);
            return;
        }

        // Browser process has not been fully started yet, so we defer executing the callback.
        if (startMinimalBrowser) {
            mMinimalBrowserStartedCallbacks.add(callback);
        } else {
            mAsyncStartupCallbacks.add(callback);
        }
        // If a minimal browser process is launched, we need to relaunch the full process in
        // minimalBrowserStarted() if such a request was received.
        mLaunchFullBrowserAfterMinimalBrowserStart |=
                (mCurrentBrowserStartType == BrowserStartType.MINIMAL_BROWSER)
                && !startMinimalBrowser;
        if (!mHasStartedInitializingBrowserProcess) {
            // This is the first time we have been asked to start the browser process. We set the
            // flag that indicates that we have kicked off starting the browser process.
            mHasStartedInitializingBrowserProcess = true;

            sShouldStartGpuProcessOnBrowserStartup = startGpuProcess;

            // Start-up at this point occurs before the first frame of the app is drawn. Although
            // contentStart() can be called eagerly, deferring it would allow a frame to be drawn,
            // so that Android reports Chrome to start before our SurfaceView has rendered. Our
            // metrics have also adapted to this. Therefore we wrap contentStart() into Runnable,
            // and let prepareToStartBrowserProcess() decide whether to defer it by a frame (in
            // production) or not (overridden in tests). http://b/181151614#comment6
            prepareToStartBrowserProcess(false, new Runnable() {
                @Override
                public void run() {
                    ThreadUtils.assertOnUiThread();
                    if (mHasCalledContentStart) return;
                    mCurrentBrowserStartType = startMinimalBrowser
                            ? BrowserStartType.MINIMAL_BROWSER
                            : BrowserStartType.FULL_BROWSER;
                    if (contentStart() > 0) {
                        // Failed. The callbacks may not have run, so run them.
                        enqueueCallbackExecutionOnStartupFailure();
                    }
                }
            });
        } else if (mMinimalBrowserStarted && mLaunchFullBrowserAfterMinimalBrowserStart) {
            // If we missed the minimalBrowserStarted() call, launch the full browser now if needed.
            // Otherwise, minimalBrowserStarted() will handle the full browser launch.
            mCurrentBrowserStartType = BrowserStartType.FULL_BROWSER;
            if (contentStart() > 0) enqueueCallbackExecutionOnStartupFailure();
        }
    }

    @Override
    public void startBrowserProcessesSync(
            @LibraryProcessType int libraryProcessType, boolean singleProcess) {
        assert !LibraryLoader.isBrowserProcessStartupBlockedForTesting();
        assertProcessTypeSupported(libraryProcessType);

        ServicificationStartupUma.getInstance().record(ServicificationStartupUma.getStartupMode(
                mFullBrowserStartupDone, mMinimalBrowserStarted, false /* startMinimalBrowser */));

        // If already started skip to checking the result
        if (!mFullBrowserStartupDone) {
            // contentStart() need not be deferred, so passing null.
            prepareToStartBrowserProcess(singleProcess, null /* deferrableTask */);

            boolean startedSuccessfully = true;
            if (!mHasCalledContentStart
                    || mCurrentBrowserStartType == BrowserStartType.MINIMAL_BROWSER) {
                mCurrentBrowserStartType = BrowserStartType.FULL_BROWSER;
                if (contentStart() > 0) {
                    // Failed. The callbacks may not have run, so run them.
                    enqueueCallbackExecutionOnStartupFailure();
                    startedSuccessfully = false;
                }
            }
            if (startedSuccessfully) {
                flushStartupTasks();
            }
        }

        // Startup should now be complete
        assert mFullBrowserStartupDone;
        if (!mStartupSuccess) {
            throw new ProcessInitException(LoaderErrors.NATIVE_STARTUP_FAILED);
        }
    }

    /**
     * Start the browser process by calling ContentMain.start().
     */
    int contentStart() {
        int result = 0;
        if (mContentMainCallbackForTests == null) {
            boolean startMinimalBrowser =
                    mCurrentBrowserStartType == BrowserStartType.MINIMAL_BROWSER;
            result = contentMainStart(startMinimalBrowser);
            // No need to launch the full browser again if we are launching full browser now.
            if (!startMinimalBrowser) mLaunchFullBrowserAfterMinimalBrowserStart = false;
        } else {
            assert mCurrentBrowserStartType == BrowserStartType.FULL_BROWSER;
            // Run the injected Runnable instead of ContentMain().
            mContentMainCallbackForTests.run();
            mLaunchFullBrowserAfterMinimalBrowserStart = false;
        }
        mHasCalledContentStart = true;
        return result;
    }

    @Override
    public void setContentMainCallbackForTests(Runnable r) {
        assert !mHasCalledContentStart;
        mContentMainCallbackForTests = r;
    }

    /**
     * Wrap ContentMain.start() for testing.
     */
    @VisibleForTesting
    int contentMainStart(boolean startMinimalBrowser) {
        return ContentMain.start(startMinimalBrowser);
    }

    @VisibleForTesting
    void flushStartupTasks() {
        BrowserStartupControllerImplJni.get().flushStartupTasks();
    }

    @Override
    public boolean isFullBrowserStarted() {
        ThreadUtils.assertOnUiThread();
        return mFullBrowserStartupDone && mStartupSuccess;
    }

    @Override
    public boolean isRunningInMinimalBrowserMode() {
        ThreadUtils.assertOnUiThread();
        return mMinimalBrowserStarted && !mFullBrowserStartupDone && mStartupSuccess;
    }

    @Override
    public boolean isNativeStarted() {
        ThreadUtils.assertOnUiThread();
        return (mMinimalBrowserStarted || mFullBrowserStartupDone) && mStartupSuccess;
    }

    @Override
    public void addStartupCompletedObserver(StartupCallback callback) {
        ThreadUtils.assertOnUiThread();
        if (mFullBrowserStartupDone) {
            postStartupCompleted(callback);
        } else {
            mAsyncStartupCallbacks.add(callback);
        }
    }

    @Override
    public @ServicificationStartup int getStartupMode(boolean startMinimalBrowser) {
        return ServicificationStartupUma.getStartupMode(
                mFullBrowserStartupDone, mMinimalBrowserStarted, startMinimalBrowser);
    }

    /**
     * Asserts that library process type is one of the supported types.
     * @param libraryProcessType the type of process the shared library is loaded. It must be
     *                           LibraryProcessType.PROCESS_BROWSER,
     *                           LibraryProcessType.PROCESS_WEBVIEW or
     *                           LibraryProcessType.PROCESS_WEBLAYER.
     */
    private void assertProcessTypeSupported(@LibraryProcessType int libraryProcessType) {
        assert LibraryProcessType.PROCESS_BROWSER == libraryProcessType
                || LibraryProcessType.PROCESS_WEBVIEW == libraryProcessType
                || LibraryProcessType.PROCESS_WEBLAYER == libraryProcessType;
        LibraryLoader.getInstance().assertCompatibleProcessType(libraryProcessType);
    }

    /**
     * Called when the minimal browser environment is done initializing.
     */
    private void minimalBrowserStarted() {
        mMinimalBrowserStarted = true;
        if (mLaunchFullBrowserAfterMinimalBrowserStart) {
            // If startFullBrowser() fails, execute the callbacks right away. Otherwise,
            // callbacks will be deferred until browser startup completes.
            mCurrentBrowserStartType = BrowserStartType.FULL_BROWSER;
            if (contentStart() > 0) enqueueCallbackExecutionOnStartupFailure();
            return;
        }

        if (mCurrentBrowserStartType == BrowserStartType.MINIMAL_BROWSER) {
            executeMinimalBrowserStartupCallbacks(STARTUP_SUCCESS);
        }
        recordStartupUma();
    }

    private void executeEnqueuedCallbacks(int startupResult) {
        assert ThreadUtils.runningOnUiThread() : "Callback from browser startup from wrong thread.";
        mFullBrowserStartupDone = true;
        mStartupSuccess = (startupResult <= 0);
        for (StartupCallback asyncStartupCallback : mAsyncStartupCallbacks) {
            if (mStartupSuccess) {
                asyncStartupCallback.onSuccess();
            } else {
                asyncStartupCallback.onFailure();
            }
        }
        // We don't want to hold on to any objects after we do not need them anymore.
        mAsyncStartupCallbacks.clear();

        executeMinimalBrowserStartupCallbacks(startupResult);
        recordStartupUma();
    }

    private void executeMinimalBrowserStartupCallbacks(int startupResult) {
        mStartupSuccess = (startupResult <= 0);
        for (StartupCallback callback : mMinimalBrowserStartedCallbacks) {
            if (mStartupSuccess) {
                callback.onSuccess();
            } else {
                callback.onFailure();
            }
        }
        mMinimalBrowserStartedCallbacks.clear();
    }

    // Post a task to tell the callbacks that startup failed. Since the execution clears the
    // callback lists, it is safe to call this more than once.
    private void enqueueCallbackExecutionOnStartupFailure() {
        PostTask.postTask(
                UiThreadTaskTraits.BOOTSTRAP, () -> executeEnqueuedCallbacks(STARTUP_FAILURE));
    }

    private void postStartupCompleted(final StartupCallback callback) {
        PostTask.postTask(UiThreadTaskTraits.BOOTSTRAP, new Runnable() {
            @Override
            public void run() {
                if (mStartupSuccess) {
                    callback.onSuccess();
                } else {
                    callback.onFailure();
                }
            }
        });
    }

    @VisibleForTesting
    void prepareToStartBrowserProcess(final boolean singleProcess, final Runnable deferrableTask) {
        if (mPrepareToStartCompleted) {
            return;
        }
        Log.d(TAG, "Initializing chromium process, singleProcess=%b", singleProcess);
        mPrepareToStartCompleted = true;
        try (ScopedSysTraceEvent e = ScopedSysTraceEvent.scoped("prepareToStartBrowserProcess")) {
            // This strictmode exception is to cover the case where the browser process is being
            // started asynchronously but not in the main browser flow. The main browser flow will
            // trigger library loading earlier and this will be a no-op, but in the other cases this
            // will need to block on loading libraries. This applies to tests and
            // ManageSpaceActivity, which can be launched from Settings.
            StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
            try {
                // Normally Main.java will have already loaded the library asynchronously, we only
                // need to load it here if we arrived via another flow, e.g. bookmark access & sync
                // setup.
                LibraryLoader.getInstance().ensureInitialized();
            } finally {
                StrictMode.setThreadPolicy(oldPolicy);
            }

            // TODO(yfriedman): Remove dependency on a command line flag for this.
            DeviceUtilsImpl.addDeviceSpecificUserAgentSwitch();
            BrowserStartupControllerImplJni.get().setCommandLineFlags(singleProcess);
        }
        if (deferrableTask != null) {
            PostTask.postTask(UiThreadTaskTraits.USER_BLOCKING, deferrableTask);
        }
    }

    /**
     * Can be overridden by testing.
     */
    @VisibleForTesting
    void recordStartupUma() {
        ServicificationStartupUma.getInstance().commit();
    }

    @NativeMethods
    interface Natives {
        void setCommandLineFlags(boolean singleProcess);
        void flushStartupTasks();
    }
}
package org.keedio.flume.interceptor.enrichment.serialization.avro;

import org.keedio.flume.interceptor.enrichment.interceptor.EnrichedEventBody;
import org.keedio.flume.interceptor.enrichment.interceptor.EnrichedEventBodyExtraData;
import org.keedio.flume.interceptor.enrichment.interceptor.EnrichedEventBodyGeneric;
import org.keedio.flume.interceptor.enrichment.serialization.SerializationBean;
import org.keedio.flume.interceptor.enrichment.serialization.SerializerAbstractTest;
import org.keedio.flume.interceptor.enrichment.serialization.avro.serializers.AVROReflectiveSerializer;
import org.keedio.flume.interceptor.enrichment.serialization.exception.SerializationException;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;

import java.util.HashMap;
import java.util.Set;

/**
 * TestNG tests for {@link AVROReflectiveSerializer}: construction without a schema,
 * serialization/deserialization error paths, and round-trip equality for the
 * generic event-body variants.
 * <p>
 * Created by PC on 08/06/2016.
 */
public class AVROReflectiveSerializerTest extends SerializerAbstractTest {

    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(AVROReflectiveSerializerTest.class);

    /** Creating a serializer through the factory with a null schema must throw SerializationException. */
    @Test
    public void testAVROReflectiveSerializerWithoutSchemaFromSerializerFactory() {
        try {
            //Get serializer from factory
            SerializationBean serializationBean = createSerializationBeanAVROReflectiveSerializer();
            serializationBean.setSchema(null);
            AVROReflectiveSerializer avroReflectiveSerializer = getAVROReflectiveSerializer(serializationBean);

            //SerializationException has been thrown
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerWithoutSchemaFromSerializerFactory");
        } catch (SerializationException e) {
            //Is expected exception
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerWithoutSchemaFromSerializerFactory");
        }
    }

    /** Constructing the serializer directly with a null schema must throw SerializationException. */
    @Test
    public void testAVROReflectiveSerializerWithoutSchema() {
        try {
            //Get serializer directly (not from Serializer factory)
            AVROReflectiveSerializer avroReflectiveSerializer = new AVROReflectiveSerializer(null);

            //SerializationException has been thrown
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerWithoutSchema");
        } catch (SerializationException e) {
            //Is expected exception
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerWithoutSchema");
        }
    }

    /** Serializing an object of the wrong class must throw SerializationException. */
    @Test
    public void testAVROReflectiveSerializerSerializationError() {
        try {
            //Get serializer
            SerializationBean serializationBean = createSerializationBeanAVROReflectiveSerializer();
            AVROReflectiveSerializer avroReflectiveSerializer = getAVROReflectiveSerializer(serializationBean);

            //Serialize a fake object (the class of the object is not the same that the class of the serializer)
            byte[] byteArrayAVROSerialization = avroReflectiveSerializer.toBytes(createSpecificRecordFakeClass());

            //SerializationException has been thrown
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerSerializationError");
        } catch (SerializationException e) {
            //Is expected exception
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerSerializationError");
        }
    }

    /** Deserializing a null byte array must throw SerializationException. */
    @Test
    public void testAVROReflectiveSerializerDeserializationError() {
        try {
            //Get serializer
            SerializationBean serializationBean = createSerializationBeanAVROReflectiveSerializer();
            AVROReflectiveSerializer avroReflectiveSerializer = getAVROReflectiveSerializer(serializationBean);

            //Create a null byteArray
            byte[] byteArrayAVROSerialization = null;

            //Deserialization of the byteArray
            EnrichedEventBody enrichedEventBody = (EnrichedEventBody) avroReflectiveSerializer.toObject(byteArrayAVROSerialization);

            //SerializationException has been thrown
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerDeserializationError");
        } catch (SerializationException e) {
            //Is expected exception
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerDeserializationError");
        }
    }

    /**
     * Round trip of EnrichedEventBody: deserialization is expected to fail with
     * SerializationException (the class needs an empty constructor for the reflective serializer).
     */
    @Test
    public void testAVROReflectiveSerializerSerializationDeserialization() {
        try {
            //Get serializer
            SerializationBean serializationBean = createSerializationBeanAVROReflectiveSerializer();
            //serializationBean.setSchema(ReflectData.get().getSchema(EnrichedEventBody.class).toString());
            AVROReflectiveSerializer<EnrichedEventBody> avroReflectiveSerializer = getAVROReflectiveSerializer(serializationBean);

            //Create a EnrichedEventBody object
            EnrichedEventBody enrichedEventBody = createEnrichedEventBody();

            //Serialization of the EnrichedEventBody object
            byte[] byteArrayAVROSerialization = avroReflectiveSerializer.toBytes(enrichedEventBody);

            //Verify that the serialization has contents
            Assert.assertTrue(byteArrayAVROSerialization.length > 0 ,"The serialization process is not correct.");

            //Deserialization of the byte array
            EnrichedEventBody enrichedEventBodyDeserialized = avroReflectiveSerializer.toObject(byteArrayAVROSerialization);

            //SerializationException has been thrown (the class need a empty constructor for the reflective serializer)
            // FIX: this message previously named testAVROReflectiveSerializerDeserializationError
            // (copy-paste error), which misreported which test reached this point.
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerSerializationDeserialization");
        } catch (SerializationException e) {
            //Is expected exception
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerSerializationDeserialization");
        }
    }

    /** Round trip of a not-enriched EnrichedEventBodyGeneric with EnrichedEventBodyExtraData; all fields must survive. */
    @Test
    public void testAVROReflectiveSerializerSerializationDeserializationNotEnrichedWithEnrichedEventBodyExtraData() {
        try {
            //Get serializer
            SerializationBean serializationBean = createSerializationBeanAVROReflectiveSerializerGeneric();
            //serializationBean.setSchema(ReflectData.get().getSchema(EnrichedEventBody.class).toString());
            AVROReflectiveSerializer<EnrichedEventBodyGeneric> avroReflectiveSerializer = getAVROReflectiveSerializer(serializationBean);

            //Create a not enriched EnrichedEventBodyGeneric object (with EnrichedEventBodyExtraData Class)
            String message = "hello";
            EnrichedEventBodyGeneric<EnrichedEventBodyExtraData> enrichedEventBodyGeneric =
                    EnrichedEventBodyGeneric.createFromEventBody(message.getBytes(), false, EnrichedEventBodyExtraData.class, avroReflectiveSerializer);

            //Serialization of the EnrichedEventBody object
            byte[] byteArrayAVROSerialization = avroReflectiveSerializer.toBytes(enrichedEventBodyGeneric);

            //Verify that the serialization has contents
            Assert.assertTrue(byteArrayAVROSerialization.length > 0 ,"The serialization process is not correct.");

            //Deserialization of the byte array
            EnrichedEventBodyGeneric<EnrichedEventBodyExtraData> enrichedEventBodyGenericDeserialized =
                    avroReflectiveSerializer.toObject(byteArrayAVROSerialization);

            //Verify that the deserialization has been done
            Assert.assertNotNull(enrichedEventBodyGenericDeserialized ,"The deserialization process is not correct.");

            //Test equality of the original object and the deserialized object
            Assert.assertEquals(enrichedEventBodyGeneric.getMessage(), enrichedEventBodyGenericDeserialized.getMessage(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getTopic(), enrichedEventBodyGenericDeserialized.getExtraData().getTopic(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getTimestamp(), enrichedEventBodyGenericDeserialized.getExtraData().getTimestamp(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getSha1Hex(), enrichedEventBodyGenericDeserialized.getExtraData().getSha1Hex(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getFilePath(), enrichedEventBodyGenericDeserialized.getExtraData().getFilePath(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getFileName(), enrichedEventBodyGenericDeserialized.getExtraData().getFileName(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getLineNumber(), enrichedEventBodyGenericDeserialized.getExtraData().getLineNumber(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getType(), enrichedEventBodyGenericDeserialized.getExtraData().getType(),"The serialization/deserialization process is not correct.");
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerSerializationDeserializationNotEnrichedWithEnrichedEventBodyExtraData");
        }
    }

    /** Round trip of an enriched EnrichedEventBodyGeneric with EnrichedEventBodyExtraData; all fields must survive. */
    @Test
    public void testAVROReflectiveSerializerSerializationDeserializationGenericWithEnrichedEventBodyExtraData() {
        try {
            //Get serializer
            SerializationBean serializationBean = createSerializationBeanAVROReflectiveSerializerGeneric();
            //serializationBean.setSchema(ReflectData.get().getSchema(EnrichedEventBody.class).toString());
            AVROReflectiveSerializer<EnrichedEventBodyGeneric> avroReflectiveSerializer = getAVROReflectiveSerializer(serializationBean);

            //Create a EnrichedEventBodyGeneric<EnrichedEventBodyExtraData> object
            EnrichedEventBodyGeneric<EnrichedEventBodyExtraData> enrichedEventBodyGeneric = createEnrichedEventBodyGeneric();

            //Serialization of the EnrichedEventBodyGeneric object
            byte[] byteArrayAVROSerialization = avroReflectiveSerializer.toBytes(enrichedEventBodyGeneric);

            //Verify that the serialization has contents
            Assert.assertTrue(byteArrayAVROSerialization.length > 0 ,"The serialization process is not correct.");

            //Deserialization of the byte array
            EnrichedEventBodyGeneric<EnrichedEventBodyExtraData> enrichedEventBodyGenericDeserialized =
                    (EnrichedEventBodyGeneric<EnrichedEventBodyExtraData>) avroReflectiveSerializer.toObject(byteArrayAVROSerialization);

            //Verify that the deserialization has been done
            Assert.assertNotNull(enrichedEventBodyGenericDeserialized ,"The deserialization process is not correct.");

            //Test equality of the original object and the deserialized object
            Assert.assertEquals(enrichedEventBodyGeneric.getMessage(), enrichedEventBodyGenericDeserialized.getMessage(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getTopic(), enrichedEventBodyGenericDeserialized.getExtraData().getTopic(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getTimestamp(), enrichedEventBodyGenericDeserialized.getExtraData().getTimestamp(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getSha1Hex(), enrichedEventBodyGenericDeserialized.getExtraData().getSha1Hex(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getFilePath(), enrichedEventBodyGenericDeserialized.getExtraData().getFilePath(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getFileName(), enrichedEventBodyGenericDeserialized.getExtraData().getFileName(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getLineNumber(), enrichedEventBodyGenericDeserialized.getExtraData().getLineNumber(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().getType(), enrichedEventBodyGenericDeserialized.getExtraData().getType(),"The serialization/deserialization process is not correct.");
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerSerializationDeserializationGenericWithEnrichedEventBodyExtraData");
        }
    }

    /** Round trip of EnrichedEventBodyGeneric with a HashMap payload; every map entry must survive. */
    @Test
    public void testAVROReflectiveSerializerSerializationDeserializationGenericWithHashMap() {
        try {
            //Get serializer
            SerializationBean serializationBean = createSerializationBeanAVROReflectiveSerializerGenericHashMap();
            AVROReflectiveSerializer<EnrichedEventBodyGeneric> avroReflectiveSerializer = getAVROReflectiveSerializer(serializationBean);

            //Create a EnrichedEventBodyGeneric<EnrichedEventBodyExtraData> object
            EnrichedEventBodyGeneric<HashMap<String, String>> enrichedEventBodyGeneric = createEnrichedEventBodyGenericHashMap();

            //Serialization of the EnrichedEventBodyGeneric object
            byte[] byteArrayAVROSerialization = avroReflectiveSerializer.toBytes(enrichedEventBodyGeneric);

            //Verify that the serialization has contents
            Assert.assertTrue(byteArrayAVROSerialization.length > 0 ,"The serialization process is not correct.");

            //Deserialization of the byte array
            EnrichedEventBodyGeneric<HashMap<String, String>> enrichedEventBodyGenericDeserialized =
                    (EnrichedEventBodyGeneric<HashMap<String, String>>) avroReflectiveSerializer.toObject(byteArrayAVROSerialization);

            //Verify that the deserialization has been done
            Assert.assertNotNull(enrichedEventBodyGenericDeserialized ,"The deserialization process is not correct.");

            //Test equality of the original object and the deserialized object
            Assert.assertEquals(enrichedEventBodyGeneric.getMessage(), enrichedEventBodyGenericDeserialized.getMessage(),"The serialization/deserialization process is not correct.");
            Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().size(), enrichedEventBodyGenericDeserialized.getExtraData().size(),"The serialization/deserialization process is not correct.");

            Set<String> keysetEnrichedEventBody = enrichedEventBodyGeneric.getExtraData().keySet();
            for (String keyEnrichedEventBod : keysetEnrichedEventBody) {
                Assert.assertEquals(enrichedEventBodyGeneric.getExtraData().get(keyEnrichedEventBod), enrichedEventBodyGenericDeserialized.getExtraData().get(keyEnrichedEventBod),
                        "The serialization/deserialization process is not correct.");
            }
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Unexpected exception in testAVROReflectiveSerializerSerializationDeserializationGenericWithHashMap");
        }
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.firestore.v1;

import static com.google.cloud.firestore.v1.FirestoreAdminClient.ListFieldsPagedResponse;
import static com.google.cloud.firestore.v1.FirestoreAdminClient.ListIndexesPagedResponse;

import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.common.collect.Lists;
import com.google.firestore.admin.v1.CollectionGroupName;
import com.google.firestore.admin.v1.CreateIndexRequest;
import com.google.firestore.admin.v1.DatabaseName;
import com.google.firestore.admin.v1.DeleteIndexRequest;
import com.google.firestore.admin.v1.ExportDocumentsRequest;
import com.google.firestore.admin.v1.ExportDocumentsResponse;
import com.google.firestore.admin.v1.Field;
import com.google.firestore.admin.v1.FieldName;
import com.google.firestore.admin.v1.GetFieldRequest;
import com.google.firestore.admin.v1.GetIndexRequest;
import com.google.firestore.admin.v1.ImportDocumentsRequest;
import com.google.firestore.admin.v1.Index;
import com.google.firestore.admin.v1.IndexName;
import com.google.firestore.admin.v1.ListFieldsRequest;
import com.google.firestore.admin.v1.ListFieldsResponse;
import com.google.firestore.admin.v1.ListIndexesRequest;
import com.google.firestore.admin.v1.ListIndexesResponse;
import com.google.firestore.admin.v1.UpdateFieldRequest;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Unit tests for {@link FirestoreAdminClient}, exercised against an in-process gRPC mock
 * ({@link MockFirestoreAdmin}). Each RPC has a success test (asserting the response and the
 * captured request) and a failure test (asserting INVALID_ARGUMENT propagation).
 */
@Generated("by gapic-generator-java")
public class FirestoreAdminClientTest {
  private static MockFirestoreAdmin mockFirestoreAdmin;
  private static MockServiceHelper mockServiceHelper;
  private LocalChannelProvider channelProvider;
  private FirestoreAdminClient client;

  @BeforeClass
  public static void startStaticServer() {
    mockFirestoreAdmin = new MockFirestoreAdmin();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockFirestoreAdmin));
    mockServiceHelper.start();
  }

  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }

  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    FirestoreAdminSettings settings =
        FirestoreAdminSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = FirestoreAdminClient.create(settings);
  }

  @After
  public void tearDown() throws Exception {
    client.close();
  }

  @Test
  public void createIndexTest() throws Exception {
    Index expectedResponse =
        Index.newBuilder()
            .setName(IndexName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[INDEX]").toString())
            .addAllFields(new ArrayList<Index.IndexField>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createIndexTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFirestoreAdmin.addResponse(resultOperation);

    CollectionGroupName parent = CollectionGroupName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]");
    Index index = Index.newBuilder().build();

    Index actualResponse = client.createIndexAsync(parent, index).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateIndexRequest actualRequest = ((CreateIndexRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(index, actualRequest.getIndex());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void createIndexExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      CollectionGroupName parent =
          CollectionGroupName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]");
      Index index = Index.newBuilder().build();
      client.createIndexAsync(parent, index).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void createIndexTest2() throws Exception {
    Index expectedResponse =
        Index.newBuilder()
            .setName(IndexName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[INDEX]").toString())
            .addAllFields(new ArrayList<Index.IndexField>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createIndexTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFirestoreAdmin.addResponse(resultOperation);

    String parent = "parent-995424086";
    Index index = Index.newBuilder().build();

    Index actualResponse = client.createIndexAsync(parent, index).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateIndexRequest actualRequest = ((CreateIndexRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(index, actualRequest.getIndex());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void createIndexExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      String parent = "parent-995424086";
      Index index = Index.newBuilder().build();
      client.createIndexAsync(parent, index).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void listIndexesTest() throws Exception {
    Index responsesElement = Index.newBuilder().build();
    ListIndexesResponse expectedResponse =
        ListIndexesResponse.newBuilder()
            .setNextPageToken("")
            .addAllIndexes(Arrays.asList(responsesElement))
            .build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    CollectionGroupName parent = CollectionGroupName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]");

    ListIndexesPagedResponse pagedListResponse = client.listIndexes(parent);

    List<Index> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getIndexesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListIndexesRequest actualRequest = ((ListIndexesRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listIndexesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      CollectionGroupName parent =
          CollectionGroupName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]");
      client.listIndexes(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listIndexesTest2() throws Exception {
    Index responsesElement = Index.newBuilder().build();
    ListIndexesResponse expectedResponse =
        ListIndexesResponse.newBuilder()
            .setNextPageToken("")
            .addAllIndexes(Arrays.asList(responsesElement))
            .build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListIndexesPagedResponse pagedListResponse = client.listIndexes(parent);

    List<Index> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getIndexesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListIndexesRequest actualRequest = ((ListIndexesRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listIndexesExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listIndexes(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getIndexTest() throws Exception {
    Index expectedResponse =
        Index.newBuilder()
            .setName(IndexName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[INDEX]").toString())
            .addAllFields(new ArrayList<Index.IndexField>())
            .build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    IndexName name = IndexName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[INDEX]");

    Index actualResponse = client.getIndex(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetIndexRequest actualRequest = ((GetIndexRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getIndexExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      IndexName name = IndexName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[INDEX]");
      client.getIndex(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getIndexTest2() throws Exception {
    Index expectedResponse =
        Index.newBuilder()
            .setName(IndexName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[INDEX]").toString())
            .addAllFields(new ArrayList<Index.IndexField>())
            .build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    String name = "name3373707";

    Index actualResponse = client.getIndex(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetIndexRequest actualRequest = ((GetIndexRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getIndexExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      String name = "name3373707";
      client.getIndex(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void deleteIndexTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    IndexName name = IndexName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[INDEX]");

    client.deleteIndex(name);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteIndexRequest actualRequest = ((DeleteIndexRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteIndexExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      IndexName name = IndexName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[INDEX]");
      client.deleteIndex(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void deleteIndexTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    String name = "name3373707";

    client.deleteIndex(name);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteIndexRequest actualRequest = ((DeleteIndexRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteIndexExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      String name = "name3373707";
      client.deleteIndex(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getFieldTest() throws Exception {
    Field expectedResponse =
        Field.newBuilder()
            .setName(FieldName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[FIELD]").toString())
            .setIndexConfig(Field.IndexConfig.newBuilder().build())
            .build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    FieldName name = FieldName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[FIELD]");

    Field actualResponse = client.getField(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetFieldRequest actualRequest = ((GetFieldRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getFieldExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      FieldName name = FieldName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[FIELD]");
      client.getField(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getFieldTest2() throws Exception {
    Field expectedResponse =
        Field.newBuilder()
            .setName(FieldName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[FIELD]").toString())
            .setIndexConfig(Field.IndexConfig.newBuilder().build())
            .build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    String name = "name3373707";

    Field actualResponse = client.getField(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetFieldRequest actualRequest = ((GetFieldRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getFieldExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      String name = "name3373707";
      client.getField(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void updateFieldTest() throws Exception {
    Field expectedResponse =
        Field.newBuilder()
            .setName(FieldName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]", "[FIELD]").toString())
            .setIndexConfig(Field.IndexConfig.newBuilder().build())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateFieldTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFirestoreAdmin.addResponse(resultOperation);

    Field field = Field.newBuilder().build();

    Field actualResponse = client.updateFieldAsync(field).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateFieldRequest actualRequest = ((UpdateFieldRequest) actualRequests.get(0));
    Assert.assertEquals(field, actualRequest.getField());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void updateFieldExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      Field field = Field.newBuilder().build();
      client.updateFieldAsync(field).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void listFieldsTest() throws Exception {
    Field responsesElement = Field.newBuilder().build();
    ListFieldsResponse expectedResponse =
        ListFieldsResponse.newBuilder()
            .setNextPageToken("")
            .addAllFields(Arrays.asList(responsesElement))
            .build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    CollectionGroupName parent = CollectionGroupName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]");

    ListFieldsPagedResponse pagedListResponse = client.listFields(parent);

    List<Field> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getFieldsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListFieldsRequest actualRequest = ((ListFieldsRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listFieldsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      CollectionGroupName parent =
          CollectionGroupName.of("[PROJECT]", "[DATABASE]", "[COLLECTION]");
      client.listFields(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listFieldsTest2() throws Exception {
    Field responsesElement = Field.newBuilder().build();
    ListFieldsResponse expectedResponse =
        ListFieldsResponse.newBuilder()
            .setNextPageToken("")
            .addAllFields(Arrays.asList(responsesElement))
            .build();
    mockFirestoreAdmin.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListFieldsPagedResponse pagedListResponse = client.listFields(parent);

    List<Field> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getFieldsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListFieldsRequest actualRequest = ((ListFieldsRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listFieldsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listFields(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void exportDocumentsTest() throws Exception {
    ExportDocumentsResponse expectedResponse =
        ExportDocumentsResponse.newBuilder().setOutputUriPrefix("outputUriPrefix499858205").build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("exportDocumentsTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFirestoreAdmin.addResponse(resultOperation);

    DatabaseName name = DatabaseName.of("[PROJECT]", "[DATABASE]");

    ExportDocumentsResponse actualResponse = client.exportDocumentsAsync(name).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ExportDocumentsRequest actualRequest = ((ExportDocumentsRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void exportDocumentsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      DatabaseName name = DatabaseName.of("[PROJECT]", "[DATABASE]");
      client.exportDocumentsAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void exportDocumentsTest2() throws Exception {
    ExportDocumentsResponse expectedResponse =
        ExportDocumentsResponse.newBuilder().setOutputUriPrefix("outputUriPrefix499858205").build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("exportDocumentsTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFirestoreAdmin.addResponse(resultOperation);

    String name = "name3373707";

    ExportDocumentsResponse actualResponse = client.exportDocumentsAsync(name).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ExportDocumentsRequest actualRequest = ((ExportDocumentsRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void exportDocumentsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      String name = "name3373707";
      client.exportDocumentsAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void importDocumentsTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("importDocumentsTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFirestoreAdmin.addResponse(resultOperation);

    DatabaseName name = DatabaseName.of("[PROJECT]", "[DATABASE]");

    client.importDocumentsAsync(name).get();

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ImportDocumentsRequest actualRequest = ((ImportDocumentsRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void importDocumentsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      DatabaseName name = DatabaseName.of("[PROJECT]", "[DATABASE]");
      client.importDocumentsAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void importDocumentsTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("importDocumentsTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFirestoreAdmin.addResponse(resultOperation);

    String name = "name3373707";

    client.importDocumentsAsync(name).get();

    List<AbstractMessage> actualRequests = mockFirestoreAdmin.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ImportDocumentsRequest actualRequest = ((ImportDocumentsRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void importDocumentsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockFirestoreAdmin.addException(exception);

    try {
      String name = "name3373707";
      client.importDocumentsAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
}
package org.helianto.task.domain;

import java.util.Date;
import java.util.Set;

import javax.persistence.DiscriminatorValue;
import javax.persistence.Lob;
import javax.persistence.OneToMany;
import javax.persistence.Transient;

import org.helianto.core.domain.Entity;
import org.helianto.task.def.ReportFolderContentType;
import org.helianto.task.def.Resolution2;
import org.helianto.task.domain.ReportFolder;
import org.helianto.user.domain.User;

import com.fasterxml.jackson.annotation.JsonIgnore;

/**
 * Projects.
 *
 * <p>A {@link ReportFolder} specialization (discriminator "J") that adds the
 * classic project-charter text sections (benefits, assumptions, deliverables,
 * constraints, tools) plus an effort {@code estimate}.</p>
 *
 * @author mauriciofernandesdecastro
 */
@javax.persistence.Entity
@DiscriminatorValue("J")
public class Project
    extends ReportFolder {

    private static final long serialVersionUID = 1L;

    // Free-text charter sections, persisted as large objects.
    @Lob
    private String benefits;

    @Lob
    private String assumptions;

    @Lob
    private String deliverables;

    @Lob
    private String constraints;

    @Lob
    private String tools;

    // Effort estimate; persistent (no @Transient), so it must participate in merge().
    private int estimate;

    // Not persisted: transient marker used by callers, never merged.
    @Transient
    private Date checkinDate;

    /**
     * Default constructor.
     */
    public Project() {
        this(null, "");
    }

    /**
     * Key constructor.
     *
     * @param entity owning entity
     * @param folderCode natural key within the entity
     */
    public Project(Entity entity, String folderCode) {
        super(entity, folderCode);
        setContentTypeAsEnum(ReportFolderContentType.PORTFOLIO);
    }

    /**
     * Create constructor.
     *
     * @param entityId
     * @param ownerId
     * @param categoryId
     * @param partnerId
     * @param userGroupId
     */
    public Project(int entityId
            , int ownerId
            , int categoryId
            , int partnerId
            , int userGroupId) {
        super();
        setEntityId(entityId);
        setOwnerId(ownerId);
        setCategoryId(categoryId);
        setPartnerId(partnerId);
        setUserGroupId(userGroupId);
    }

    /**
     * Form constructor.
     *
     * @param id
     * @param folderCode
     * @param folderName
     * @param folderDecorationUrl
     * @param patternPrefix
     * @param numberOfDigits
     * @param contentType
     * @param content
     * @param encoding
     * @param ownerId
     * @param reportNumberPattern
     * @param patternSuffix
     * @param parsedContent
     * @param categoryId
     * @param privacyLevel
     * @param zIndex
     * @param partnerId
     * @param userGroupId
     * @param folderCaption
     * @param parentPath
     * @param nature
     * @param resolution
     * @param traceabilityItems
     * @param startDate
     * @param endDate
     * @param volumeTags
     * @param categoryOverrideAllowed
     * @param benefits
     * @param assumptions
     * @param deliverables
     * @param constraints
     * @param tools
     * @param estimate
     */
    public Project(int id
            , String folderCode
            , String folderName
            , String folderDecorationUrl
            , String patternPrefix
            , Integer numberOfDigits
            , char contentType
            , byte[] content
            , String encoding
            , Integer ownerId
            , String reportNumberPattern
            , String patternSuffix
            , String parsedContent
            , Integer categoryId
            , Character privacyLevel
            , String zIndex
            , Integer partnerId
            , Integer userGroupId
            , String folderCaption
            , String parentPath
            , String nature
            , Resolution2 resolution
            , String traceabilityItems
            , Date startDate
            , Date endDate
            , String volumeTags
            , Boolean categoryOverrideAllowed
            , String benefits
            , String assumptions
            , String deliverables
            , String constraints
            , String tools
            , int estimate) {
        super(id, folderCode, folderName, folderDecorationUrl, patternPrefix,
                numberOfDigits, contentType, content, encoding, ownerId, reportNumberPattern,
                patternSuffix, parsedContent, categoryId, privacyLevel, zIndex,
                partnerId, userGroupId, folderCaption, parentPath, nature, resolution,
                traceabilityItems, startDate, endDate, volumeTags, categoryOverrideAllowed);
        setBenefits(benefits);
        setAssumptions(assumptions);
        setDeliverables(deliverables);
        setConstraints(constraints);
        setTools(tools);
        setEstimate(estimate);
    }

    public Project(User user, char contentType) {
        super(user, contentType);
    }

    public Project(User user) {
        super(user);
    }

    public String getBenefits() {
        return benefits;
    }
    public void setBenefits(String benefits) {
        this.benefits = benefits;
    }

    public String getAssumptions() {
        return assumptions;
    }
    public void setAssumptions(String assumptions) {
        this.assumptions = assumptions;
    }

    public String getDeliverables() {
        return deliverables;
    }
    public void setDeliverables(String deliverables) {
        this.deliverables = deliverables;
    }

    public String getConstraints() {
        return constraints;
    }
    public void setConstraints(String constraints) {
        this.constraints = constraints;
    }

    public String getTools() {
        return tools;
    }
    public void setTools(String tools) {
        this.tools = tools;
    }

    public int getEstimate() {
        return estimate;
    }
    public void setEstimate(int estimate) {
        this.estimate = estimate;
    }

    public Date getCheckinDate() {
        return checkinDate;
    }
    public void setCheckinDate(Date checkinDate) {
        this.checkinDate = checkinDate;
    }

    /**
     * Merger.
     *
     * <p>Copies all persistent Project-level fields from {@code command} onto
     * this instance, after delegating the inherited fields to the superclass.
     * The transient {@code checkinDate} is deliberately not merged.</p>
     *
     * @param command source of the new field values
     * @return this instance, for chaining
     */
    public Project merge(Project command) {
        super.merge(command);
        setBenefits(command.getBenefits());
        setAssumptions(command.getAssumptions());
        setDeliverables(command.getDeliverables());
        setConstraints(command.getConstraints());
        setTools(command.getTools());
        // FIX: estimate is a persistent field set by the form constructor but was
        // previously dropped by merge(); copy it like the other Project fields.
        setEstimate(command.getEstimate());
        return this;
    }

}
package edu.rice.rubis.servlets;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Build the html page with the list of all items for given category and region.
 * @author <a href="mailto:cecchet@rice.edu">Emmanuel Cecchet</a> and <a href="mailto:julie.marguerite@inrialpes.fr">Julie Marguerite</a>
 * @version 1.0
 */
public class SearchItemsByRegion extends RubisHttpServlet
{

  /** Size of this servlet's database connection pool (configured in Config). */
  public int getPoolSize()
  {
    return Config.SearchItemsByRegionPoolSize;
  }

  /**
   * Close both statement and connection.
   * Per the JDBC contract, closing the PreparedStatement also closes any
   * ResultSet it produced, so the ResultSet is not closed separately.
   *
   * @param stmt statement to close (may be null)
   * @param conn pooled connection to release (may be null)
   */
  private void closeConnection(PreparedStatement stmt, Connection conn)
  {
    try
    {
      if (stmt != null)
        stmt.close(); // close statement
      if (conn != null)
        releaseConnection(conn);
    }
    catch (Exception ignore)
    {
      // Best effort: failures while releasing resources are deliberately ignored.
    }
  }

  /**
   * Display an error message.
   * @param errorMsg the error message value
   * @param sp printer for the response page
   */
  private void printError(String errorMsg, ServletPrinter sp)
  {
    sp.printHTMLheader("RUBiS ERROR: SearchItemsByRegion");
    sp.printHTML(
      "<h2>Your request has not been processed due to the following error :</h2><br>");
    sp.printHTML(errorMsg);
    sp.printHTMLfooter();
  }

  /**
   * List items in the given category for the given region.
   *
   * @param categoryId category to search in
   * @param regionId   sellers' region to restrict to
   * @param page       zero-based page index
   * @param nbOfItems  page size (also used as the LIMIT row count)
   * @param sp         printer for the response page
   */
  private void itemList(
    Integer categoryId,
    Integer regionId,
    int page,
    int nbOfItems,
    ServletPrinter sp)
  {
    String itemName, endDate;
    int itemId, nbOfBids = 0;
    float maxBid;
    ResultSet rs = null;
    PreparedStatement stmt = null;
    Connection conn = null;

    // get the list of items; LIMIT ?,? implements the pagination window
    try
    {
      conn = getConnection();
      stmt = conn.prepareStatement(
        "SELECT items.name, items.id, items.end_date, items.max_bid, items.nb_of_bids, items.initial_price FROM items,users WHERE items.category=? AND items.seller=users.id AND users.region=? AND end_date>=NOW() ORDER BY items.end_date ASC LIMIT ?,?");
      stmt.setInt(1, categoryId.intValue());
      stmt.setInt(2, regionId.intValue());
      stmt.setInt(3, page * nbOfItems);
      stmt.setInt(4, nbOfItems);
      rs = stmt.executeQuery();
    }
    catch (Exception e)
    {
      sp.printHTML("Failed to execute Query for items in region: " + e);
      closeConnection(stmt, conn);
      return;
    }
    try
    {
      if (!rs.first())
      {
        // Empty result: on page 0 there are simply no items; on later pages we
        // still offer a link back to the previous page.
        if (page == 0)
        {
          sp.printHTML(
            "<h3>Sorry, but there is no items in this category for this region.</h3><br>");
        }
        else
        {
          sp.printHTML(
            "<h3>Sorry, but there is no more items in this category for this region.</h3><br>");
          sp.printItemHeader();
          sp.printItemFooter(
            "<a href=\"/rubis_servlets/servlet/edu.rice.rubis.servlets.SearchItemsByRegion?category="
              + categoryId
              + "&region="
              + regionId
              + "&page="
              + (page - 1)
              + "&nbOfItems="
              + nbOfItems
              + "\">Previous page</a>",
            "");
        }
        closeConnection(stmt, conn);
        return;
      }

      sp.printItemHeader();
      do
      {
        itemName = rs.getString("name");
        itemId = rs.getInt("id");
        endDate = rs.getString("end_date");
        maxBid = rs.getFloat("max_bid");
        nbOfBids = rs.getInt("nb_of_bids");
        float initialPrice = rs.getFloat("initial_price");
        // An item with no bids yet is displayed at its initial price.
        if (maxBid < initialPrice)
          maxBid = initialPrice;
        sp.printItem(itemName, itemId, maxBid, nbOfBids, endDate);
      }
      while (rs.next());

      // Navigation links: page 0 only gets "Next", later pages get both.
      if (page == 0)
      {
        sp.printItemFooter(
          "",
          "<a href=\"/rubis_servlets/servlet/edu.rice.rubis.servlets.SearchItemsByRegion?category="
            + categoryId
            + "&region="
            + regionId
            + "&page="
            + (page + 1)
            + "&nbOfItems="
            + nbOfItems
            + "\">Next page</a>");
      }
      else
      {
        sp.printItemFooter(
          "<a href=\"/rubis_servlets/servlet/edu.rice.rubis.servlets.SearchItemsByRegion?category="
            + categoryId
            + "&region="
            + regionId
            + "&page="
            + (page - 1)
            + "&nbOfItems="
            + nbOfItems
            + "\">Previous page</a>",
          "<a href=\"/rubis_servlets/servlet/edu.rice.rubis.servlets.SearchItemsByRegion?category="
            + categoryId
            + "&region="
            + regionId
            + "&page="
            + (page + 1)
            + "&nbOfItems="
            + nbOfItems
            + "\">Next page</a>");
      }
      closeConnection(stmt, conn);
    }
    catch (Exception e)
    {
      sp.printHTML("Exception getting item list: " + e + "<br>");
      closeConnection(stmt, conn);
    }
  }

  /**
   * Read the parameters, lookup the remote category and region and build the
   * web page with the list of items.
   *
   * @param request  category/region are mandatory; page defaults to 0 and
   *                 nbOfItems to 25 when absent
   * @param response destination of the generated page
   */
  public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws IOException, ServletException
  {
    Integer categoryId, regionId;
    Integer page;
    Integer nbOfItems;

    ServletPrinter sp = null;
    sp = new ServletPrinter(response, "SearchItemsByRegion");

    String value = request.getParameter("category");
    if ((value == null) || (value.equals("")))
    {
      printError("You must provide a category!<br>", sp);
      return;
    }
    else
      // Integer.valueOf replaces the deprecated new Integer(String) ctor;
      // behavior (including NumberFormatException on bad input) is unchanged.
      categoryId = Integer.valueOf(value);

    value = request.getParameter("region");
    if ((value == null) || (value.equals("")))
    {
      printError("You must provide a region!<br>", sp);
      return;
    }
    else
      regionId = Integer.valueOf(value);

    value = request.getParameter("page");
    if ((value == null) || (value.equals("")))
      page = Integer.valueOf(0);
    else
      page = Integer.valueOf(value);

    value = request.getParameter("nbOfItems");
    if ((value == null) || (value.equals("")))
      nbOfItems = Integer.valueOf(25);
    else
      nbOfItems = Integer.valueOf(value);

    sp.printHTMLheader("RUBiS: Search items by region");
    itemList(categoryId, regionId, page.intValue(), nbOfItems.intValue(), sp);
    sp.printHTMLfooter();
  }

  /** POST delegates to GET. */
  public void doPost(HttpServletRequest request, HttpServletResponse response)
    throws IOException, ServletException
  {
    doGet(request, response);
  }

  /**
   * Clean up the connection pool.
   */
  public void destroy()
  {
    super.destroy();
  }
}
/* * The MIT License * * Copyright 2015 Red Hat, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 */
package hudson.cli;

import hudson.FilePath;
import hudson.model.FreeStyleProject;
import hudson.model.Job;
import hudson.tasks.Shell;
import jenkins.model.Jenkins;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.JenkinsRule;

import java.io.File;

import static hudson.cli.CLICommandInvoker.Matcher.failedWith;
import static hudson.cli.CLICommandInvoker.Matcher.hasNoStandardOutput;
import static hudson.cli.CLICommandInvoker.Matcher.succeededSilently;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;

/**
 * Tests for the {@code reload-job} CLI command: permission handling, missing
 * jobs, and multi-job invocations. The general pattern: build once to confirm
 * "echo 1", rewrite config.xml on disk to "echo 2", run the command, then
 * build again to observe whether the on-disk change was picked up.
 *
 * @author pjanouse
 */
public class ReloadJobCommandTest {

    private CLICommandInvoker command;

    @Rule
    public final JenkinsRule j = new JenkinsRule();

    @Before
    public void setUp() {
        command = new CLICommandInvoker(j, "reload-job");
    }

    /** Without Job/Configure the command fails (exit 6) and the job is NOT reloaded. */
    @Test
    public void reloadJobShouldFailWithoutJobConfigurePermission() throws Exception {
        FreeStyleProject project = j.createFreeStyleProject("aProject");
        project.getBuildersList().add(new Shell("echo 1"));
        assertThat(project.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Jenkins.READ)
                .invokeWithArgs("aProject");
        assertThat(result, failedWith(6));
        assertThat(result, hasNoStandardOutput());
        assertThat(result.stderr(), containsString("ERROR: user is missing the Job/Configure permission"));
        // Still builds the pre-reload configuration.
        assertThat(project.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
    }

    /** Without Job/Read the job is invisible, so the command reports "no such job" (exit 3). */
    @Test
    public void reloadJobShouldFailWithoutJobReadPermission() throws Exception {
        FreeStyleProject project = j.createFreeStyleProject("aProject");
        project.getBuildersList().add(new Shell("echo 1"));
        assertThat(project.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("aProject");
        assertThat(result, failedWith(3));
        assertThat(result, hasNoStandardOutput());
        assertThat(result.stderr(), containsString("ERROR: No such job \u2018aProject\u2019 exists."));
        assertThat(project.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
    }

    /** With both Read and Configure the reload succeeds and new config takes effect. */
    @Test
    public void reloadJobShouldSucceed() throws Exception {
        FreeStyleProject project = j.createFreeStyleProject("aProject");
        project.getBuildersList().add(new Shell("echo 1"));
        assertThat(project.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("aProject");
        assertThat(result, succeededSilently());
        assertThat(project.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
    }

    /** A nonexistent job name fails with exit 3 and a "no such job" message. */
    @Test
    public void reloadJobShouldFailIfJobDoesNotExist() throws Exception {
        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("never_created");
        assertThat(result, failedWith(3));
        assertThat(result, hasNoStandardOutput());
        assertThat(result.stderr(), containsString("ERROR: No such job \u2018never_created\u2019 exists."));
    }

    /** A near-miss name also suggests the closest existing job. */
    @Test
    public void reloadJobShouldFailIfJobDoesNotExistButNearExists() throws Exception {
        FreeStyleProject project = j.createFreeStyleProject("never_created");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("never_created1");
        assertThat(result, failedWith(3));
        assertThat(result, hasNoStandardOutput());
        assertThat(result.stderr(), containsString("ERROR: No such job \u2018never_created1\u2019 exists. Perhaps you meant \u2018never_created\u2019?"));
    }

    /** Multiple existing jobs in one invocation are all reloaded. */
    @Test
    public void reloadJobManyShouldSucceed() throws Exception {
        FreeStyleProject project1 = j.createFreeStyleProject("aProject1");
        project1.getBuildersList().add(new Shell("echo 1"));
        FreeStyleProject project2 = j.createFreeStyleProject("aProject2");
        project2.getBuildersList().add(new Shell("echo 1"));
        FreeStyleProject project3 = j.createFreeStyleProject("aProject3");
        project3.getBuildersList().add(new Shell("echo 1"));

        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
        assertThat(project3.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project1, "echo 1", "echo 2");
        changeProjectOnTheDisc(project2, "echo 1", "echo 2");
        changeProjectOnTheDisc(project3, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("aProject1", "aProject2", "aProject3");
        assertThat(result, succeededSilently());
        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
        assertThat(project3.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
    }

    /** A missing first job yields exit 5, but the remaining jobs are still reloaded. */
    @Test
    public void reloadJobManyShouldFailIfFirstJobDoesNotExist() throws Exception {
        FreeStyleProject project1 = j.createFreeStyleProject("aProject1");
        project1.getBuildersList().add(new Shell("echo 1"));
        FreeStyleProject project2 = j.createFreeStyleProject("aProject2");
        project2.getBuildersList().add(new Shell("echo 1"));

        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project1, "echo 1", "echo 2");
        changeProjectOnTheDisc(project2, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("never_created", "aProject1", "aProject2");
        assertThat(result, failedWith(5));
        assertThat(result, hasNoStandardOutput());
        assertThat(result.stderr(), containsString("never_created: No such job \u2018never_created\u2019 exists."));
        assertThat(result.stderr(), containsString("ERROR: Error occured while performing this command, see previous stderr output."));
        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
    }

    /** Same as above but with the missing job in the middle of the argument list. */
    @Test
    public void reloadJobManyShouldFailIfMiddleJobDoesNotExist() throws Exception {
        FreeStyleProject project1 = j.createFreeStyleProject("aProject1");
        project1.getBuildersList().add(new Shell("echo 1"));
        FreeStyleProject project2 = j.createFreeStyleProject("aProject2");
        project2.getBuildersList().add(new Shell("echo 1"));

        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project1, "echo 1", "echo 2");
        changeProjectOnTheDisc(project2, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("aProject1", "never_created", "aProject2");
        assertThat(result, failedWith(5));
        assertThat(result, hasNoStandardOutput());
        assertThat(result.stderr(), containsString("never_created: No such job \u2018never_created\u2019 exists."));
        assertThat(result.stderr(), containsString("ERROR: Error occured while performing this command, see previous stderr output."));
        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
    }

    /** Same as above but with the missing job last in the argument list. */
    @Test
    public void reloadJobManyShouldFailIfLastJobDoesNotExist() throws Exception {
        FreeStyleProject project1 = j.createFreeStyleProject("aProject1");
        project1.getBuildersList().add(new Shell("echo 1"));
        FreeStyleProject project2 = j.createFreeStyleProject("aProject2");
        project2.getBuildersList().add(new Shell("echo 1"));

        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project1, "echo 1", "echo 2");
        changeProjectOnTheDisc(project2, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("aProject1", "aProject2", "never_created");
        assertThat(result, failedWith(5));
        assertThat(result, hasNoStandardOutput());
        assertThat(result.stderr(), containsString("never_created: No such job \u2018never_created\u2019 exists."));
        assertThat(result.stderr(), containsString("ERROR: Error occured while performing this command, see previous stderr output."));
        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
    }

    /** Every missing job is reported individually; existing ones still reload. */
    @Test
    public void reloadJobManyShouldFailIfMoreJobsDoNotExist() throws Exception {
        FreeStyleProject project1 = j.createFreeStyleProject("aProject1");
        project1.getBuildersList().add(new Shell("echo 1"));
        FreeStyleProject project2 = j.createFreeStyleProject("aProject2");
        project2.getBuildersList().add(new Shell("echo 1"));

        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project1, "echo 1", "echo 2");
        changeProjectOnTheDisc(project2, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("aProject1", "never_created1", "never_created2", "aProject2");
        assertThat(result, failedWith(5));
        assertThat(result, hasNoStandardOutput());
        assertThat(result.stderr(), containsString("never_created1: No such job \u2018never_created1\u2019 exists."));
        assertThat(result.stderr(), containsString("never_created2: No such job \u2018never_created2\u2019 exists."));
        assertThat(result.stderr(), containsString("ERROR: Error occured while performing this command, see previous stderr output."));
        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
    }

    /** Duplicated job names in one invocation are tolerated. */
    @Test
    public void reloadJobManyShouldSucceedEvenAJobIsSpecifiedTwice() throws Exception {
        FreeStyleProject project1 = j.createFreeStyleProject("aProject1");
        project1.getBuildersList().add(new Shell("echo 1"));
        FreeStyleProject project2 = j.createFreeStyleProject("aProject2");
        project2.getBuildersList().add(new Shell("echo 1"));

        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 1"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 1"));

        changeProjectOnTheDisc(project1, "echo 1", "echo 2");
        changeProjectOnTheDisc(project2, "echo 1", "echo 2");

        final CLICommandInvoker.Result result = command
                .authorizedTo(Job.READ, Job.CONFIGURE, Jenkins.READ)
                .invokeWithArgs("aProject1", "aProject2", "aProject1");
        assertThat(result, succeededSilently());
        assertThat(project1.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
        assertThat(project2.scheduleBuild2(0).get().getLog(), containsString("echo 2"));
    }

    /**
     * Modify a project directly on the disc
     *
     * @param project modified project
     * @param oldstr old configuration item - for rewrite
     * @param newstr rew configuration item - after rewrite
     * @throws Exception if an issue occurred
     */
    private void changeProjectOnTheDisc(final FreeStyleProject project, final String oldstr,
            final String newstr) throws Exception {
        // Bypass the Jenkins API on purpose: edit config.xml so that only an
        // explicit reload can pick up the change.
        FilePath fp = new FilePath(new File(project.getRootDir()+"/config.xml"));
        fp.write(fp.readToString().replace(oldstr, newstr), null);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.LocalJobRunner; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.FileSplit; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.ReflectionUtils; import org.junit.Test; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * Stress tests for 
the LocalJobRunner
 */
public class TestLocalRunner {

  private static final Log LOG = LogFactory.getLog(TestLocalRunner.class);

  // Per-mapper workload calibration; index = input file id (0..5).
  // INPUT_SIZES[i] records in file i, each producing OUTPUT_SIZES[i] outputs,
  // with a 1ms sleep every SLEEP_INTERVALS[i] iterations (see StressMapper.map).
  private static int INPUT_SIZES[] = new int[] {
    50000, 500, 500, 20, 5000, 500};
  private static int OUTPUT_SIZES[] = new int[] {
    1, 500, 500, 500, 500, 500};
  private static int SLEEP_INTERVALS[] = new int[] {
    10000, 15, 15, 20, 250, 60 };

  private static class StressMapper extends
      Mapper<LongWritable, Text, LongWritable, Text> {

    // Different map tasks operate at different speeds.
    // We define behavior for 6 threads.
    private int threadId;

    // Used to ensure that the compiler doesn't optimize away
    // some code.
    public long exposedState;

    protected void setup(Context context) {
      // Get the thread num from the file number.
      FileSplit split = (FileSplit) context.getInputSplit();
      Path filePath = split.getPath();
      String name = filePath.getName();
      this.threadId = Integer.valueOf(name);

      LOG.info("Thread " + threadId + " : "
          + context.getInputSplit());
    }

    /** Map method with different behavior based on the thread id */
    public void map(LongWritable key, Text val, Context c)
        throws IOException, InterruptedException {
      // Write many values quickly.
      for (int i = 0; i < OUTPUT_SIZES[threadId]; i++) {
        c.write(new LongWritable(0), val);
        // Periodic sleep paces this mapper relative to the others.
        if (i % SLEEP_INTERVALS[threadId] == 1) {
          Thread.sleep(1);
        }
      }
    }

    protected void cleanup(Context context) {
      // Output this here, to ensure that the incrementing done in map()
      // cannot be optimized away.
      // NOTE(review): no visible code in this chunk mutates exposedState in
      // map() — confirm against the full file whether the busy loop was removed.
      LOG.debug("Busy loop counter: " + this.exposedState);
    }
  }

  /** Counts how many values arrived for each key; emits (key, count). */
  private static class CountingReducer
      extends Reducer<LongWritable, Text, LongWritable, LongWritable> {

    public void reduce(LongWritable key, Iterable<Text> vals, Context context)
        throws IOException, InterruptedException {
      long out = 0;
      for (Text val : vals) {
        out++;
      }

      context.write(key, new LongWritable(out));
    }
  }

  /** Mapper that deliberately allocates garbage and triggers a GC per record. */
  private static class GCMapper
      extends Mapper<LongWritable, Text, LongWritable, Text> {

    public void map(LongWritable key, Text val, Context c)
        throws IOException, InterruptedException {

      // Create a whole bunch of objects.
      List<Integer> lst = new ArrayList<Integer>();
      for (int i = 0; i < 20000; i++) {
        lst.add(new Integer(i));
      }

      // Actually use this list, to ensure that it isn't just optimized away.
      int sum = 0;
      for (int x : lst) {
        sum += x;
      }

      // throw away the list and run a GC.
      lst = null;
      System.gc();

      c.write(new LongWritable(sum), val);
    }
  }

  /**
   * Create a single input file in the input directory.
   * @param dirPath the directory in which the file resides
   * @param id the file id number
   * @param numRecords how many records to write to each file.
   */
  private void createInputFile(Path dirPath, int id, int numRecords)
      throws IOException {
    final String MESSAGE = "This is a line in a file: ";

    // File name is the id itself — StressMapper.setup parses it back out.
    Path filePath = new Path(dirPath, "" + id);
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);

    OutputStream os = fs.create(filePath);
    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));

    for (int i = 0; i < numRecords; i++) {
      w.write(MESSAGE + id + " " + i + "\n");
    }

    w.close();
  }

  // This is the total number of map output records we expect to generate,
  // based on input file sizes (see createMultiMapsInput()) and the behavior
  // of the different StressMapper threads.
  private static int TOTAL_RECORDS = 0;
  static {
    for (int i = 0; i < 6; i++) {
      TOTAL_RECORDS += INPUT_SIZES[i] * OUTPUT_SIZES[i];
    }
  }

  private final String INPUT_DIR = "multiMapInput";
  private final String OUTPUT_DIR = "multiMapOutput";

  /** Input dir, rooted at test.build.data when that system property is set. */
  private Path getInputPath() {
    String dataDir = System.getProperty("test.build.data");
    if (null == dataDir) {
      return new Path(INPUT_DIR);
    } else {
      return new Path(new Path(dataDir), INPUT_DIR);
    }
  }

  /** Output dir, rooted at test.build.data when that system property is set. */
  private Path getOutputPath() {
    String dataDir = System.getProperty("test.build.data");
    if (null == dataDir) {
      return new Path(OUTPUT_DIR);
    } else {
      return new Path(new Path(dataDir), OUTPUT_DIR);
    }
  }

  /**
   * Create the inputs for the MultiMaps test.
   * @return the path to the input directory.
   */
  private Path createMultiMapsInput() throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path inputPath = getInputPath();

    // Clear the input directory if it exists, first.
    if (fs.exists(inputPath)) {
      fs.delete(inputPath, true);
    }

    // Create input files, with sizes calibrated based on
    // the amount of work done in each mapper.
    for (int i = 0; i < 6; i++) {
      createInputFile(inputPath, i, INPUT_SIZES[i]);
    }

    return inputPath;
  }

  /**
   * Verify that we got the correct amount of output.
   */
  private void verifyOutput(Path outputPath) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);

    Path outputFile = new Path(outputPath, "part-r-00000");
    InputStream is = fs.open(outputFile);
    BufferedReader r = new BufferedReader(new InputStreamReader(is));

    // Should get a single line of the form "0\t(count)"
    String line = r.readLine().trim();
    assertTrue("Line does not have correct key", line.startsWith("0\t"));
    int count = Integer.valueOf(line.substring(2));
    assertEquals("Incorrect count generated!", TOTAL_RECORDS, count);

    r.close();
  }

  /**
   * Test that the GC counter actually increments when we know that we've
   * spent some time in the GC during the mapper.
   */
  @Test
  public void testGcCounter() throws Exception {
    Path inputPath = getInputPath();
    Path outputPath = getOutputPath();

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);

    // Clear input/output dirs.
    if (fs.exists(outputPath)) {
      fs.delete(outputPath, true);
    }

    if (fs.exists(inputPath)) {
      fs.delete(inputPath, true);
    }

    // Create one input file
    createInputFile(inputPath, 0, 20);

    // Now configure and run the job.
    Job job = Job.getInstance();
    job.setMapperClass(GCMapper.class);
    job.setNumReduceTasks(0);
    job.getConfiguration().set(MRJobConfig.IO_SORT_MB, "25");
    FileInputFormat.addInputPath(job, inputPath);
    FileOutputFormat.setOutputPath(job, outputPath);

    boolean ret = job.waitForCompletion(true);
    assertTrue("job failed", ret);

    // This job should have done *some* gc work.
    // It had to clean up 400,000 objects.
    // We strongly suspect this will result in a few milliseconds effort.
    Counter gcCounter = job.getCounters().findCounter(
        TaskCounter.GC_TIME_MILLIS);
    assertNotNull(gcCounter);
    assertTrue("No time spent in gc", gcCounter.getValue() > 0);
  }

  /**
   * Run a test with several mappers in parallel, operating at different
   * speeds. Verify that the correct amount of output is created.
*/ @Test(timeout=120*1000) public void testMultiMaps() throws Exception { Job job = Job.getInstance(); Path inputPath = createMultiMapsInput(); Path outputPath = getOutputPath(); Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); if (fs.exists(outputPath)) { fs.delete(outputPath, true); } job.setMapperClass(StressMapper.class); job.setReducerClass(CountingReducer.class); job.setNumReduceTasks(1); LocalJobRunner.setLocalMaxRunningMaps(job, 6); job.getConfiguration().set(MRJobConfig.IO_SORT_MB, "25"); FileInputFormat.addInputPath(job, inputPath); FileOutputFormat.setOutputPath(job, outputPath); final Thread toInterrupt = Thread.currentThread(); Thread interrupter = new Thread() { public void run() { try { Thread.sleep(120*1000); // 2m toInterrupt.interrupt(); } catch (InterruptedException ie) {} } }; LOG.info("Submitting job..."); job.submit(); LOG.info("Starting thread to interrupt main thread in 2 minutes"); interrupter.start(); LOG.info("Waiting for job to complete..."); try { job.waitForCompletion(true); } catch (InterruptedException ie) { LOG.fatal("Interrupted while waiting for job completion", ie); for (int i = 0; i < 10; i++) { LOG.fatal("Dumping stacks"); ReflectionUtils.logThreadInfo(LOG, "multimap threads", 0); Thread.sleep(1000); } throw ie; } LOG.info("Job completed, stopping interrupter"); interrupter.interrupt(); try { interrupter.join(); } catch (InterruptedException ie) { // it might interrupt us right as we interrupt it } LOG.info("Verifying output"); verifyOutput(outputPath); } /** * Run a test with a misconfigured number of mappers. * Expect failure. 
*/ @Test public void testInvalidMultiMapParallelism() throws Exception { Job job = Job.getInstance(); Path inputPath = createMultiMapsInput(); Path outputPath = getOutputPath(); Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); if (fs.exists(outputPath)) { fs.delete(outputPath, true); } job.setMapperClass(StressMapper.class); job.setReducerClass(CountingReducer.class); job.setNumReduceTasks(1); LocalJobRunner.setLocalMaxRunningMaps(job, -6); FileInputFormat.addInputPath(job, inputPath); FileOutputFormat.setOutputPath(job, outputPath); boolean success = job.waitForCompletion(true); assertFalse("Job succeeded somehow", success); } /** An IF that creates no splits */ private static class EmptyInputFormat extends InputFormat<Object, Object> { public List<InputSplit> getSplits(JobContext context) { return new ArrayList<InputSplit>(); } public RecordReader<Object, Object> createRecordReader(InputSplit split, TaskAttemptContext context) { return new EmptyRecordReader(); } } private static class EmptyRecordReader extends RecordReader<Object, Object> { public void initialize(InputSplit split, TaskAttemptContext context) { } public Object getCurrentKey() { return new Object(); } public Object getCurrentValue() { return new Object(); } public float getProgress() { return 0.0f; } public void close() { } public boolean nextKeyValue() { return false; } } /** Test case for zero mappers */ @Test public void testEmptyMaps() throws Exception { Job job = Job.getInstance(); Path outputPath = getOutputPath(); Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); if (fs.exists(outputPath)) { fs.delete(outputPath, true); } job.setInputFormatClass(EmptyInputFormat.class); job.setNumReduceTasks(1); FileOutputFormat.setOutputPath(job, outputPath); boolean success = job.waitForCompletion(true); assertTrue("Empty job should work", success); } /** @return the directory where numberfiles are written (mapper inputs) */ private 
Path getNumberDirPath() { return new Path(getInputPath(), "numberfiles"); } /** * Write out an input file containing an integer. * * @param fileNum the file number to write to. * @param value the value to write to the file * @return the path of the written file. */ private Path makeNumberFile(int fileNum, int value) throws IOException { Path workDir = getNumberDirPath(); Path filePath = new Path(workDir, "file" + fileNum); Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); OutputStream os = fs.create(filePath); BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os)); w.write("" + value); w.close(); return filePath; } /** * Each record received by this mapper is a number 'n'. * Emit the values [0..n-1] */ public static class SequenceMapper extends Mapper<LongWritable, Text, Text, NullWritable> { public void map(LongWritable k, Text v, Context c) throws IOException, InterruptedException { int max = Integer.valueOf(v.toString()); for (int i = 0; i < max; i++) { c.write(new Text("" + i), NullWritable.get()); } } } private final static int NUMBER_FILE_VAL = 100; /** * Tally up the values and ensure that we got as much data * out as we put in. * Each mapper generated 'NUMBER_FILE_VAL' values (0..NUMBER_FILE_VAL-1). * Verify that across all our reducers we got exactly this much * data back. 
*/ private void verifyNumberJob(int numMaps) throws Exception { Path outputDir = getOutputPath(); Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); FileStatus [] stats = fs.listStatus(outputDir); int valueSum = 0; for (FileStatus f : stats) { FSDataInputStream istream = fs.open(f.getPath()); BufferedReader r = new BufferedReader(new InputStreamReader(istream)); String line = null; while ((line = r.readLine()) != null) { valueSum += Integer.valueOf(line.trim()); } r.close(); } int maxVal = NUMBER_FILE_VAL - 1; int expectedPerMapper = maxVal * (maxVal + 1) / 2; int expectedSum = expectedPerMapper * numMaps; LOG.info("expected sum: " + expectedSum + ", got " + valueSum); assertEquals("Didn't get all our results back", expectedSum, valueSum); } /** * Run a test which creates a SequenceMapper / IdentityReducer * job over a set of generated number files. */ private void doMultiReducerTest(int numMaps, int numReduces, int parallelMaps, int parallelReduces) throws Exception { Path in = getNumberDirPath(); Path out = getOutputPath(); // Clear data from any previous tests. 
Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); if (fs.exists(out)) { fs.delete(out, true); } if (fs.exists(in)) { fs.delete(in, true); } for (int i = 0; i < numMaps; i++) { makeNumberFile(i, 100); } Job job = Job.getInstance(); job.setNumReduceTasks(numReduces); job.setMapperClass(SequenceMapper.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(NullWritable.class); FileInputFormat.addInputPath(job, in); FileOutputFormat.setOutputPath(job, out); LocalJobRunner.setLocalMaxRunningMaps(job, parallelMaps); LocalJobRunner.setLocalMaxRunningReduces(job, parallelReduces); boolean result = job.waitForCompletion(true); assertTrue("Job failed!!", result); verifyNumberJob(numMaps); } @Test public void testOneMapMultiReduce() throws Exception { doMultiReducerTest(1, 2, 1, 1); } @Test public void testOneMapMultiParallelReduce() throws Exception { doMultiReducerTest(1, 2, 1, 2); } @Test public void testMultiMapOneReduce() throws Exception { doMultiReducerTest(4, 1, 2, 1); } @Test public void testMultiMapMultiReduce() throws Exception { doMultiReducerTest(4, 4, 2, 2); } }
package no.arkivlab.innsyn.models.n5;

import java.io.Serializable;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import org.hibernate.annotations.LazyCollection;
import org.hibernate.annotations.LazyCollectionOption;

import com.fasterxml.jackson.annotation.JsonBackReference;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonManagedReference;

/**
 * JPA entity for the Noark 5 "mappe" (File) structure, mapped to the
 * {@code file} table. Field-level comments reference the Noark 5 metadata
 * element identifiers (M001, M020, ...) each column implements.
 * <p>
 * Uses {@code InheritanceType.JOINED} so subtypes (e.g. case files) are
 * stored in joined tables keyed on {@code pk_file_id}.
 */
@Entity
@Table(name = "file")
@Inheritance(strategy = InheritanceType.JOINED)
public class File implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Surrogate primary key; generated, never updatable. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    @Column(name = "pk_file_id", nullable = false, insertable = true, updatable = false)
    protected Long id;

    /** M001 - systemID (xs:string) */
    @Column(name = "system_id")
    protected String systemId;

    /** M003 - mappeID (xs:string) */
    @Column(name = "file_id")
    protected String fileId;

    /** M020 - tittel (xs:string) */
    @Column(name = "title")
    protected String title;

    /** M025 - offentligTittel (xs:string) */
    @Column(name = "official_title")
    protected String officialTitle;

    /** M021 - beskrivelse (xs:string) */
    @Column(name = "description")
    protected String description;

    /** M300 - dokumentmedium (xs:string) */
    @Column(name = "document_medium")
    protected String documentMedium;

    /** M600 - opprettetDato (xs:dateTime) */
    @Column(name = "created_date")
    @Temporal(TemporalType.TIMESTAMP)
    protected Date createdDate;

    /** M601 - opprettetAv (xs:string) */
    @Column(name = "created_by")
    protected String createdBy;

    /** Not Noark5, added to help keep integrity from n4 to n5 */
    @Column(name = "created_by_id")
    protected Integer createdById;

    /** M602 - avsluttetDato (xs:dateTime) */
    @Column(name = "finalised_date")
    @Temporal(TemporalType.TIMESTAMP)
    protected Date finalisedDate;

    /** M603 - avsluttetAv (xs:string) */
    @Column(name = "finalised_by")
    protected String finalisedBy;

    // Link to StorageLocation
    @ManyToOne
    @JoinColumn(name = "file_storage_location_id",
            referencedColumnName = "pk_storage_location_id")
    @JsonBackReference
    protected StorageLocation referenceStorageLocation;

    // Links to Keywords (owning side of the file_keyword join table)
    @ManyToMany
    @JoinTable(name = "file_keyword",
            joinColumns = @JoinColumn(name = "f_pk_file_id",
                    referencedColumnName = "pk_file_id"),
            inverseJoinColumns = @JoinColumn(name = "f_pk_keyword_id",
                    referencedColumnName = "pk_keyword_id"))
    @JsonBackReference
    protected Set<Keyword> referenceKeyword = new HashSet<Keyword>();

    // Link to parent File (self-referencing hierarchy)
    @ManyToOne
    @JsonBackReference
    protected File referenceParentFile;

    // Links to child Files (inverse side of the parent link)
    @OneToMany(mappedBy = "referenceParentFile")
    @LazyCollection(LazyCollectionOption.EXTRA)
    @JsonManagedReference
    @JsonIgnore
    protected Set<File> referenceChildFile = new HashSet<File>();

    // Link to Series
    @ManyToOne
    @JoinColumn(name = "file_series_id", referencedColumnName = "pk_series_id")
    @JsonBackReference
    protected Series referenceSeries;

    // Link to Class
    @ManyToOne
    @JoinColumn(name = "file_class_id", referencedColumnName = "pk_class_id")
    @JsonBackReference
    protected Class referenceClass;

    // Links to Records
    @OneToMany(mappedBy = "referenceFile")
    @LazyCollection(LazyCollectionOption.EXTRA)
    @JsonManagedReference
    @JsonIgnore
    protected Set<Record> referenceRecord = new HashSet<Record>();

    /** @return the generated primary key (no setter: assigned by the provider). */
    public Long getId() {
        return id;
    }

    public String getSystemId() {
        return systemId;
    }

    public void setSystemId(String systemId) {
        this.systemId = systemId;
    }

    public String getFileId() {
        return fileId;
    }

    public void setFileId(String fileId) {
        this.fileId = fileId;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getOfficialTitle() {
        return officialTitle;
    }

    public void setOfficialTitle(String officialTitle) {
        this.officialTitle = officialTitle;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getDocumentMedium() {
        return documentMedium;
    }

    public void setDocumentMedium(String documentMedium) {
        this.documentMedium = documentMedium;
    }

    public Date getCreatedDate() {
        return createdDate;
    }

    public void setCreatedDate(Date createdDate) {
        this.createdDate = createdDate;
    }

    public String getCreatedBy() {
        return createdBy;
    }

    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }

    public Integer getCreatedById() {
        return createdById;
    }

    public void setCreatedById(Integer createdById) {
        this.createdById = createdById;
    }

    public Date getFinalisedDate() {
        return finalisedDate;
    }

    public void setFinalisedDate(Date finalisedDate) {
        this.finalisedDate = finalisedDate;
    }

    public String getFinalisedBy() {
        return finalisedBy;
    }

    public void setFinalisedBy(String finalisedBy) {
        this.finalisedBy = finalisedBy;
    }

    public StorageLocation getReferenceStorageLocation() {
        return referenceStorageLocation;
    }

    public void setReferenceStorageLocation(
            StorageLocation referenceStorageLocation) {
        this.referenceStorageLocation = referenceStorageLocation;
    }

    public Set<Keyword> getReferenceKeyword() {
        return referenceKeyword;
    }

    public void setReferenceKeyword(Set<Keyword> referenceKeyword) {
        this.referenceKeyword = referenceKeyword;
    }

    public File getReferenceParentFile() {
        return referenceParentFile;
    }

    public void setReferenceParentFile(File referenceParentFile) {
        this.referenceParentFile = referenceParentFile;
    }

    public Set<File> getReferenceChildFile() {
        return referenceChildFile;
    }

    public void setReferenceChildFile(Set<File> referenceChildFile) {
        this.referenceChildFile = referenceChildFile;
    }

    public Series getReferenceSeries() {
        return referenceSeries;
    }

    public void setReferenceSeries(Series referenceSeries) {
        this.referenceSeries = referenceSeries;
    }

    public Class getReferenceClass() {
        return referenceClass;
    }

    public void setReferenceClass(Class referenceClass) {
        this.referenceClass = referenceClass;
    }

    public Set<Record> getReferenceRecord() {
        return referenceRecord;
    }

    public void setReferenceRecord(Set<Record> referenceRecord) {
        this.referenceRecord = referenceRecord;
    }

    @Override
    public String toString() {
        // Scalar fields only: relationship fields are excluded to avoid
        // lazy-loading and cycles in the self-referencing hierarchy.
        // Fix: the original omitted the closing "]".
        return "File [id=" + id + ", systemId=" + systemId + ", fileId="
                + fileId + ", title=" + title + ", officialTitle="
                + officialTitle + ", description=" + description
                + ", documentMedium=" + documentMedium + ", createdDate="
                + createdDate + ", createdBy=" + createdBy
                + ", finalisedDate=" + finalisedDate + ", finalisedBy="
                + finalisedBy + "]";
    }
}
package io.corbel.iam.service;

import static org.fest.assertions.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.anySet;
import static org.mockito.Mockito.*;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;

import io.corbel.lib.queries.request.JsonAggregationResultsFactory;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.dao.DataIntegrityViolationException;

import io.corbel.iam.exception.DomainAlreadyExists;
import io.corbel.iam.model.Domain;
import io.corbel.iam.model.Scope;
import io.corbel.iam.repository.DomainRepository;

import com.google.common.collect.Sets;

/**
 * Unit tests for {@link DefaultDomainService}: scope-allowance checks,
 * OAuth service configuration lookups, and CRUD delegation to the
 * repository/events collaborators.
 *
 * @author Alexander De Leon
 *
 */
@RunWith(MockitoJUnitRunner.class)
public class DefaultDomainServiceTest {

    public static final String SCOPE_A = "scopeA";
    public static final String SCOPE_B = "scopeB";
    public static final String SCOPE_C = "scopeC";

    private static final String TEST_DOMAIN_ID = "some_domain";
    private static final Domain TEST_DOMAIN = new Domain();

    Scope scopeA;
    Scope scopeB;
    Scope scopeC;

    @Mock private DomainRepository domainRepositoryMock;
    @Mock private DefaultScopeService defaultScopeServiceMock;
    @Mock private EventsService eventsServiceMock;

    private DefaultDomainService domainService;

    @Before
    public void setup() {
        scopeA = mock(Scope.class);
        scopeB = mock(Scope.class);
        scopeC = mock(Scope.class);
        when(scopeA.getId()).thenReturn(SCOPE_A);
        when(scopeB.getId()).thenReturn(SCOPE_B);
        when(scopeC.getId()).thenReturn(SCOPE_C);
        domainService = new DefaultDomainService(domainRepositoryMock, defaultScopeServiceMock, eventsServiceMock,
                new JsonAggregationResultsFactory());
    }

    // Consistency fix throughout: use the SCOPE_B constant instead of the
    // duplicated "scopeB" string literal.

    /** A scope not granted to the domain must not be allowed. */
    @Test
    public void testNotAllowedScopes() {
        Domain domain = mock(Domain.class);
        when(domain.getScopes()).thenReturn(Sets.newHashSet(SCOPE_A, SCOPE_B));
        when(domainRepositoryMock.findOne(TEST_DOMAIN_ID)).thenReturn(domain);
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A, SCOPE_B))).thenReturn(
                new HashSet<>(Arrays.asList(scopeA, scopeB)));
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_C))).thenReturn(new HashSet<>(Arrays.asList(scopeC)));
        assertThat(domainService.scopesAllowedInDomain(Sets.newHashSet(SCOPE_C), TEST_DOMAIN)).isFalse();
    }

    /** Expansion failure (unknown scopes) is treated as "not allowed". */
    @SuppressWarnings("unchecked")
    @Test
    public void testNotExistScopes() {
        Domain domain = mock(Domain.class);
        when(domain.getScopes()).thenReturn(Sets.newHashSet(SCOPE_A, SCOPE_B));
        when(domainRepositoryMock.findOne(TEST_DOMAIN_ID)).thenReturn(domain);
        when(defaultScopeServiceMock.expandScopes(anySet())).thenThrow(IllegalStateException.class);
        assertThat(domainService.scopesAllowedInDomain(Sets.newHashSet(SCOPE_C), TEST_DOMAIN)).isFalse();
    }

    /** A mixed set with any disallowed scope must be rejected as a whole. */
    @Test
    public void testNotAllowedSomeScopes() {
        Domain domain = mock(Domain.class);
        when(domain.getScopes()).thenReturn(Sets.newHashSet(SCOPE_A, SCOPE_B));
        when(domainRepositoryMock.findOne(TEST_DOMAIN_ID)).thenReturn(domain);
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A, SCOPE_B))).thenReturn(
                new HashSet<>(Arrays.asList(scopeA, scopeB)));
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A, SCOPE_C))).thenReturn(
                new HashSet<>(Arrays.asList(scopeA, scopeC)));
        assertThat(domainService.scopesAllowedInDomain(Sets.newHashSet(SCOPE_A, SCOPE_C), TEST_DOMAIN)).isFalse();
    }

    /** A subset of the domain's own scopes is allowed. */
    @Test
    public void testAllowedScopes() {
        Domain domain = mock(Domain.class);
        when(domain.getScopes()).thenReturn(Sets.newHashSet(SCOPE_A, SCOPE_B));
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A))).thenReturn(new HashSet<>(Arrays.asList(scopeA)));
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A, SCOPE_B))).thenReturn(
                new HashSet<>(Arrays.asList(scopeA, scopeB)));
        assertThat(domainService.scopesAllowedInDomain(Sets.newHashSet(SCOPE_A), domain)).isTrue();
    }

    /** A domain with an empty scope set allows nothing. */
    @Test
    public void testDomainWithNoScopes() {
        Domain domain = mock(Domain.class);
        when(domain.getScopes()).thenReturn(Sets.<String>newHashSet());
        when(domainRepositoryMock.findOne(TEST_DOMAIN_ID)).thenReturn(domain);
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A))).thenReturn(new HashSet<>(Arrays.asList(scopeA)));
        assertThat(domainService.scopesAllowedInDomain(Sets.newHashSet(SCOPE_A), TEST_DOMAIN)).isFalse();
    }

    /** A domain with null scopes allows nothing. */
    @Test
    public void testDomainNullScopes() {
        Domain domain = mock(Domain.class);
        when(domain.getScopes()).thenReturn(null);
        when(domainRepositoryMock.findOne(TEST_DOMAIN_ID)).thenReturn(domain);
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A))).thenReturn(new HashSet<>(Arrays.asList(scopeA)));
        assertThat(domainService.scopesAllowedInDomain(Sets.newHashSet(SCOPE_A), TEST_DOMAIN)).isFalse();
    }

    /** Requesting null scopes is trivially allowed. */
    @Test
    public void testNullScopes() {
        Domain domain = mock(Domain.class);
        when(domain.getScopes()).thenReturn(Sets.newHashSet(SCOPE_A, SCOPE_B));
        when(domainRepositoryMock.findOne(TEST_DOMAIN_ID)).thenReturn(domain);
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A, SCOPE_B))).thenReturn(
                new HashSet<>(Arrays.asList(scopeA, scopeB)));
        assertThat(domainService.scopesAllowedInDomain(null, TEST_DOMAIN)).isTrue();
    }

    /** Requesting an empty scope set is trivially allowed. */
    @Test
    public void testEmptyScopes() {
        Domain domain = mock(Domain.class);
        when(domain.getScopes()).thenReturn(Sets.newHashSet(SCOPE_A, SCOPE_B));
        when(domainRepositoryMock.findOne(TEST_DOMAIN_ID)).thenReturn(domain);
        when(defaultScopeServiceMock.expandScopes(Sets.newHashSet(SCOPE_A, SCOPE_B))).thenReturn(
                new HashSet<>(Arrays.asList(scopeA, scopeB)));
        assertThat(domainService.scopesAllowedInDomain(Sets.<String>newHashSet(), TEST_DOMAIN)).isTrue();
    }

    /** An OAuth service with a configuration entry is allowed. */
    @SuppressWarnings("unchecked")
    @Test
    public void testOAuthServiceAllowed() {
        String oAuthService = "testService";
        Domain domain = mock(Domain.class);
        HashMap map = mock(HashMap.class);
        when(map.get(oAuthService)).thenReturn(new HashMap<String, String>());
        when(domain.getAuthConfigurations()).thenReturn(map);
        assertThat(domainService.oAuthServiceAllowedInDomain(oAuthService, domain)).isEqualTo(true);
    }

    /** An OAuth service with no configuration entry is rejected. */
    @SuppressWarnings("unchecked")
    @Test
    public void testOAuthServiceNotAllowed() {
        String oAuthService = "testService";
        Domain domain = mock(Domain.class);
        when(domain.getAuthConfigurations()).thenReturn(null);
        HashMap map = mock(HashMap.class);
        when(map.get(oAuthService)).thenReturn(null);
        when(domain.getAuthConfigurations()).thenReturn(map);
        when(domainRepositoryMock.findOne(TEST_DOMAIN_ID)).thenReturn(domain);
        assertThat(domainService.oAuthServiceAllowedInDomain(oAuthService, TEST_DOMAIN)).isEqualTo(false);
    }

    /** insert() delegates to the repository with the same domain. */
    @Test
    public void testCreate() throws DomainAlreadyExists {
        Domain domain = new Domain();
        domain.setId(TEST_DOMAIN_ID);

        ArgumentCaptor<Domain> domainCaptor = ArgumentCaptor.forClass(Domain.class);

        domainService.insert(domain);

        verify(domainRepositoryMock).insert(domainCaptor.capture());

        assertEquals(TEST_DOMAIN_ID, domainCaptor.getValue().getId());
    }

    /** A duplicate-key violation surfaces as DomainAlreadyExists. */
    @SuppressWarnings("unchecked")
    @Test(expected = DomainAlreadyExists.class)
    public void testCreateAlreadyExisting() throws DomainAlreadyExists {
        Mockito.doThrow(DataIntegrityViolationException.class).when(domainRepositoryMock).insert(any(Domain.class));
        domainService.insert(TEST_DOMAIN);
    }

    /** update() delegates to a repository patch. */
    @Test
    public void testUpdate() {
        domainService.update(TEST_DOMAIN);
        verify(domainRepositoryMock).patch(TEST_DOMAIN);
    }

    /** delete() removes the domain and fires both domain events. */
    @Test
    public void testDelete() {
        domainService.delete(TEST_DOMAIN.getId());
        verify(domainRepositoryMock).delete(TEST_DOMAIN.getId());
        verify(eventsServiceMock).sendDomainDeletedEvent(TEST_DOMAIN.getId());
        verify(eventsServiceMock).sendUpdateDomainPublicScopesEvent(TEST_DOMAIN.getId());
        verifyNoMoreInteractions(domainRepositoryMock, eventsServiceMock);
    }

}
package org.fhir.delphi; import java.util.List; import org.hl7.fhir.utilities.Utilities; /* Copyright (c) 2011+, HL7, Inc All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ public class GeneratorUtils { public static class NamedElementGroup { public String getName() { return name; } public void setName(String name) { this.name = name; } private String name; private List<ElementDefn> elements; public List<ElementDefn> getElements() { return elements; } public void setElements(List<ElementDefn> elements) { this.elements = elements; } } public static boolean isCSharpReservedWord(String word) { if (word.equals("abstract")) return true; if (word.equals("as")) return true; if (word.equals("base")) return true; if (word.equals("bool")) return true; if (word.equals("break")) return true; if (word.equals("byte")) return true; if (word.equals("case")) return true; if (word.equals("catch")) return true; if (word.equals("char")) return true; if (word.equals("checked")) return true; if (word.equals("class")) return true; if (word.equals("const")) return true; if (word.equals("continue")) return true; if (word.equals("decimal")) return true; if (word.equals("default")) return true; if (word.equals("delegate")) return true; if (word.equals("do")) return true; if (word.equals("double")) return true; if (word.equals("else")) return true; if (word.equals("enum")) return true; if (word.equals("event")) return true; if (word.equals("explicit")) return true; if (word.equals("extern")) return true; if (word.equals("false")) return true; if (word.equals("finally")) return true; if (word.equals("fixed")) return true; if (word.equals("float")) return true; if (word.equals("for")) return true; if (word.equals("foreach")) return true; if (word.equals("goto")) return true; if (word.equals("if")) return true; if (word.equals("implicit")) return true; if (word.equals("in")) return true; if (word.equals("in(genericmodifier)")) return true; if (word.equals("int")) return true; if (word.equals("interface")) return true; if (word.equals("internal")) return true; if (word.equals("is")) return true; if (word.equals("lock")) return true; if (word.equals("long")) 
return true; if (word.equals("namespace")) return true; if (word.equals("new")) return true; if (word.equals("null")) return true; if (word.equals("object")) return true; if (word.equals("operator")) return true; if (word.equals("out")) return true; if (word.equals("out(genericmodifier)")) return true; if (word.equals("override")) return true; if (word.equals("params")) return true; if (word.equals("private")) return true; if (word.equals("protected")) return true; if (word.equals("public")) return true; if (word.equals("readonly")) return true; if (word.equals("ref")) return true; if (word.equals("return")) return true; if (word.equals("sbyte")) return true; if (word.equals("sealed")) return true; if (word.equals("short")) return true; if (word.equals("sizeof")) return true; if (word.equals("stackalloc")) return true; if (word.equals("static")) return true; if (word.equals("string")) return true; if (word.equals("struct")) return true; if (word.equals("switch")) return true; if (word.equals("this")) return true; if (word.equals("throw")) return true; if (word.equals("true")) return true; if (word.equals("try")) return true; if (word.equals("typeof")) return true; if (word.equals("uint")) return true; if (word.equals("ulong")) return true; if (word.equals("unchecked")) return true; if (word.equals("unsafe")) return true; if (word.equals("ushort")) return true; if (word.equals("using")) return true; if (word.equals("virtual")) return true; if (word.equals("void")) return true; if (word.equals("volatile")) return true; if (word.equals("while")) return true; return false; } public static boolean isJavaReservedWord(String word) { if (word.equals("abstract")) return true; if (word.equals("assert")) return true; if (word.equals("boolean")) return true; if (word.equals("break")) return true; if (word.equals("byte")) return true; if (word.equals("case")) return true; if (word.equals("catch")) return true; if (word.equals("char")) return true; if (word.equals("class")) 
return true; if (word.equals("const")) return true; if (word.equals("continue")) return true; if (word.equals("default")) return true; if (word.equals("double")) return true; if (word.equals("do")) return true; if (word.equals("else")) return true; if (word.equals("enum")) return true; if (word.equals("extends")) return true; if (word.equals("false")) return true; if (word.equals("final")) return true; if (word.equals("finally")) return true; if (word.equals("float")) return true; if (word.equals("for")) return true; if (word.equals("goto")) return true; if (word.equals("if")) return true; if (word.equals("implements")) return true; if (word.equals("import")) return true; if (word.equals("instanceof")) return true; if (word.equals("int")) return true; if (word.equals("interface")) return true; if (word.equals("long")) return true; if (word.equals("native")) return true; if (word.equals("new")) return true; if (word.equals("null")) return true; if (word.equals("package")) return true; if (word.equals("private")) return true; if (word.equals("protected")) return true; if (word.equals("public")) return true; if (word.equals("return")) return true; if (word.equals("short")) return true; if (word.equals("static")) return true; if (word.equals("strictfp")) return true; if (word.equals("super")) return true; if (word.equals("switch")) return true; if (word.equals("synchronized")) return true; if (word.equals("this")) return true; if (word.equals("throw")) return true; if (word.equals("throws")) return true; if (word.equals("transient")) return true; if (word.equals("true")) return true; if (word.equals("try")) return true; if (word.equals("void")) return true; if (word.equals("volatile")) return true; if (word.equals("while")) return true; if (word.equals("Exception")) return true; return false; } public static boolean isDelphiReservedWord(String word) { if (word.equals("and")) return true; if (word.equals("array")) return true; if (word.equals("as")) return true; if 
(word.equals("asm")) return true; if (word.equals("begin")) return true; if (word.equals("case")) return true; if (word.equals("class")) return true; if (word.equals("const")) return true; if (word.equals("constructor")) return true; if (word.equals("create")) return true; if (word.equals("destructor")) return true; if (word.equals("dispinterface")) return true; if (word.equals("div")) return true; if (word.equals("do")) return true; if (word.equals("downto")) return true; if (word.equals("else")) return true; if (word.equals("end")) return true; if (word.equals("except")) return true; if (word.equals("exports")) return true; if (word.equals("file")) return true; if (word.equals("finalization")) return true; if (word.equals("finally")) return true; if (word.equals("for")) return true; if (word.equals("function")) return true; if (word.equals("goto")) return true; if (word.equals("if")) return true; if (word.equals("implementation")) return true; if (word.equals("in")) return true; if (word.equals("inherited")) return true; if (word.equals("initialization")) return true; if (word.equals("inline")) return true; if (word.equals("interface")) return true; if (word.equals("is")) return true; if (word.equals("label")) return true; if (word.equals("library")) return true; if (word.equals("link")) return true; if (word.equals("mod")) return true; if (word.equals("nil")) return true; if (word.equals("not")) return true; if (word.equals("object")) return true; if (word.equals("of")) return true; if (word.equals("or")) return true; if (word.equals("out")) return true; if (word.equals("packed")) return true; if (word.equals("procedure")) return true; if (word.equals("program")) return true; if (word.equals("property")) return true; if (word.equals("raise")) return true; if (word.equals("record")) return true; if (word.equals("repeat")) return true; if (word.equals("resourcestring")) return true; if (word.equals("set")) return true; if (word.equals("shl")) return true; if 
(word.equals("shr")) return true; if (word.equals("string")) return true; if (word.equals("then")) return true; if (word.equals("threadvar")) return true; if (word.equals("to")) return true; if (word.equals("try")) return true; if (word.equals("type")) return true; if (word.equals("unit")) return true; if (word.equals("until")) return true; if (word.equals("uses")) return true; if (word.equals("var")) return true; if (word.equals("while")) return true; if (word.equals("with")) return true; if (word.equals("xor")) return true; return false; } public static String mapPrimitiveToCSharpType(String name) throws Exception { if (name.equals("boolean")) return "bool?"; else if (name.equals("integer")) return "int?"; else if (name.equals("positiveInt")) return "int?"; else if (name.equals("unsignedInt")) return "int?"; else if (name.equals("decimal")) return "decimal?"; else if (name.equals("base64Binary")) return "byte[]"; else if (name.equals("instant")) return "DateTimeOffset?"; else if (name.equals("string")) return "string"; else if (name.equals("uri")) return "string"; else if (name.equals("code")) return "string"; else if (name.equals("oid")) return "string"; else if (name.equals("markdown")) return "string"; else if (name.equals("uuid")) return "string"; else if (name.equals("sid")) return "string"; else if (name.equals("id")) return "string"; else if (name.equals("xhtml")) return "string"; else if (name.equals("date")) return "string"; else if (name.equals("dateTime")) return "string"; else if (name.equals("time")) return "string"; else throw new Exception( "Unrecognized primitive " + name ); } public static String mapPrimitiveToFhirCSharpType(String name) throws Exception { if (name.equals("boolean")) return "FhirBoolean"; else if (name.equals("integer")) return "Integer"; else if (name.equals("positiveInt")) return "PositiveInt"; else if (name.equals("unsignedInt")) return "UnsignedInt"; else if (name.equals("decimal")) return "FhirDecimal"; else if 
(name.equals("base64Binary")) return "Base64Binary"; else if (name.equals("instant")) return "Instant"; else if (name.equals("string")) return "FhirString"; else if (name.equals("markdown")) return "Markdown"; else if (name.equals("uri")) return "FhirUri"; else if (name.equals("code")) return "Code"; else if (name.equals("oid")) return "Oid"; else if (name.equals("uuid")) return "Uuid"; else if (name.equals("sid")) return "Sid"; else if (name.equals("id")) return "Id"; else if (name.equals("xhtml")) return "FhirString"; else if (name.equals("xml:lang")) return "FhirString"; else if (name.equals("date")) return "Date"; else if (name.equals("time")) return "Time"; else if (name.equals("dateTime")) return "FhirDateTime"; else throw new Exception( "Unrecognized primitive " + name ); } public static String generateCSharpTypeName(String name) throws Exception { String result; name = name.replace("-", ""); if( Character.isLowerCase(name.charAt(0)) ) result = mapPrimitiveToFhirCSharpType(name); else { result = Utilities.capitalize(name); if(result.equals("Reference")) result = "ResourceReference"; } return result; } public static String makeCsStringLiteral(String contents) { contents = "@" + "\"" + contents.replace("\"", "\"\"") + "\""; return contents; } private static final String HL7NAMESPACE = "Hl7.Fhir.Model"; public static String buildFullyScopedTypeName( String fullName ) throws Exception { //String[] nameParts = fullName == null ? "DomainResource".split("\\.") : fullName.split("\\."); String[] nameParts = fullName.split("\\."); if( nameParts.length == 1 ) // Globally defined name return HL7NAMESPACE + "." + GeneratorUtils.generateCSharpTypeName(nameParts[0]); else return HL7NAMESPACE + "." + GeneratorUtils.generateCSharpTypeName(nameParts[0]) + "." + GeneratorUtils.generateCSharpTypeName(nameParts[1]); } public static String buildFullyScopedBindingTypeName( String fullName ) throws Exception { //String[] nameParts = fullName == null ? 
"DomainResource".split("\\.") : fullName.split("\\."); String[] nameParts = fullName.split("\\."); if( nameParts.length == 1 ) // Globally defined name return HL7NAMESPACE + "." + GeneratorUtils.generateCSharpTypeName(Utilities.capitalize(nameParts[0])); else return HL7NAMESPACE + "." + GeneratorUtils.generateCSharpTypeName(nameParts[0]) + "." + GeneratorUtils.generateCSharpTypeName(Utilities.capitalize(nameParts[1])); } public static String buildFullyScopedSerializerTypeName( String fullName ) throws Exception { String[] nameParts = fullName.split("\\."); return GeneratorUtils.generateCSharpTypeName(nameParts[0]) + "Serializer"; } public static String generateCSharpEnumMemberName(String name) { String result = name; if (result.equals("<")) result = "LessThan"; else if (result.equals("<=")) result = "LessOrEqual"; else if (result.equals(">")) result = "GreaterThan"; else if (result.equals(">=")) result = "GreaterOrEqual"; else if (result.equals("=")) result = "Equal"; if (result.startsWith("-")) result = result.replace("-", "Minus"); if (Utilities.isInteger(result)) result = "N" + result; result = result.replace("-", "_"); result = result.replace("+", "Plus"); result = Utilities.camelCase(result); result = Utilities.capitalize(result); return result; } }
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.bookkeeper.util.collections; import static com.google.common.base.Preconditions.checkArgument; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import java.util.concurrent.locks.StampedLock; /** * Concurrent hash set for primitive longs. * * <p>Provides similar methods as a ConcurrentSet&lt;Long&gt; but since it's an open hash map with linear probing, * no node allocations are required to store the values. * * <p>Items <strong>MUST</strong> be &gt;= 0. */ public class ConcurrentLongHashSet { private static final long EmptyItem = -1L; private static final long DeletedItem = -2L; private static final float SetFillFactor = 0.66f; private static final int DefaultExpectedItems = 256; private static final int DefaultConcurrencyLevel = 16; private final Section[] sections; /** * A consumer of long values. 
*/ public interface ConsumerLong { void accept(long item); } public ConcurrentLongHashSet() { this(DefaultExpectedItems); } public ConcurrentLongHashSet(int expectedItems) { this(expectedItems, DefaultConcurrencyLevel); } public ConcurrentLongHashSet(int expectedItems, int concurrencyLevel) { checkArgument(expectedItems > 0); checkArgument(concurrencyLevel > 0); checkArgument(expectedItems >= concurrencyLevel); int numSections = concurrencyLevel; int perSectionExpectedItems = expectedItems / numSections; int perSectionCapacity = (int) (perSectionExpectedItems / SetFillFactor); this.sections = new Section[numSections]; for (int i = 0; i < numSections; i++) { sections[i] = new Section(perSectionCapacity); } } public long size() { long size = 0; for (Section s : sections) { size += s.size; } return size; } public long capacity() { long capacity = 0; for (Section s : sections) { capacity += s.capacity; } return capacity; } public boolean isEmpty() { for (Section s : sections) { if (s.size != 0) { return false; } } return true; } long getUsedBucketCount() { long usedBucketCount = 0; for (Section s : sections) { usedBucketCount += s.usedBuckets; } return usedBucketCount; } public boolean contains(long item) { checkBiggerEqualZero(item); long h = hash(item); return getSection(h).contains(item, (int) h); } public boolean add(long item) { checkBiggerEqualZero(item); long h = hash(item); return getSection(h).add(item, (int) h); } /** * Remove an existing entry if found. 
* * @param item * @return true if removed or false if item was not present */ public boolean remove(long item) { checkBiggerEqualZero(item); long h = hash(item); return getSection(h).remove(item, (int) h); } private Section getSection(long hash) { // Use 32 msb out of long to get the section final int sectionIdx = (int) (hash >>> 32) & (sections.length - 1); return sections[sectionIdx]; } public void clear() { for (Section s : sections) { s.clear(); } } public void forEach(ConsumerLong processor) { for (Section s : sections) { s.forEach(processor); } } /** * @return a new list of all keys (makes a copy) */ public Set<Long> items() { Set<Long> items = new HashSet<>(); forEach(items::add); return items; } // A section is a portion of the hash map that is covered by a single @SuppressWarnings("serial") private static final class Section extends StampedLock { // Keys and values are stored interleaved in the table array private volatile long[] table; private volatile int capacity; private volatile int size; private int usedBuckets; private int resizeThreshold; Section(int capacity) { this.capacity = alignToPowerOfTwo(capacity); this.table = new long[this.capacity]; this.size = 0; this.usedBuckets = 0; this.resizeThreshold = (int) (this.capacity * SetFillFactor); Arrays.fill(table, EmptyItem); } boolean contains(long item, int hash) { long stamp = tryOptimisticRead(); boolean acquiredLock = false; int bucket = signSafeMod(hash, capacity); try { while (true) { // First try optimistic locking long storedItem = table[bucket]; if (!acquiredLock && validate(stamp)) { // The values we have read are consistent if (item == storedItem) { return true; } else if (storedItem == EmptyItem) { // Not found return false; } } else { // Fallback to acquiring read lock if (!acquiredLock) { stamp = readLock(); acquiredLock = true; bucket = signSafeMod(hash, capacity); storedItem = table[bucket]; } if (item == storedItem) { return true; } else if (storedItem == EmptyItem) { // Not found 
return false; } } bucket = (bucket + 1) & (table.length - 1); } } finally { if (acquiredLock) { unlockRead(stamp); } } } boolean add(long item, long hash) { long stamp = writeLock(); int bucket = signSafeMod(hash, capacity); // Remember where we find the first available spot int firstDeletedItem = -1; try { while (true) { long storedItem = table[bucket]; if (item == storedItem) { // Item was already in set return false; } else if (storedItem == EmptyItem) { // Found an empty bucket. This means the key is not in the map. If we've already seen a deleted // key, we should write at that position if (firstDeletedItem != -1) { bucket = firstDeletedItem; } else { ++usedBuckets; } table[bucket] = item; ++size; return true; } else if (storedItem == DeletedItem) { // The bucket contained a different deleted key if (firstDeletedItem == -1) { firstDeletedItem = bucket; } } bucket = (bucket + 1) & (table.length - 1); } } finally { if (usedBuckets > resizeThreshold) { try { rehash(); } finally { unlockWrite(stamp); } } else { unlockWrite(stamp); } } } private boolean remove(long item, int hash) { long stamp = writeLock(); int bucket = signSafeMod(hash, capacity); try { while (true) { long storedItem = table[bucket]; if (item == storedItem) { --size; cleanBucket(bucket); return true; } else if (storedItem == EmptyItem) { // Key wasn't found return false; } bucket = (bucket + 1) & (table.length - 1); } } finally { unlockWrite(stamp); } } private void cleanBucket(int bucket) { int nextInArray = (bucket + 1) & (table.length - 1); if (table[nextInArray] == EmptyItem) { table[bucket] = EmptyItem; --usedBuckets; } else { table[bucket] = DeletedItem; } } void clear() { long stamp = writeLock(); try { Arrays.fill(table, EmptyItem); this.size = 0; this.usedBuckets = 0; } finally { unlockWrite(stamp); } } public void forEach(ConsumerLong processor) { long stamp = tryOptimisticRead(); long[] table = this.table; boolean acquiredReadLock = false; try { // Validate no rehashing if 
(!validate(stamp)) { // Fallback to read lock stamp = readLock(); acquiredReadLock = true; table = this.table; } // Go through all the buckets for this section for (int bucket = 0; bucket < table.length; bucket++) { long storedItem = table[bucket]; if (!acquiredReadLock && !validate(stamp)) { // Fallback to acquiring read lock stamp = readLock(); acquiredReadLock = true; storedItem = table[bucket]; } if (storedItem != DeletedItem && storedItem != EmptyItem) { processor.accept(storedItem); } } } finally { if (acquiredReadLock) { unlockRead(stamp); } } } private void rehash() { // Expand the hashmap int newCapacity = capacity * 2; long[] newTable = new long[newCapacity]; Arrays.fill(newTable, EmptyItem); // Re-hash table for (int i = 0; i < table.length; i++) { long storedItem = table[i]; if (storedItem != EmptyItem && storedItem != DeletedItem) { insertKeyValueNoLock(newTable, newCapacity, storedItem); } } table = newTable; usedBuckets = size; // Capacity needs to be updated after the values, so that we won't see // a capacity value bigger than the actual array size capacity = newCapacity; resizeThreshold = (int) (capacity * SetFillFactor); } private static void insertKeyValueNoLock(long[] table, int capacity, long item) { int bucket = signSafeMod(hash(item), capacity); while (true) { long storedKey = table[bucket]; if (storedKey == EmptyItem) { // The bucket is empty, so we can use it table[bucket] = item; return; } bucket = (bucket + 1) & (table.length - 1); } } } private static final long HashMixer = 0xc6a4a7935bd1e995L; private static final int R = 47; static final long hash(long key) { long hash = key * HashMixer; hash ^= hash >>> R; hash *= HashMixer; return hash; } static final int signSafeMod(long n, int max) { return (int) (n & (max - 1)); } private static int alignToPowerOfTwo(int n) { return (int) Math.pow(2, 32 - Integer.numberOfLeadingZeros(n - 1)); } private static void checkBiggerEqualZero(long n) { if (n < 0L) { throw new 
IllegalArgumentException("Keys and values must be >= 0"); } } }
// Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.eventbus.EventBus; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactFactory; import com.google.devtools.build.lib.actions.ArtifactOwner; import com.google.devtools.build.lib.actions.ArtifactPrefixConflictException; import com.google.devtools.build.lib.actions.MutableActionGraph; import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException; import com.google.devtools.build.lib.analysis.AnalysisEnvironment; import com.google.devtools.build.lib.analysis.AnalysisFailureEvent; import com.google.devtools.build.lib.analysis.Aspect; import com.google.devtools.build.lib.analysis.CachingAnalysisEnvironment; import com.google.devtools.build.lib.analysis.ConfiguredAspectFactory; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import 
com.google.devtools.build.lib.analysis.ConfiguredTargetFactory; import com.google.devtools.build.lib.analysis.LabelAndConfiguration; import com.google.devtools.build.lib.analysis.RuleConfiguredTarget; import com.google.devtools.build.lib.analysis.ViewCreationFailedException; import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoFactory; import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoFactory.BuildInfoKey; import com.google.devtools.build.lib.analysis.config.BinTools; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.ConfigMatchingProvider; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.packages.AspectParameters; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.RuleClassProvider; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.skyframe.ActionLookupValue.ActionLookupKey; import com.google.devtools.build.lib.skyframe.AspectValue.AspectKey; import com.google.devtools.build.lib.skyframe.BuildInfoCollectionValue.BuildInfoKeyAndConfig; import com.google.devtools.build.lib.skyframe.ConfiguredTargetFunction.ConfiguredValueCreationException; import com.google.devtools.build.lib.skyframe.SkyframeActionExecutor.ConflictException; import com.google.devtools.build.lib.syntax.Label; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.skyframe.CycleInfo; import com.google.devtools.build.skyframe.ErrorInfo; import com.google.devtools.build.skyframe.EvaluationProgressReceiver; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.SkyFunction.Environment; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import 
java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.Nullable;

/**
 * Skyframe-based driver of analysis.
 *
 * <p>Covers enough functionality to work as a substitute for {@code BuildView#configureTargets}.
 */
public final class SkyframeBuildView {
  private final ConfiguredTargetFactory factory;
  private final ArtifactFactory artifactFactory;
  private final SkyframeExecutor skyframeExecutor;
  private final Runnable legacyDataCleaner;
  private final BinTools binTools;
  // Gate checked by createConfiguredTarget() before constructing targets; presumably toggled
  // around the analysis phase by an enableAnalysis(boolean) method outside this fragment.
  private boolean enableAnalysis = false;

  // This hack allows us to see when a configured target has been invalidated, and thus when the set
  // of artifact conflicts needs to be recomputed (whenever a configured target has been invalidated
  // or newly evaluated).
  private final EvaluationProgressReceiver invalidationReceiver =
      new ConfiguredTargetValueInvalidationReceiver();
  private final Set<SkyKey> evaluatedConfiguredTargets = Sets.newConcurrentHashSet();
  // Used to see if checks of graph consistency need to be done after analysis.
  private volatile boolean someConfiguredTargetEvaluated = false;

  // We keep the set of invalidated configuration target keys so that we can know if something
  // has been invalidated after graph pruning has been executed.
  private Set<SkyKey> dirtiedConfiguredTargetKeys = Sets.newConcurrentHashSet();
  private volatile boolean anyConfiguredTargetDeleted = false;

  private final RuleClassProvider ruleClassProvider;

  // The host configuration containing all fragments used by this build's transitive closure.
  private BuildConfiguration topLevelHostConfiguration;
  // Fragment-limited versions of the host configuration. It's faster to create/cache these here
  // than to store them in Skyframe.
  private Map<Set<Class<? extends BuildConfiguration.Fragment>>, BuildConfiguration>
      hostConfigurationCache = Maps.newConcurrentMap();

  public SkyframeBuildView(ConfiguredTargetFactory factory,
      ArtifactFactory artifactFactory,
      SkyframeExecutor skyframeExecutor,
      Runnable legacyDataCleaner,
      BinTools binTools,
      RuleClassProvider ruleClassProvider) {
    this.factory = factory;
    this.artifactFactory = artifactFactory;
    this.skyframeExecutor = skyframeExecutor;
    this.legacyDataCleaner = legacyDataCleaner;
    this.binTools = binTools;
    this.ruleClassProvider = ruleClassProvider;
    skyframeExecutor.setArtifactFactoryAndBinTools(artifactFactory, binTools);
  }

  public void resetEvaluatedConfiguredTargetKeysSet() {
    evaluatedConfiguredTargets.clear();
  }

  /** Returns an immutable snapshot of the keys evaluated since the last reset. */
  public Set<SkyKey> getEvaluatedTargetKeys() {
    return ImmutableSet.copyOf(evaluatedConfiguredTargets);
  }

  /**
   * Sets the host configuration consisting of all fragments that will be used by the top level
   * targets' transitive closures.
   *
   * <p>This is used to power {@link #getHostConfiguration} during analysis, which computes
   * fragment-trimmed host configurations from the top-level one.
   */
  public void setTopLevelHostConfiguration(BuildConfiguration topLevelHostConfiguration) {
    if (topLevelHostConfiguration.equals(this.topLevelHostConfiguration)) {
      // Unchanged: keep the fragment-trimmed cache.
      return;
    }

    hostConfigurationCache.clear();
    this.topLevelHostConfiguration = topLevelHostConfiguration;
  }

  // Assigns an ActionLookupKey owner to every artifact deserialized from the FDO profile by
  // scanning all actions' outputs; throws ViewCreationFailedException if any artifact is left
  // without a generating action.
  private void setDeserializedArtifactOwners() throws ViewCreationFailedException {
    Map<PathFragment, Artifact> deserializedArtifactMap =
        artifactFactory.getDeserializedArtifacts();
    Set<Artifact> deserializedArtifacts = new HashSet<>();
    for (Artifact artifact : deserializedArtifactMap.values()) {
      if (!artifact.getExecPath().getBaseName().endsWith(".gcda")) {
        // gcda files are classified as generated artifacts, but are not actually generated. All
        // others need owners.
        deserializedArtifacts.add(artifact);
      }
    }
    if (deserializedArtifacts.isEmpty()) {
      // If there are no deserialized artifacts to process, don't pay the price of iterating over
      // the graph.
      return;
    }
    for (Map.Entry<SkyKey, ActionLookupValue> entry :
        skyframeExecutor.getActionLookupValueMap().entrySet()) {
      for (Action action : entry.getValue().getActionsForFindingArtifactOwners()) {
        for (Artifact output : action.getOutputs()) {
          Artifact deserializedArtifact = deserializedArtifactMap.get(output.getExecPath());
          if (deserializedArtifact != null) {
            deserializedArtifact.setArtifactOwner((ActionLookupKey) entry.getKey().argument());
            deserializedArtifacts.remove(deserializedArtifact);
          }
        }
      }
    }
    if (!deserializedArtifacts.isEmpty()) {
      throw new ViewCreationFailedException("These artifacts were read in from the FDO profile but"
          + " have no generating action that could be found. If you are confident that your profile was"
          + " collected from the same source state at which you're building, please report this:\n"
          + Artifact.asExecPaths(deserializedArtifacts));
    }
    artifactFactory.clearDeserializedArtifacts();
  }

  /**
   * Analyzes the specified targets using Skyframe as the driving framework.
   *
   * @return the configured targets that should be built along with a WalkableGraph of the analysis.
   */
  public SkyframeAnalysisResult configureTargets(List<ConfiguredTargetKey> values,
      List<AspectKey> aspectKeys, EventBus eventBus, boolean keepGoing)
      throws InterruptedException, ViewCreationFailedException {
    enableAnalysis(true);
    EvaluationResult<ActionLookupValue> result;
    try {
      result = skyframeExecutor.configureTargets(values, aspectKeys, keepGoing);
    } finally {
      // Always re-disable analysis, even when evaluation throws.
      enableAnalysis(false);
    }
    ImmutableMap<Action, ConflictException> badActions = skyframeExecutor.findArtifactConflicts();

    Collection<AspectValue> goodAspects = Lists.newArrayListWithCapacity(values.size());
    for (AspectKey aspectKey : aspectKeys) {
      AspectValue value = (AspectValue) result.get(AspectValue.key(aspectKey));
      if (value == null) {
        // Skip aspects that couldn't be applied to targets.
        continue;
      }
      goodAspects.add(value);
    }

    // Filter out all CTs that have a bad action and convert to a list of configured targets. This
    // code ensures that the resulting list of configured targets has the same order as the incoming
    // list of values, i.e., that the order is deterministic.
    Collection<ConfiguredTarget> goodCts = Lists.newArrayListWithCapacity(values.size());
    for (ConfiguredTargetKey value : values) {
      ConfiguredTargetValue ctValue =
          (ConfiguredTargetValue) result.get(ConfiguredTargetValue.key(value));
      if (ctValue == null) {
        continue;
      }
      goodCts.add(ctValue.getConfiguredTarget());
    }

    if (!result.hasError() && badActions.isEmpty()) {
      // Fast path: clean analysis, no conflicts.
      setDeserializedArtifactOwners();
      return new SkyframeAnalysisResult(
          ImmutableList.copyOf(goodCts),
          result.getWalkableGraph(),
          ImmutableList.copyOf(goodAspects));
    }

    // --nokeep_going so we fail with an exception for the first error.
    // TODO(bazel-team): We might want to report the other errors through the event bus but
    // for keeping this code in parity with legacy we just report the first error for now.
    if (!keepGoing) {
      for (Map.Entry<Action, ConflictException> bad : badActions.entrySet()) {
        ConflictException ex = bad.getValue();
        try {
          ex.rethrowTyped();
        } catch (MutableActionGraph.ActionConflictException ace) {
          ace.reportTo(skyframeExecutor.getReporter());
          String errorMsg = "Analysis of target '" + bad.getKey().getOwner().getLabel()
              + "' failed; build aborted";
          throw new ViewCreationFailedException(errorMsg);
        } catch (ArtifactPrefixConflictException apce) {
          skyframeExecutor.getReporter().handle(Event.error(apce.getMessage()));
        }
        // Reached only for the prefix-conflict case above; the action-conflict case threw.
        throw new ViewCreationFailedException(ex.getMessage());
      }

      // No action conflicts: report the first evaluation error instead.
      Map.Entry<SkyKey, ErrorInfo> error = result.errorMap().entrySet().iterator().next();
      SkyKey topLevel = error.getKey();
      ErrorInfo errorInfo = error.getValue();
      assertSaneAnalysisError(errorInfo, topLevel);
      skyframeExecutor.getCyclesReporter().reportCycles(errorInfo.getCycleInfo(), topLevel,
          skyframeExecutor.getReporter());
      Throwable cause = errorInfo.getException();
      Preconditions.checkState(cause != null || !Iterables.isEmpty(errorInfo.getCycleInfo()),
          errorInfo);
      String errorMsg = "Analysis of target '" + ConfiguredTargetValue.extractLabel(topLevel)
          + "' failed; build aborted";
      if (cause instanceof ActionConflictException) {
        ((ActionConflictException) cause).reportTo(skyframeExecutor.getReporter());
      }
      throw new ViewCreationFailedException(errorMsg);
    }

    // --keep_going : We notify the error and return a ConfiguredTargetValue
    for (Map.Entry<SkyKey, ErrorInfo> errorEntry : result.errorMap().entrySet()) {
      if (values.contains(errorEntry.getKey().argument())) {
        SkyKey errorKey = errorEntry.getKey();
        ConfiguredTargetKey label = (ConfiguredTargetKey) errorKey.argument();
        ErrorInfo errorInfo = errorEntry.getValue();
        assertSaneAnalysisError(errorInfo, errorKey);

        skyframeExecutor.getCyclesReporter().reportCycles(errorInfo.getCycleInfo(), errorKey,
            skyframeExecutor.getReporter());
        // We try to get the root cause key first from ErrorInfo rootCauses. If we don't have one
        // we try to use the cycle culprit if the error is a cycle. Otherwise we use the top-level
        // error key.
        Label root;
        if (!Iterables.isEmpty(errorEntry.getValue().getRootCauses())) {
          SkyKey culprit = Preconditions.checkNotNull(Iterables.getFirst(
              errorEntry.getValue().getRootCauses(), null));
          root = ((ConfiguredTargetKey) culprit.argument()).getLabel();
        } else {
          root = maybeGetConfiguredTargetCycleCulprit(errorInfo.getCycleInfo());
        }
        Exception cause = errorInfo.getException();
        if (cause instanceof ActionConflictException) {
          ((ActionConflictException) cause).reportTo(skyframeExecutor.getReporter());
        }
        skyframeExecutor.getReporter().handle(
            Event.warn("errors encountered while analyzing target '"
                + label.getLabel() + "': it will not be built"));
        eventBus.post(new AnalysisFailureEvent(
            LabelAndConfiguration.of(label.getLabel(), label.getConfiguration()), root));
      }
    }

    Collection<Exception> reportedExceptions = Sets.newHashSet();
    for (Map.Entry<Action, ConflictException> bad : badActions.entrySet()) {
      ConflictException ex = bad.getValue();
      try {
        ex.rethrowTyped();
      } catch (MutableActionGraph.ActionConflictException ace) {
        ace.reportTo(skyframeExecutor.getReporter());
        skyframeExecutor.getReporter()
            .handle(Event.warn("errors encountered while analyzing target '"
                + bad.getKey().getOwner().getLabel() + "': it will not be built"));
      } catch (ArtifactPrefixConflictException apce) {
        // Deduplicate: the same prefix conflict can be attached to several actions.
        if (reportedExceptions.add(apce)) {
          skyframeExecutor.getReporter().handle(Event.error(apce.getMessage()));
        }
      }
    }

    if (!badActions.isEmpty()) {
      // In order to determine the set of configured targets transitively error free from action
      // conflict issues, we run a post-processing update() that uses the bad action map.
      EvaluationResult<PostConfiguredTargetValue> actionConflictResult =
          skyframeExecutor.postConfigureTargets(values, keepGoing, badActions);

      goodCts = Lists.newArrayListWithCapacity(values.size());
      for (ConfiguredTargetKey value : values) {
        PostConfiguredTargetValue postCt =
            actionConflictResult.get(PostConfiguredTargetValue.key(value));
        if (postCt != null) {
          goodCts.add(postCt.getCt());
        }
      }
    }

    setDeserializedArtifactOwners();
    return new SkyframeAnalysisResult(
        ImmutableList.copyOf(goodCts),
        result.getWalkableGraph(),
        ImmutableList.copyOf(goodAspects));
  }

  // Returns the label of the first configured-target key participating in a cycle, or null
  // if no cycle contains one.
  @Nullable
  Label maybeGetConfiguredTargetCycleCulprit(Iterable<CycleInfo> cycleInfos) {
    for (CycleInfo cycleInfo : cycleInfos) {
      SkyKey culprit = Iterables.getFirst(cycleInfo.getCycle(), null);
      if (culprit == null) {
        continue;
      }
      if (culprit.functionName().equals(SkyFunctions.CONFIGURED_TARGET)) {
        return ((LabelAndConfiguration) culprit.argument()).getLabel();
      }
    }
    return null;
  }

  private static void assertSaneAnalysisError(ErrorInfo errorInfo, SkyKey key) {
    Throwable cause = errorInfo.getException();
    if (cause != null) {
      // We should only be trying to configure targets when the loading phase succeeds, meaning
      // that the only errors should be analysis errors.
      Preconditions.checkState(cause instanceof ConfiguredValueCreationException
          || cause instanceof ActionConflictException,
          "%s -> %s", key, errorInfo);
    }
  }

  ArtifactFactory getArtifactFactory() {
    return artifactFactory;
  }

  /**
   * Because we don't know what build-info artifacts this configured target may request, we
   * conservatively register a dep on all of them.
   */
  // TODO(bazel-team): Allow analysis to return null so the value builder can exit and wait for a
  // restart if deps are not present.
private boolean getWorkspaceStatusValues(Environment env, BuildConfiguration config) { env.getValue(WorkspaceStatusValue.SKY_KEY); Map<BuildInfoKey, BuildInfoFactory> buildInfoFactories = PrecomputedValue.BUILD_INFO_FACTORIES.get(env); if (buildInfoFactories == null) { return false; } // These factories may each create their own build info artifacts, all depending on the basic // build-info.txt and build-changelist.txt. List<SkyKey> depKeys = Lists.newArrayList(); for (BuildInfoKey key : buildInfoFactories.keySet()) { if (buildInfoFactories.get(key).isEnabled(config)) { depKeys.add(BuildInfoCollectionValue.key(new BuildInfoKeyAndConfig(key, config))); } } env.getValues(depKeys); return !env.valuesMissing(); } /** Returns null if any build-info values are not ready. */ @Nullable CachingAnalysisEnvironment createAnalysisEnvironment(ArtifactOwner owner, boolean isSystemEnv, EventHandler eventHandler, Environment env, BuildConfiguration config) { if (config != null && !getWorkspaceStatusValues(env, config)) { return null; } boolean extendedSanityChecks = config != null && config.extendedSanityChecks(); boolean allowRegisteringActions = config == null || config.isActionsEnabled(); return new CachingAnalysisEnvironment( artifactFactory, owner, isSystemEnv, extendedSanityChecks, eventHandler, env, allowRegisteringActions, binTools); } /** * Invokes the appropriate constructor to create a {@link ConfiguredTarget} instance. * * <p>For use in {@code ConfiguredTargetFunction}. * * <p>Returns null if Skyframe deps are missing or upon certain errors. 
   */
  @Nullable
  ConfiguredTarget createConfiguredTarget(Target target, BuildConfiguration configuration,
      CachingAnalysisEnvironment analysisEnvironment,
      ListMultimap<Attribute, ConfiguredTarget> prerequisiteMap,
      Set<ConfigMatchingProvider> configConditions)
      throws InterruptedException {
    // Fail fast if a target is requested for analysis outside the analysis phase.
    Preconditions.checkState(enableAnalysis,
        "Already in execution phase %s %s", target, configuration);
    return factory.createConfiguredTarget(analysisEnvironment, artifactFactory, target,
        configuration, getHostConfiguration(configuration), prerequisiteMap,
        configConditions);
  }

  /**
   * Returns the host configuration trimmed to the same fragments as the input configuration. If
   * the input is null, returns the top-level host configuration.
   *
   * <p>For static configurations, this unconditionally returns the (sole) top-level configuration.
   *
   * <p>This may only be called after {@link #setTopLevelHostConfiguration} has set the
   * correct host configuration at the top-level.
   */
  public BuildConfiguration getHostConfiguration(BuildConfiguration config) {
    if (config == null || !config.useDynamicConfigurations()) {
      return topLevelHostConfiguration;
    }
    // Dynamic configurations: trim the host configuration to the caller's fragment set, caching
    // by fragment-class set so each distinct trimming is computed only once.
    Set<Class<? extends BuildConfiguration.Fragment>> fragmentClasses = config.fragmentClasses();
    BuildConfiguration hostConfig = hostConfigurationCache.get(fragmentClasses);
    if (hostConfig != null) {
      return hostConfig;
    }
    BuildConfiguration trimmedConfig =
        topLevelHostConfiguration.clone(fragmentClasses, ruleClassProvider);
    hostConfigurationCache.put(fragmentClasses, trimmedConfig);
    return trimmedConfig;
  }

  @Nullable
  public Aspect createAspect(
      AnalysisEnvironment env, RuleConfiguredTarget associatedTarget,
      ConfiguredAspectFactory aspectFactory,
      ListMultimap<Attribute, ConfiguredTarget> prerequisiteMap,
      Set<ConfigMatchingProvider> configConditions, AspectParameters aspectParameters) {
    return factory.createAspect(env, associatedTarget, aspectFactory, aspectParameters,
        prerequisiteMap, configConditions,
        getHostConfiguration(associatedTarget.getConfiguration()));
  }

  @Nullable
  SkyframeDependencyResolver createDependencyResolver(Environment env) {
    return new SkyframeDependencyResolver(env);
  }

  /**
   * Workaround to clear all legacy data, like the action graph and the artifact factory. We need
   * to clear them to avoid conflicts.
   * TODO(bazel-team): Remove this workaround. [skyframe-execution]
   */
  void clearLegacyData() {
    legacyDataCleaner.run();
  }

  /**
   * Hack to invalidate actions in legacy action graph when their values are invalidated in
   * skyframe.
   */
  EvaluationProgressReceiver getInvalidationReceiver() {
    return invalidationReceiver;
  }

  /** Clear the invalidated configured targets detected during loading and analysis phases. */
  public void clearInvalidatedConfiguredTargets() {
    dirtiedConfiguredTargetKeys = Sets.newConcurrentHashSet();
    anyConfiguredTargetDeleted = false;
  }

  public boolean isSomeConfiguredTargetInvalidated() {
    return anyConfiguredTargetDeleted || !dirtiedConfiguredTargetKeys.isEmpty();
  }

  /**
   * Called from SkyframeExecutor to see whether the graph needs to be checked for artifact
   * conflicts.
   * Returns true if some configured target has been evaluated since the last time the
   * graph was checked for artifact conflicts (with that last time marked by a call to
   * {@link #resetEvaluatedConfiguredTargetFlag()}).
   */
  boolean isSomeConfiguredTargetEvaluated() {
    // May only be queried outside the analysis phase.
    Preconditions.checkState(!enableAnalysis);
    return someConfiguredTargetEvaluated;
  }

  /**
   * Called from SkyframeExecutor after the graph is checked for artifact conflicts so that
   * the next time {@link #isSomeConfiguredTargetEvaluated} is called, it will return true only if
   * some configured target has been evaluated since the last check for artifact conflicts.
   */
  void resetEvaluatedConfiguredTargetFlag() {
    someConfiguredTargetEvaluated = false;
  }

  /**
   * {@link #createConfiguredTarget} will only create configured targets if this is set to true. It
   * should be set to true before any Skyframe update call that might call into {@link
   * #createConfiguredTarget}, and false immediately after the call. Use it to fail-fast in the case
   * that a target is requested for analysis not during the analysis phase.
   */
  void enableAnalysis(boolean enable) {
    this.enableAnalysis = enable;
  }

  /**
   * Tracks invalidation and (re-)evaluation of configured-target values so the view knows
   * whether an artifact-conflict re-check is needed.
   */
  private class ConfiguredTargetValueInvalidationReceiver implements EvaluationProgressReceiver {
    @Override
    public void invalidated(SkyKey skyKey, InvalidationState state) {
      if (skyKey.functionName().equals(SkyFunctions.CONFIGURED_TARGET)) {
        if (state == InvalidationState.DELETED) {
          anyConfiguredTargetDeleted = true;
        } else {
          // If the value was just dirtied and not deleted, then it may not be truly invalid, since
          // it may later get re-validated. Therefore adding the key to dirtiedConfiguredTargetKeys
          // is provisional--if the key is later evaluated and the value found to be clean, then we
          // remove it from the set.
          dirtiedConfiguredTargetKeys.add(skyKey);
        }
      }
    }

    @Override
    public void enqueueing(SkyKey skyKey) {}

    @Override
    public void computed(SkyKey skyKey, long elapsedTimeNanos) {}

    @Override
    public void evaluated(SkyKey skyKey, Supplier<SkyValue> skyValueSupplier,
        EvaluationState state) {
      if (skyKey.functionName().equals(SkyFunctions.CONFIGURED_TARGET)) {
        switch (state) {
          case BUILT:
            if (skyValueSupplier.get() != null) {
              evaluatedConfiguredTargets.add(skyKey);
              // During multithreaded operation, this is only set to true, so no concurrency issues.
              someConfiguredTargetEvaluated = true;
            }
            break;
          case CLEAN:
            // If the configured target value did not need to be rebuilt, then it wasn't truly
            // invalid.
            dirtiedConfiguredTargetKeys.remove(skyKey);
            break;
        }
      }
    }
  }
}
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ package org.unitime.timetable.form; import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionMapping; import org.unitime.timetable.model.DepartmentRoomFeature; import org.unitime.timetable.model.GlobalRoomFeature; import org.unitime.timetable.model.Preference; import org.unitime.timetable.util.DynamicList; import org.unitime.timetable.util.DynamicListObjectFactory; /** * MyEclipse Struts * Creation date: 05-12-2006 * * XDoclet definition: * @struts.form name="editRoomFeatureForm" * * @author Tomas Muller */ public class EditRoomFeatureForm extends ActionForm { /** * */ private static final long serialVersionUID = -7728130917482276173L; // --------------------------------------------------------- Instance Variables String doit; String id; String roomLabel; private List globalRoomFeatureIds; private List departmentRoomFeatureIds; private List globalRoomFeatureNames; private List departmentRoomFeatureNames; private List globalRoomFeaturesEditable; private List departmentRoomFeaturesEditable; private List globalRoomFeaturesAssigned; 
private List departmentRoomFeaturesAssigned; // --------------------------------------------------------- Classes /** Factory to create dynamic list element for room groups */ protected DynamicListObjectFactory factoryRoomFeatures = new DynamicListObjectFactory() { public Object create() { return new String(Preference.BLANK_PREF_VALUE); } }; // --------------------------------------------------------- Methods public String getDoit() { return doit; } public void setDoit(String doit) { this.doit = doit; } /** * Method validate * @param mapping * @param request * @return ActionErrors */ public ActionErrors validate( ActionMapping mapping, HttpServletRequest request) { return null; } /** * Method reset * @param mapping * @param request */ public void reset(ActionMapping mapping, HttpServletRequest request) { globalRoomFeatureIds = DynamicList.getInstance(new ArrayList(), factoryRoomFeatures); departmentRoomFeatureIds = DynamicList.getInstance(new ArrayList(), factoryRoomFeatures); globalRoomFeatureNames = DynamicList.getInstance(new ArrayList(), factoryRoomFeatures); departmentRoomFeatureNames = DynamicList.getInstance(new ArrayList(), factoryRoomFeatures); globalRoomFeaturesEditable = DynamicList.getInstance(new ArrayList(), factoryRoomFeatures); departmentRoomFeaturesEditable = DynamicList.getInstance(new ArrayList(), factoryRoomFeatures); globalRoomFeaturesAssigned = DynamicList.getInstance(new ArrayList(), factoryRoomFeatures); departmentRoomFeaturesAssigned = DynamicList.getInstance(new ArrayList(), factoryRoomFeatures); } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getRoomLabel() { return roomLabel; } public void setRoomLabel(String roomLabel) { this.roomLabel = roomLabel; } public List getGlobalRoomFeatureIds() { return globalRoomFeatureIds; } public void setGlobalRoomFeatureIds(List globalRoomFeatureIds) { this.globalRoomFeatureIds = globalRoomFeatureIds; } public List getGlobalRoomFeatureNames() { return 
globalRoomFeatureNames; } public void setGlobalRoomFeatureNames(List globalRoomFeatureNames) { this.globalRoomFeatureNames = globalRoomFeatureNames; } public List getGlobalRoomFeaturesEditable() { return globalRoomFeaturesEditable; } public void setGlobalRoomFeaturesEditable(List globalRoomFeaturesEditable) { this.globalRoomFeaturesEditable = globalRoomFeaturesEditable; } public String getGlobalRoomFeaturesEditable(int key) { return globalRoomFeaturesEditable.get(key).toString(); } public void setGlobalRoomFeaturesEditable(int key, Object value) { this.globalRoomFeaturesEditable.set(key, value); } public List getDepartmentRoomFeatureIds() { return departmentRoomFeatureIds; } public void setDepartmentRoomFeatureIds(List departmentRoomFeatureIds) { this.departmentRoomFeatureIds = departmentRoomFeatureIds; } public List getDepartmentRoomFeatureNames() { return departmentRoomFeatureNames; } public void setDepartmentRoomFeatureNames(List departmentRoomFeatureNames) { this.departmentRoomFeatureNames = departmentRoomFeatureNames; } public List getDepartmentRoomFeaturesEditable() { return departmentRoomFeaturesEditable; } public void setDepartmentRoomFeaturesEditable(List departmentRoomFeaturesEditable) { this.departmentRoomFeaturesEditable = departmentRoomFeaturesEditable; } public String getdepartmentRoomFeaturesEditable(int key) { return departmentRoomFeaturesEditable.get(key).toString(); } public void setdepartmentRoomFeaturesEditable(int key, Object value) { this.departmentRoomFeaturesEditable.set(key, value); } public void addToGlobalRoomFeatures(GlobalRoomFeature rf, Boolean editable, Boolean assigned) { this.globalRoomFeatureIds.add(rf.getUniqueId().toString()); this.globalRoomFeatureNames.add(rf.getLabel() + (rf.getFeatureType() == null ? 
"" : " (" + rf.getFeatureType().getLabel() + ")")); this.globalRoomFeaturesEditable.add(editable); this.globalRoomFeaturesAssigned.add(assigned); } public void addToDepartmentRoomFeatures(DepartmentRoomFeature rf, Boolean editable, Boolean assigned) { this.departmentRoomFeatureIds.add(rf.getUniqueId().toString()); this.departmentRoomFeatureNames.add(rf.getLabel()+" ("+(rf.getDepartment().isExternalManager().booleanValue()?rf.getDepartment().getExternalMgrLabel():rf.getDepartment().getDeptCode()+" - "+rf.getDepartment().getName())+(rf.getFeatureType() == null ? "" : ", " + rf.getFeatureType().getLabel())+")"); this.departmentRoomFeaturesEditable.add(editable); this.departmentRoomFeaturesAssigned.add(assigned); } public List getGlobalRoomFeaturesAssigned() { return globalRoomFeaturesAssigned; } public void setGlobalRoomFeaturesAssigned(List globalRoomFeaturesAssigned) { this.globalRoomFeaturesAssigned = globalRoomFeaturesAssigned; } public List getDepartmentRoomFeaturesAssigned() { return departmentRoomFeaturesAssigned; } public void setDepartmentRoomFeaturesAssigned(List departmentRoomFeaturesAssigned) { this.departmentRoomFeaturesAssigned = departmentRoomFeaturesAssigned; } }
/*
 * Copyright (c) 2011-2017 Pivotal Software Inc, All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package reactor.core.publisher;

import java.util.Iterator;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;

import org.reactivestreams.Subscriber;
import reactor.core.CoreSubscriber;
import reactor.core.Fuseable;
import reactor.util.annotation.Nullable;

/**
 * Emits the contents of an Iterable source.
 *
 * @param <T> the value type
 *
 * @see <a href="https://github.com/reactor/reactive-streams-commons">Reactive-Streams-Commons</a>
 */
final class FluxIterable<T> extends Flux<T> implements Fuseable {

	final Iterable<? extends T> iterable;
	// Optional cleanup hook, run exactly once when iteration completes, errors or is cancelled.
	private final Runnable onClose;

	FluxIterable(Iterable<? extends T> iterable, Runnable onClose) {
		this.iterable = Objects.requireNonNull(iterable, "iterable");
		this.onClose = onClose;
	}

	FluxIterable(Iterable<? extends T> iterable) {
		this(iterable, null);
	}

	@Override
	public void subscribe(CoreSubscriber<? super T> actual) {
		Iterator<? extends T> it;

		try {
			it = iterable.iterator();
		}
		catch (Throwable e) {
			// iterator() itself may throw; surface that as an onError to the subscriber.
			Operators.error(actual, Operators.onOperatorError(e, actual.currentContext()));
			return;
		}

		subscribe(actual, it, onClose);
	}

	/**
	 * Common method to take an Iterator as a source of values.
	 *
	 * @param s the subscriber to feed this iterator to
	 * @param it the iterator to use as a source of values
	 */
	@SuppressWarnings("unchecked")
	static <T> void subscribe(CoreSubscriber<? super T> s, Iterator<? extends T> it) {
		subscribe(s, it, null);
	}

	/**
	 * Common method to take an Iterator as a source of values.
	 *
	 * @param s the subscriber to feed this iterator to
	 * @param it the iterator to use as a source of values
	 * @param onClose close handler to call once we're done with the iterator (provided it
	 * is not null, this includes when the iteration errors or complete or the subscriber
	 * is cancelled). Null to ignore.
	 */
	@SuppressWarnings("unchecked")
	static <T> void subscribe(CoreSubscriber<? super T> s, Iterator<? extends T> it,
			@Nullable Runnable onClose) {
		//noinspection ConstantConditions
		if (it == null) {
			Operators.error(s, new NullPointerException("The iterator is null"));
			return;
		}

		// Probe emptiness up front so empty iterables complete without a Subscription.
		boolean b;

		try {
			b = it.hasNext();
		}
		catch (Throwable e) {
			Operators.error(s, Operators.onOperatorError(e, s.currentContext()));
			if (onClose != null) {
				try {
					onClose.run();
				}
				catch (Throwable t) {
					// Failure of the cleanup hook itself cannot be delivered; drop it.
					Operators.onErrorDropped(t, s.currentContext());
				}
			}
			return;
		}
		if (!b) {
			Operators.complete(s);
			if (onClose != null) {
				try {
					onClose.run();
				}
				catch (Throwable t) {
					Operators.onErrorDropped(t, s.currentContext());
				}
			}
			return;
		}

		// Pick the conditional variant when the subscriber supports tryOnNext fusion.
		if (s instanceof ConditionalSubscriber) {
			s.onSubscribe(new IterableSubscriptionConditional<>((ConditionalSubscriber<? super T>) s,
					it, onClose));
		}
		else {
			s.onSubscribe(new IterableSubscription<>(s, it, onClose));
		}
	}

	// Subscription driving a plain Subscriber; also a queue-fusion SynchronousSubscription.
	static final class IterableSubscription<T>
			implements InnerProducer<T>, SynchronousSubscription<T> {

		final CoreSubscriber<? super T> actual;

		final Iterator<? extends T> iterator;
		final Runnable onClose;

		volatile boolean cancelled;

		volatile long requested;
		@SuppressWarnings("rawtypes")
		static final AtomicLongFieldUpdater<IterableSubscription> REQUESTED =
				AtomicLongFieldUpdater.newUpdater(IterableSubscription.class, "requested");

		// Fusion-mode state machine; see the STATE_* constants below.
		int state;

		/**
		 * Indicates that the iterator's hasNext returned true before but the value is not
		 * yet retrieved.
		 */
		static final int STATE_HAS_NEXT_NO_VALUE = 0;

		/**
		 * Indicates that there is a value available in current.
		 */
		static final int STATE_HAS_NEXT_HAS_VALUE = 1;

		/**
		 * Indicates that there are no more values available.
		 */
		static final int STATE_NO_NEXT = 2;

		/**
		 * Indicates that the value has been consumed and a new value should be retrieved.
		 */
		static final int STATE_CALL_HAS_NEXT = 3;

		T current;

		IterableSubscription(CoreSubscriber<? super T> actual,
				Iterator<? extends T> iterator, @Nullable Runnable onClose) {
			this.actual = actual;
			this.iterator = iterator;
			this.onClose = onClose;
		}

		IterableSubscription(CoreSubscriber<? super T> actual,
				Iterator<? extends T> iterator) {
			this(actual, iterator, null);
		}

		@Override
		public void request(long n) {
			if (Operators.validate(n)) {
				// Only the request that moves REQUESTED from 0 drives the drain loop.
				if (Operators.addCap(REQUESTED, this, n) == 0) {
					if (n == Long.MAX_VALUE) {
						fastPath();
					}
					else {
						slowPath(n);
					}
				}
			}
		}

		// Runs onClose (if any); its own failure can only be dropped at this point.
		private void onCloseWithDropError() {
			if (onClose != null) {
				try {
					onClose.run();
				}
				catch (Throwable t) {
					Operators.onErrorDropped(t, actual.currentContext());
				}
			}
		}

		// Bounded-demand drain loop: emits until demand is met, re-reading `requested`.
		void slowPath(long n) {
			final Iterator<? extends T> a = iterator;
			final Subscriber<? super T> s = actual;

			long e = 0L;

			for (; ; ) {

				while (e != n) {
					T t;

					try {
						t = Objects.requireNonNull(a.next(),
								"The iterator returned a null value");
					}
					catch (Throwable ex) {
						s.onError(ex);
						onCloseWithDropError();
						return;
					}

					// Cancellation is checked around every callback into the subscriber.
					if (cancelled) {
						return;
					}

					s.onNext(t);

					if (cancelled) {
						return;
					}

					boolean b;

					try {
						b = a.hasNext();
					}
					catch (Throwable ex) {
						s.onError(ex);
						onCloseWithDropError();
						return;
					}

					if (cancelled) {
						return;
					}

					if (!b) {
						s.onComplete();
						onCloseWithDropError();
						return;
					}

					e++;
				}

				n = requested;

				if (n == e) {
					// Consume the emitted count; exit if no new demand arrived meanwhile.
					n = REQUESTED.addAndGet(this, -e);
					if (n == 0L) {
						return;
					}
					e = 0L;
				}
			}
		}

		// Unbounded-demand loop (request(Long.MAX_VALUE)): no demand accounting needed.
		void fastPath() {
			final Iterator<? extends T> a = iterator;
			final Subscriber<? super T> s = actual;

			for (; ; ) {

				if (cancelled) {
					return;
				}

				T t;

				try {
					t = Objects.requireNonNull(a.next(),
							"The iterator returned a null value");
				}
				catch (Exception ex) {
					s.onError(ex);
					onCloseWithDropError();
					return;
				}

				if (cancelled) {
					return;
				}

				s.onNext(t);

				if (cancelled) {
					return;
				}

				boolean b;

				try {
					b = a.hasNext();
				}
				catch (Exception ex) {
					s.onError(ex);
					onCloseWithDropError();
					return;
				}

				if (cancelled) {
					return;
				}

				if (!b) {
					s.onComplete();
					onCloseWithDropError();
					return;
				}
			}
		}

		@Override
		public void cancel() {
			onCloseWithDropError();
			cancelled = true;
		}

		@Override
		public CoreSubscriber<? super T> actual() {
			return actual;
		}

		@Override
		@Nullable
		public Object scanUnsafe(Attr key) {
			if (key == Attr.CANCELLED) return cancelled;
			if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return requested;
			if (key == Attr.TERMINATED) return state == STATE_NO_NEXT;

			return InnerProducer.super.scanUnsafe(key);
		}

		@Override
		public void clear() {
			state = STATE_NO_NEXT;
		}

		@Override
		public boolean isEmpty() {
			int s = state;
			if (s == STATE_NO_NEXT) {
				return true;
			}
			else if (s == STATE_HAS_NEXT_HAS_VALUE || s == STATE_HAS_NEXT_NO_VALUE) {
				return false;
			}
			else if (iterator.hasNext()) {
				state = STATE_HAS_NEXT_NO_VALUE;
				return false;
			}
			state = STATE_NO_NEXT;
			return true;
		}

		@Override
		@Nullable
		public T poll() {
			if (!isEmpty()) {
				T c;
				if (state == STATE_HAS_NEXT_NO_VALUE) {
					c = iterator.next();
				}
				else {
					c = current;
					current = null;
				}
				state = STATE_CALL_HAS_NEXT;
				if (c == null) {
					onCloseWithDropError();
					throw new NullPointerException("iterator returned a null value");
				}
				return c;
			}
			onCloseWithDropError();
			return null;
		}

		@Override
		public int size() {
			if (state == STATE_NO_NEXT) {
				return 0;
			}
			return 1;
		}
	}

	// Variant of the above for ConditionalSubscriber: tryOnNext may reject a value,
	// in which case it does not count against requested demand in slowPath.
	static final class IterableSubscriptionConditional<T>
			implements InnerProducer<T>, SynchronousSubscription<T> {

		final ConditionalSubscriber<? super T> actual;

		final Iterator<?
		extends T> iterator;
		final Runnable onClose;

		volatile boolean cancelled;

		volatile long requested;
		@SuppressWarnings("rawtypes")
		static final AtomicLongFieldUpdater<IterableSubscriptionConditional> REQUESTED =
				AtomicLongFieldUpdater.newUpdater(IterableSubscriptionConditional.class,
						"requested");

		// Fusion-mode state machine; see the STATE_* constants below.
		int state;

		/**
		 * Indicates that the iterator's hasNext returned true before but the value is not
		 * yet retrieved.
		 */
		static final int STATE_HAS_NEXT_NO_VALUE = 0;

		/**
		 * Indicates that there is a value available in current.
		 */
		static final int STATE_HAS_NEXT_HAS_VALUE = 1;

		/**
		 * Indicates that there are no more values available.
		 */
		static final int STATE_NO_NEXT = 2;

		/**
		 * Indicates that the value has been consumed and a new value should be retrieved.
		 */
		static final int STATE_CALL_HAS_NEXT = 3;

		T current;

		IterableSubscriptionConditional(ConditionalSubscriber<? super T> actual,
				Iterator<? extends T> iterator, @Nullable Runnable onClose) {
			this.actual = actual;
			this.iterator = iterator;
			this.onClose = onClose;
		}

		IterableSubscriptionConditional(ConditionalSubscriber<? super T> actual,
				Iterator<? extends T> iterator) {
			this(actual, iterator, null);
		}

		@Override
		public void request(long n) {
			if (Operators.validate(n)) {
				// Only the request that moves REQUESTED from 0 drives the drain loop.
				if (Operators.addCap(REQUESTED, this, n) == 0) {
					if (n == Long.MAX_VALUE) {
						fastPath();
					}
					else {
						slowPath(n);
					}
				}
			}
		}

		// Runs onClose (if any); its own failure can only be dropped at this point.
		private void onCloseWithDropError() {
			if (onClose != null) {
				try {
					onClose.run();
				}
				catch (Throwable t) {
					Operators.onErrorDropped(t, actual.currentContext());
				}
			}
		}

		// Bounded-demand drain loop; values rejected by tryOnNext don't consume demand.
		void slowPath(long n) {
			final Iterator<? extends T> a = iterator;
			final ConditionalSubscriber<? super T> s = actual;

			long e = 0L;

			for (; ; ) {

				while (e != n) {
					T t;

					try {
						t = Objects.requireNonNull(a.next(),
								"The iterator returned a null value");
					}
					catch (Throwable ex) {
						s.onError(ex);
						onCloseWithDropError();
						return;
					}

					// Cancellation is checked around every callback into the subscriber.
					if (cancelled) {
						return;
					}

					boolean consumed = s.tryOnNext(t);

					if (cancelled) {
						return;
					}

					boolean b;

					try {
						b = a.hasNext();
					}
					catch (Throwable ex) {
						s.onError(ex);
						onCloseWithDropError();
						return;
					}

					if (cancelled) {
						return;
					}

					if (!b) {
						s.onComplete();
						onCloseWithDropError();
						return;
					}

					if (consumed) {
						e++;
					}
				}

				n = requested;

				if (n == e) {
					// Consume the emitted count; exit if no new demand arrived meanwhile.
					n = REQUESTED.addAndGet(this, -e);
					if (n == 0L) {
						return;
					}
					e = 0L;
				}
			}
		}

		// Unbounded-demand loop: tryOnNext result can be ignored since demand is infinite.
		void fastPath() {
			final Iterator<? extends T> a = iterator;
			final ConditionalSubscriber<? super T> s = actual;

			for (; ; ) {

				if (cancelled) {
					return;
				}

				T t;

				try {
					t = Objects.requireNonNull(a.next(),
							"The iterator returned a null value");
				}
				catch (Exception ex) {
					s.onError(ex);
					onCloseWithDropError();
					return;
				}

				if (cancelled) {
					return;
				}

				s.tryOnNext(t);

				if (cancelled) {
					return;
				}

				boolean b;

				try {
					b = a.hasNext();
				}
				catch (Exception ex) {
					s.onError(ex);
					onCloseWithDropError();
					return;
				}

				if (cancelled) {
					return;
				}

				if (!b) {
					s.onComplete();
					onCloseWithDropError();
					return;
				}
			}
		}

		@Override
		public void cancel() {
			onCloseWithDropError();
			cancelled = true;
		}

		@Override
		public CoreSubscriber<? super T> actual() {
			return actual;
		}

		@Override
		@Nullable
		public Object scanUnsafe(Attr key) {
			if (key == Attr.CANCELLED) return cancelled;
			if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return requested;
			if (key == Attr.TERMINATED) return state == STATE_NO_NEXT;

			return InnerProducer.super.scanUnsafe(key);
		}

		@Override
		public void clear() {
			state = STATE_NO_NEXT;
		}

		@Override
		public boolean isEmpty() {
			int s = state;
			if (s == STATE_NO_NEXT) {
				return true;
			}
			else if (s == STATE_HAS_NEXT_HAS_VALUE || s == STATE_HAS_NEXT_NO_VALUE) {
				return false;
			}
			else if (iterator.hasNext()) {
				state = STATE_HAS_NEXT_NO_VALUE;
				return false;
			}
			state = STATE_NO_NEXT;
			return true;
		}

		@Override
		@Nullable
		public T poll() {
			if (!isEmpty()) {
				T c;
				if (state == STATE_HAS_NEXT_NO_VALUE) {
					c = iterator.next();
				}
				else {
					c = current;
					current = null;
				}
				state = STATE_CALL_HAS_NEXT;
				return c;
			}
			onCloseWithDropError();
			return null;
		}

		@Override
		public int size() {
			if (state == STATE_NO_NEXT) {
				return 0;
			}
			return 1; // no way of knowing without enumerating first
		}
	}
}
package com.alibaba.cobar.client;

import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;

import org.apache.commons.lang.ArrayUtils;
import org.springframework.jdbc.core.RowMapper;
import org.testng.annotations.Test;

import com.alibaba.cobar.client.entities.Follower;
import com.alibaba.cobar.client.support.utils.CollectionUtils;

/**
 * Integration tests for CobarSqlMapClientTemplate when routing rules are declared per
 * sql-action id only: inserts/deletes/updates without a rule go to the default data
 * source (partition1, jt1m), 'finaByName' queries are routed to partition2, and
 * 'findAll' fans out to all partitions.
 *
 * NOTE(review): the statement id 'finaByName' spells "find" without the 'd' — it must
 * match the id declared in the iBatis sqlmap, so do not "fix" the spelling here alone.
 */
@Test(sequential=true)
public class CobarSqlMapClientTemplateWithSqlActionOnlyRouterTest extends
        AbstractTestNGCobarClientTest {

    public CobarSqlMapClientTemplateWithSqlActionOnlyRouterTest() {
        // Spring contexts wiring the template, the data sources and the sql-action router.
        super(new String[] { "META-INF/spring/cobar-client-appctx.xml",
                "META-INF/spring/datasources-appctx.xml",
                "META-INF/spring/sqlaction-router-appctx.xml" });
    }

    public void testInsertOnCobarSqlMapClientWithSqlActionOnlyRules() {
        String name = "Darren";
        Follower follower = new Follower(name);
        getSqlMapClientTemplate().insert("com.alibaba.cobar.client.entities.Follower.create",
                follower);
        // since no rule for this insert, it will be inserted into default data source, that's, partition1
        String confirmSQL = "select name from followers where name='" + name + "'";
        verifyEntityExistenceOnSpecificDataSource(confirmSQL, jt1m);
        verifyEntityNonExistenceOnSpecificDataSource(confirmSQL, jt1s);
        verifyEntityNonExistenceOnSpecificDataSource(confirmSQL, jt2m);
        verifyEntityNonExistenceOnSpecificDataSource(confirmSQL, jt2s);

        // this sql action is routed to partition2, so can't find any matched record.
        Follower followerToFind = (Follower) getSqlMapClientTemplate().queryForObject(
                "com.alibaba.cobar.client.entities.Follower.finaByName", name);
        assertNull(followerToFind);

        // sql action below will be against all of the partitions , so we will get back what we want here
        @SuppressWarnings("unchecked")
        List<Follower> followers = (List<Follower>) getSqlMapClientTemplate().queryForList(
                "com.alibaba.cobar.client.entities.Follower.findAll");
        assertTrue(CollectionUtils.isNotEmpty(followers));
        assertEquals(1, followers.size());
        assertEquals(name, followers.get(0).getName());
    }

    public void testInsertWithBatchCommitOnCobarSqlMapClientTemplateWithSqlActionOnlyRules() {
        String[] names = { "Aaron", "Amily", "Aragon", "Darren", "Darwin" };
        batchInsertMultipleFollowersAsFixture(names);

        // since no routing rule for insertion, all of the records will be inserted into default data source, that's, partition1
        for (String name : names) {
            String confirmSQL = "select name from followers where name='" + name + "'";
            verifyEntityExistenceOnSpecificDataSource(confirmSQL, jt1m);
            verifyEntityNonExistenceOnSpecificDataSource(confirmSQL, jt1s);
            verifyEntityNonExistenceOnSpecificDataSource(confirmSQL, jt2m);
            verifyEntityNonExistenceOnSpecificDataSource(confirmSQL, jt2s);
        }

        // since sql action below is routed to partition2, so no record will be found with it.
        for (String name : names) {
            Follower followerToFind = (Follower) getSqlMapClientTemplate().queryForObject(
                    "com.alibaba.cobar.client.entities.Follower.finaByName", name);
            assertNull(followerToFind);
        }

        // although records only reside on partition1, but we can get all of them with sql action below
        @SuppressWarnings("unchecked")
        List<Follower> followers = (List<Follower>) getSqlMapClientTemplate().queryForList(
                "com.alibaba.cobar.client.entities.Follower.findAll");
        assertTrue(CollectionUtils.isNotEmpty(followers));
        assertEquals(names.length, followers.size());
        for (Follower f : followers) {
            assertTrue(ArrayUtils.contains(names, f.getName()));
        }
    }

    public void testDeleteOnCobarSqlMapClientTemplate() {
        String name = "Darren";
        String sqlAction = "com.alibaba.cobar.client.entities.Follower.deleteByName";
        // no record at beginning
        assertEquals(0, getSqlMapClientTemplate().delete(sqlAction, name));
        // insert 1 record and delete will affect this record which resides on partition1
        Follower follower = new Follower(name);
        getSqlMapClientTemplate().insert("com.alibaba.cobar.client.entities.Follower.create",
                follower);
        assertEquals(1, getSqlMapClientTemplate().delete(sqlAction, name));

        // insert 1 record to partition2, delete will NOT affect it because no rule is defined for it.
        int updatedRow = jt2m.update("insert into followers(name) values('" + name + "')");
        if (updatedRow == 1) // make sure it is do inserted into partition2 successfully.
        {
            assertEquals(0, getSqlMapClientTemplate().delete(sqlAction, name));
            @SuppressWarnings("unchecked")
            List<Follower> followers = (List<Follower>) getSqlMapClientTemplate().queryForList(
                    "com.alibaba.cobar.client.entities.Follower.findAll");
            assertTrue(CollectionUtils.isNotEmpty(followers));
            assertEquals(1, followers.size());
            assertEquals(name, followers.get(0).getName());
        }
    }

    /**
     * insert data onto default data source , and query will against all of the
     * partitions, so all of the records will be returned as expected.
     */
    public void testQueryForListOnCobarSqlMapClientTemplateNormally() {
        String[] names = { "Aaron", "Amily", "Aragon", "Darren", "Darwin" };
        batchInsertMultipleFollowersAsFixture(names);

        @SuppressWarnings("unchecked")
        List<Follower> followers = (List<Follower>) getSqlMapClientTemplate().queryForList(
                "com.alibaba.cobar.client.entities.Follower.findAll");
        assertTrue(CollectionUtils.isNotEmpty(followers));
        assertEquals(names.length, followers.size());
        for (Follower f : followers) {
            assertTrue(ArrayUtils.contains(names, f.getName()));
        }
    }

    /**
     * although records are inserted onto patition2, but since the query is
     * against all of the data sources, so all of the records will be returned
     * as expected.
     */
    public void testQueryForListOnCobarSqlMapClientTemplateWithoutDefaultPartitionData() {
        String[] names = { "Aaron", "Amily", "Aragon", "Darren", "Darwin" };
        batchInsertMultipleFollowersAsFixtureWithJdbcTemplate(names, jt2m);

        @SuppressWarnings("unchecked")
        List<Follower> followers = (List<Follower>) getSqlMapClientTemplate().queryForList(
                "com.alibaba.cobar.client.entities.Follower.findAll");
        assertTrue(CollectionUtils.isNotEmpty(followers));
        assertEquals(names.length, followers.size());
        for (Follower f : followers) {
            assertTrue(ArrayUtils.contains(names, f.getName()));
        }
    }

    /**
     * insert records onto partition1, but the
     * 'com.alibaba.cobar.client.entities.Follower.finaByName' will be performed
     * against partition2 as per the routing rule, so no record will be
     * returned.
     */
    public void testQueryForObjectOnCobarSqlMapClientTemplateWithDefaultPartition() {
        String[] names = { "Aaron", "Amily", "Aragon", "Darren", "Darwin" };
        batchInsertMultipleFollowersAsFixture(names);

        for (String name : names) {
            Follower f = (Follower) getSqlMapClientTemplate().queryForObject(
                    "com.alibaba.cobar.client.entities.Follower.finaByName", name);
            assertNull(f);
        }
    }

    /**
     * we insert records onto partition2, and the
     * 'com.alibaba.cobar.client.entities.Follower.finaByName' action will be
     * performed against partition2 too, so each record will be returned as
     * expected.
     */
    public void testQueryForObjectOnCobarSqlMapClientTemplateWithFillingDataOntoPartition2() {
        String[] names = { "Aaron", "Amily", "Aragon", "Darren", "Darwin" };
        batchInsertMultipleFollowersAsFixtureWithJdbcTemplate(names, jt2m);

        for (String name : names) {
            Follower f = (Follower) getSqlMapClientTemplate().queryForObject(
                    "com.alibaba.cobar.client.entities.Follower.finaByName", name);
            assertNotNull(f);
            assertTrue(ArrayUtils.contains(names, f.getName()));
        }
    }

    /**
     * WARNING: don't do stupid things such like below, we do this because we
     * can guarantee the shard id will NOT change. if you want to use cobar
     * client corretly, make sure you are partitioning you databases with shard
     * id that will not be changed once it's created!!!
     * <br>
     * with data fixtures setting up on default data source, and update with
     * CobarSqlMapClientTemplate.
     */
    public void testUpdateOnCobarSqlMapClientTemplateNormally() {
        String[] names = { "Aaron", "Amily", "Aragon", "Darren", "Darwin" };
        batchInsertMultipleFollowersAsFixture(names);

        for (String name : names) {
            // Fetch the row directly from partition1 so the entity carries its shard id.
            Follower f = (Follower) jt1m.queryForObject("select * from followers where name=?",
                    new Object[] { name }, new RowMapper() {
                        public Object mapRow(ResultSet rs, int rowNum) throws SQLException {
                            Follower fl = new Follower();
                            fl.setId(rs.getLong(1));
                            fl.setName(rs.getString(2));
                            return fl;
                        }
                    });
            assertNotNull(f);
            int updatedCount = getSqlMapClientTemplate().update(
                    "com.alibaba.cobar.client.entities.Follower.update", f);
            assertEquals(1, updatedCount);
        }
    }

    /**
     * WARNING: don't do stupid things such like below, we do this because we
     * can guarantee the shard id will NOT change. if you want to use cobar
     * client corretly, make sure you are partitioning you databases with shard
     * id that will not be changed once it's created!!!
     * <br>
     * with data fixtures setting up on another data source, and update with
     * CobarSqlMapClientTemplate.
     */
    public void testUpdateOnCobarSqlMapClientTemplateAbnormally() {
        String[] names = { "Aaron", "Amily", "Aragon", "Darren", "Darwin" };
        batchInsertMultipleFollowersAsFixtureWithJdbcTemplate(names, jt2m);

        for (String name : names) {
            Follower f = (Follower) getSqlMapClientTemplate().queryForObject(
                    "com.alibaba.cobar.client.entities.Follower.finaByName", name);
            assertNotNull(f); // this sql action is performed against partition2 as per routing rule
            // sql action below will be performed against default data source(partition1), so will not affect any records on partition2
            int updatedCount = getSqlMapClientTemplate().update(
                    "com.alibaba.cobar.client.entities.Follower.update", f);
            assertEquals(0, updatedCount);
        }
    }
}
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.testing.testers.CollectionIteratorTester.getIteratorKnownOrderRemoveSupportedMethod; import static com.google.common.collect.testing.testers.CollectionIteratorTester.getIteratorUnknownOrderRemoveSupportedMethod; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.collect.Table.Cell; import com.google.common.collect.testing.CollectionTestSuiteBuilder; import com.google.common.collect.testing.MapInterfaceTest; import com.google.common.collect.testing.SampleElements; import com.google.common.collect.testing.SetTestSuiteBuilder; import com.google.common.collect.testing.TestSetGenerator; import com.google.common.collect.testing.TestStringCollectionGenerator; import com.google.common.collect.testing.TestStringSetGenerator; import com.google.common.collect.testing.features.CollectionFeature; import com.google.common.collect.testing.features.CollectionSize; import com.google.common.collect.testing.features.Feature; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import 
java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;

/**
 * Collection tests for {@link Table} implementations.
 *
 * @author Jared Levy
 * @author Louis Wasserman
 */
@GwtCompatible(emulated = true)
public class TableCollectionTest extends TestCase {

  // Feature bundles shared by the generated collection test suites below.
  private static final Feature<?>[] COLLECTION_FEATURES = {
    CollectionSize.ANY,
    CollectionFeature.ALLOWS_NULL_QUERIES
  };

  private static final Feature<?>[] COLLECTION_FEATURES_ORDER = {
    CollectionSize.ANY,
    CollectionFeature.KNOWN_ORDER,
    CollectionFeature.ALLOWS_NULL_QUERIES
  };

  private static final Feature<?>[] COLLECTION_FEATURES_REMOVE = {
    CollectionSize.ANY,
    CollectionFeature.SUPPORTS_REMOVE,
    CollectionFeature.ALLOWS_NULL_QUERIES
  };

  private static final Feature<?>[] COLLECTION_FEATURES_REMOVE_ORDER = {
    CollectionSize.ANY,
    CollectionFeature.KNOWN_ORDER,
    CollectionFeature.SUPPORTS_REMOVE,
    CollectionFeature.ALLOWS_NULL_QUERIES
  };

  /**
   * Assembles the full suite: hand-written map-interface tests for the row,
   * column, rowMap and columnMap views, plus generated collection test suites
   * for rowKeySet(), columnKeySet(), values(), cellSet() and column().keySet()
   * across every Table implementation.
   */
  @GwtIncompatible("suite")
  public static Test suite() {
    TestSuite suite = new TestSuite();
    // Hand-written MapInterfaceTest subclasses (defined later in this file).
    suite.addTestSuite(ArrayRowTests.class);
    suite.addTestSuite(HashRowTests.class);
    suite.addTestSuite(TreeRowTests.class);
    suite.addTestSuite(TransposeRowTests.class);
    suite.addTestSuite(TransformValueRowTests.class);
    suite.addTestSuite(UnmodifiableHashRowTests.class);
    suite.addTestSuite(UnmodifiableTreeRowTests.class);
    suite.addTestSuite(ArrayColumnTests.class);
    suite.addTestSuite(HashColumnTests.class);
    suite.addTestSuite(TreeColumnTests.class);
    suite.addTestSuite(TransposeColumnTests.class);
    suite.addTestSuite(TransformValueColumnTests.class);
    suite.addTestSuite(UnmodifiableHashColumnTests.class);
    suite.addTestSuite(UnmodifiableTreeColumnTests.class);
    suite.addTestSuite(ArrayRowMapTests.class);
    suite.addTestSuite(HashRowMapTests.class);
    suite.addTestSuite(TreeRowMapTests.class);
    suite.addTestSuite(TreeRowMapHeadMapTests.class);
    suite.addTestSuite(TreeRowMapTailMapTests.class);
    suite.addTestSuite(TreeRowMapSubMapTests.class);
    suite.addTestSuite(TransformValueRowMapTests.class);
    suite.addTestSuite(UnmodifiableHashRowMapTests.class);
    suite.addTestSuite(UnmodifiableTreeRowMapTests.class);
    suite.addTestSuite(ArrayColumnMapTests.class);
    suite.addTestSuite(HashColumnMapTests.class);
    suite.addTestSuite(TreeColumnMapTests.class);
    suite.addTestSuite(TransformValueColumnMapTests.class);
    suite.addTestSuite(UnmodifiableHashColumnMapTests.class);
    suite.addTestSuite(UnmodifiableTreeColumnMapTests.class);

    // --- rowKeySet() suites ---
    // Not testing rowKeySet() or columnKeySet() of Table.transformValues()
    // since the transformation doesn't affect the row and column key sets.

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<String, Integer, Character> table = ArrayTable.create(
              ImmutableList.copyOf(elements), ImmutableList.of(1, 2));
          populateForRowKeySet(table, elements);
          return table.rowKeySet();
        }
      })
      .named("ArrayTable.rowKeySet")
      .withFeatures(CollectionSize.ONE, CollectionSize.SEVERAL,
          CollectionFeature.KNOWN_ORDER,
          CollectionFeature.REJECTS_DUPLICATES_AT_CREATION,
          CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<String, Integer, Character> table = HashBasedTable.create();
          populateForRowKeySet(table, elements);
          return table.rowKeySet();
        }
      })
      .named("HashBasedTable.rowKeySet")
      .withFeatures(COLLECTION_FEATURES_REMOVE)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<String, Integer, Character> table = TreeBasedTable.create();
          populateForRowKeySet(table, elements);
          return table.rowKeySet();
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("TreeBasedTable.rowKeySet")
      .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER)
      .createTestSuite());

    // headSet/tailSet/subSet views of the sorted row key set: an extra key
    // outside the view's range is planted to verify it is excluded.
    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          TreeBasedTable<String, Integer, Character> table = TreeBasedTable.create();
          populateForRowKeySet(table, elements);
          table.put("z", 1, 'a');
          return table.rowKeySet().headSet("x");
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("TreeBasedTable.rowKeySet.headSet")
      .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          TreeBasedTable<String, Integer, Character> table = TreeBasedTable.create();
          populateForRowKeySet(table, elements);
          table.put("\0", 1, 'a');
          return table.rowKeySet().tailSet("a");
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("TreeBasedTable.rowKeySet.tailSet")
      .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          TreeBasedTable<String, Integer, Character> table = TreeBasedTable.create();
          populateForRowKeySet(table, elements);
          table.put("\0", 1, 'a');
          table.put("z", 1, 'a');
          return table.rowKeySet().subSet("a", "x");
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("TreeBasedTable.rowKeySet.subSet")
      .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<String, Integer, Character> table = HashBasedTable.create();
          populateForRowKeySet(table, elements);
          return Tables.unmodifiableTable(table).rowKeySet();
        }
      })
      .named("unmodifiableTable[HashBasedTable].rowKeySet")
      .withFeatures(COLLECTION_FEATURES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          RowSortedTable<String, Integer, Character> table = TreeBasedTable.create();
          populateForRowKeySet(table, elements);
          return Tables.unmodifiableRowSortedTable(table).rowKeySet();
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("unmodifiableRowSortedTable[TreeBasedTable].rowKeySet")
      .withFeatures(COLLECTION_FEATURES_ORDER)
      .createTestSuite());

    // --- columnKeySet() suites ---
    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<Integer, String, Character> table = ArrayTable.create(
              ImmutableList.of(1, 2), ImmutableList.copyOf(elements));
          populateForColumnKeySet(table, elements);
          return table.columnKeySet();
        }
      })
      .named("ArrayTable.columnKeySet")
      .withFeatures(CollectionSize.ONE, CollectionSize.SEVERAL,
          CollectionFeature.KNOWN_ORDER,
          CollectionFeature.REJECTS_DUPLICATES_AT_CREATION,
          CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<Integer, String, Character> table = HashBasedTable.create();
          populateForColumnKeySet(table, elements);
          return table.columnKeySet();
        }
      })
      .named("HashBasedTable.columnKeySet")
      .withFeatures(COLLECTION_FEATURES_REMOVE)
      .suppressing(getIteratorUnknownOrderRemoveSupportedMethod())
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<Integer, String, Character> table = TreeBasedTable.create();
          populateForColumnKeySet(table, elements);
          return table.columnKeySet();
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("TreeBasedTable.columnKeySet")
      .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER)
      .suppressing(getIteratorKnownOrderRemoveSupportedMethod())
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<Integer, String, Character> table = HashBasedTable.create();
          populateForColumnKeySet(table, elements);
          return Tables.unmodifiableTable(table).columnKeySet();
        }
      })
      .named("unmodifiableTable[HashBasedTable].columnKeySet")
      .withFeatures(COLLECTION_FEATURES)
      .suppressing(getIteratorUnknownOrderRemoveSupportedMethod())
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          RowSortedTable<Integer, String, Character> table = TreeBasedTable.create();
          populateForColumnKeySet(table, elements);
          return Tables.unmodifiableRowSortedTable(table).columnKeySet();
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("unmodifiableRowSortedTable[TreeBasedTable].columnKeySet")
      .withFeatures(COLLECTION_FEATURES_ORDER)
      .suppressing(getIteratorKnownOrderRemoveSupportedMethod())
      .createTestSuite());

    // --- values() suites ---
    suite.addTest(CollectionTestSuiteBuilder.using(
        new TestStringCollectionGenerator() {
          @Override protected Collection<String> create(String[] elements) {
            List<Integer> rowKeys = Lists.newArrayList();
            for (int i = 0; i < elements.length; i++) {
              rowKeys.add(i);
            }
            Table<Integer, Character, String> table
                = ArrayTable.create(rowKeys, ImmutableList.of('a'));
            populateForValues(table, elements);
            return table.values();
          }
        })
        .named("ArrayTable.values")
        .withFeatures(CollectionSize.ONE, CollectionSize.SEVERAL,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionFeature.KNOWN_ORDER)
        .createTestSuite());

    // put + clear before populating exercises the view after a table reset.
    suite.addTest(CollectionTestSuiteBuilder.using(
        new TestStringCollectionGenerator() {
          @Override protected Collection<String> create(String[] elements) {
            Table<Integer, Character, String> table = HashBasedTable.create();
            table.put(1, 'a', "foo");
            table.clear();
            populateForValues(table, elements);
            return table.values();
          }
        })
        .named("HashBasedTable.values")
        .withFeatures(COLLECTION_FEATURES_REMOVE)
        .createTestSuite());

    suite.addTest(CollectionTestSuiteBuilder.using(
        new TestStringCollectionGenerator() {
          @Override protected Collection<String> create(String[] elements) {
            Table<Integer, Character, String> table = TreeBasedTable.create();
            table.put(1, 'a', "foo");
            table.clear();
            populateForValues(table, elements);
            return table.values();
          }
        })
        .named("TreeBasedTable.values")
        .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER)
        .createTestSuite());

    // Strips the "x" prefix added in the TransformValues.values generator below.
    final Function<String, String> removeFirstCharacter
        = new Function<String, String>() {
          @Override public String apply(String input) {
            return input.substring(1);
          }
        };

    suite.addTest(CollectionTestSuiteBuilder.using(
        new TestStringCollectionGenerator() {
          @Override protected Collection<String> create(String[] elements) {
            Table<Integer, Character, String> table = HashBasedTable.create();
            for (int i = 0; i < elements.length; i++) {
              table.put(i, 'a', "x" + checkNotNull(elements[i]));
            }
            return Tables.transformValues(table, removeFirstCharacter).values();
          }
        })
        .named("TransformValues.values")
        .withFeatures(COLLECTION_FEATURES_REMOVE)
        .createTestSuite());

    suite.addTest(CollectionTestSuiteBuilder.using(
        new TestStringCollectionGenerator() {
          @Override protected Collection<String> create(String[] elements) {
            Table<Integer, Character, String> table = HashBasedTable.create();
            table.put(1, 'a', "foo");
            table.clear();
            populateForValues(table, elements);
            return Tables.unmodifiableTable(table).values();
          }
        })
        .named("unmodifiableTable[HashBasedTable].values")
        .withFeatures(COLLECTION_FEATURES)
        .createTestSuite());

    suite.addTest(CollectionTestSuiteBuilder.using(
        new TestStringCollectionGenerator() {
          @Override protected Collection<String> create(String[] elements) {
            RowSortedTable<Integer, Character, String> table = TreeBasedTable.create();
            table.put(1, 'a', "foo");
            table.clear();
            populateForValues(table, elements);
            return Tables.unmodifiableRowSortedTable(table).values();
          }
        })
        .named("unmodifiableTable[TreeBasedTable].values")
        .withFeatures(COLLECTION_FEATURES_ORDER)
        .createTestSuite());

    // --- cellSet() suites ---
    // ArrayTable needs its own samples/create: every cell shares the row key
    // "bar" so the fixed row/column universe can be sized up front.
    suite.addTest(SetTestSuiteBuilder.using(new TestCellSetGenerator() {
        @Override public SampleElements<Cell<String, Integer, Character>> samples() {
          return new SampleElements<Cell<String, Integer, Character>>(
              Tables.immutableCell("bar", 1, 'a'),
              Tables.immutableCell("bar", 2, 'b'),
              Tables.immutableCell("bar", 3, (Character) null),
              Tables.immutableCell("bar", 4, 'b'),
              Tables.immutableCell("bar", 5, 'b'));
        }

        @Override public Set<Cell<String, Integer, Character>> create(
            Object... elements) {
          List<Integer> columnKeys = Lists.newArrayList();
          for (Object element : elements) {
            @SuppressWarnings("unchecked")
            Cell<String, Integer, Character> cell
                = (Cell<String, Integer, Character>) element;
            columnKeys.add(cell.getColumnKey());
          }
          Table<String, Integer, Character> table
              = ArrayTable.create(ImmutableList.of("bar"), columnKeys);
          for (Object element : elements) {
            @SuppressWarnings("unchecked")
            Cell<String, Integer, Character> cell
                = (Cell<String, Integer, Character>) element;
            table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue());
          }
          return table.cellSet();
        }

        @Override Table<String, Integer, Character> createTable() {
          // Unused: create(Object...) is overridden above.
          throw new UnsupportedOperationException();
        }
      })
      .named("ArrayTable.cellSet")
      .withFeatures(CollectionSize.ONE, CollectionSize.SEVERAL,
          CollectionFeature.KNOWN_ORDER,
          CollectionFeature.REJECTS_DUPLICATES_AT_CREATION,
          CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestCellSetGenerator() {
        @Override Table<String, Integer, Character> createTable() {
          return HashBasedTable.create();
        }
      })
      .named("HashBasedTable.cellSet")
      .withFeatures(CollectionSize.ANY, CollectionFeature.SUPPORTS_REMOVE,
          CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestCellSetGenerator() {
        @Override Table<String, Integer, Character> createTable() {
          return TreeBasedTable.create();
        }
      })
      .named("TreeBasedTable.cellSet")
      .withFeatures(CollectionSize.ANY, CollectionFeature.SUPPORTS_REMOVE,
          CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestCellSetGenerator() {
        @Override Table<String, Integer, Character> createTable() {
          Table<Integer, String, Character> original = TreeBasedTable.create();
          return Tables.transpose(original);
        }
      })
      .named("TransposedTable.cellSet")
      .withFeatures(CollectionSize.ANY, CollectionFeature.SUPPORTS_REMOVE,
          CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestCellSetGenerator() {
        @Override Table<String, Integer, Character> createTable() {
          return HashBasedTable.create();
        }

        @Override public Set<Cell<String, Integer, Character>> create(
            Object... elements) {
          Table<String, Integer, Character> table = createTable();
          for (Object element : elements) {
            @SuppressWarnings("unchecked")
            Cell<String, Integer, Character> cell
                = (Cell<String, Integer, Character>) element;
            table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue());
          }
          return Tables.transformValues(table, Functions.<Character>identity())
              .cellSet();
        }
      })
      .named("TransformValues.cellSet")
      .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES,
          CollectionFeature.SUPPORTS_REMOVE)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestCellSetGenerator() {
        @Override Table<String, Integer, Character> createTable() {
          return Tables.unmodifiableTable(
              HashBasedTable.<String, Integer, Character> create());
        }

        @Override public Set<Cell<String, Integer, Character>> create(
            Object... elements) {
          // Populate a modifiable table, then wrap — the wrapper rejects put().
          Table<String, Integer, Character> table = HashBasedTable.create();
          for (Object element : elements) {
            @SuppressWarnings("unchecked")
            Cell<String, Integer, Character> cell
                = (Cell<String, Integer, Character>) element;
            table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue());
          }
          return Tables.unmodifiableTable(table).cellSet();
        }
      })
      .named("unmodifiableTable[HashBasedTable].cellSet")
      .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestCellSetGenerator() {
        @Override RowSortedTable<String, Integer, Character> createTable() {
          return Tables.unmodifiableRowSortedTable(TreeBasedTable
              .<String, Integer, Character> create());
        }

        @Override public Set<Cell<String, Integer, Character>> create(
            Object... elements) {
          RowSortedTable<String, Integer, Character> table = TreeBasedTable.create();
          for (Object element : elements) {
            @SuppressWarnings("unchecked")
            Cell<String, Integer, Character> cell
                = (Cell<String, Integer, Character>) element;
            table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue());
          }
          return Tables.unmodifiableRowSortedTable(table).cellSet();
        }
      })
      .named("unmodifiableRowSortedTable[TreeBasedTable].cellSet")
      .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    // --- column(columnKey).keySet() suites ---
    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Iterable<String> rowKeys = ImmutableSet.copyOf(elements);
          Iterable<Integer> columnKeys = ImmutableList.of(1, 2, 3);
          Table<String, Integer, Character> table
              = ArrayTable.create(rowKeys, columnKeys);
          populateForRowKeySet(table, elements);
          return table.column(1).keySet();
        }
      })
      .named("ArrayTable.column.keySet")
      .withFeatures(CollectionSize.ONE, CollectionSize.SEVERAL,
          CollectionFeature.KNOWN_ORDER,
          CollectionFeature.ALLOWS_NULL_QUERIES)
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<String, Integer, Character> table = HashBasedTable.create();
          populateForRowKeySet(table, elements);
          return table.column(1).keySet();
        }
      })
      .named("HashBasedTable.column.keySet")
      .withFeatures(COLLECTION_FEATURES_REMOVE)
      .suppressing(getIteratorUnknownOrderRemoveSupportedMethod())
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<String, Integer, Character> table = TreeBasedTable.create();
          populateForRowKeySet(table, elements);
          return table.column(1).keySet();
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("TreeBasedTable.column.keySet")
      .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER)
      .suppressing(getIteratorKnownOrderRemoveSupportedMethod())
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<String, Integer, Character> table = HashBasedTable.create();
          populateForRowKeySet(table, elements);
          return Tables.transformValues(table, Functions.toStringFunction())
              .column(1).keySet();
        }
      })
      .named("TransformValues.column.keySet")
      .withFeatures(COLLECTION_FEATURES_REMOVE)
      .suppressing(getIteratorUnknownOrderRemoveSupportedMethod())
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          Table<String, Integer, Character> table = HashBasedTable.create();
          populateForRowKeySet(table, elements);
          return Tables.unmodifiableTable(table).column(1).keySet();
        }
      })
      .named("unmodifiableTable[HashBasedTable].column.keySet")
      .withFeatures(COLLECTION_FEATURES)
      .suppressing(getIteratorUnknownOrderRemoveSupportedMethod())
      .createTestSuite());

    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override protected Set<String> create(String[] elements) {
          RowSortedTable<String, Integer, Character> table = TreeBasedTable.create();
          populateForRowKeySet(table, elements);
          return Tables.unmodifiableRowSortedTable(table).column(1).keySet();
        }

        @Override public List<String> order(List<String> insertionOrder) {
          Collections.sort(insertionOrder);
          return insertionOrder;
        }
      })
      .named("unmodifiableRowSortedTable[TreeBasedTable].column.keySet")
      .withFeatures(COLLECTION_FEATURES_ORDER)
      .suppressing(getIteratorKnownOrderRemoveSupportedMethod())
      .createTestSuite());

    return suite;
  }

  // Each row key gets entries in columns 1 and 2.
  private static void populateForRowKeySet(
      Table<String, Integer, Character> table, String[] elements) {
    for (String row : elements) {
      table.put(row, 1, 'a');
      table.put(row, 2, 'b');
    }
  }

  // Each column key gets entries in rows 1 and 2.
  private static void populateForColumnKeySet(
      Table<Integer, String, Character> table, String[] elements) {
    for (String column : elements) {
      table.put(1, column, 'a');
      table.put(2, column, 'b');
    }
  }

  // One value per row, all in column 'a'.
  private static void populateForValues(
      Table<Integer, Character, String> table, String[] elements) {
    for (int i = 0; i < elements.length; i++) {
      table.put(i, 'a', elements[i]);
    }
  }

  // Base generator for cellSet() suites; subclasses supply the table.
  private static abstract class TestCellSetGenerator
      implements TestSetGenerator<Cell<String, Integer, Character>> {
    @Override
    public SampleElements<Cell<String, Integer, Character>> samples() {
      return new SampleElements<Cell<String, Integer, Character>>(
          Tables.immutableCell("bar", 1, 'a'),
          Tables.immutableCell("bar", 2, 'b'),
          Tables.immutableCell("foo", 3, 'c'),
          Tables.immutableCell("bar", 1, 'b'),
          Tables.immutableCell("cat", 2, 'b'));
    }

    @Override
    public Set<Cell<String, Integer, Character>> create(
        Object...
        elements) {
      Table<String, Integer, Character> table = createTable();
      for (Object element : elements) {
        @SuppressWarnings("unchecked")
        Cell<String, Integer, Character> cell
            = (Cell<String, Integer, Character>) element;
        table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue());
      }
      return table.cellSet();
    }

    // Subclasses provide the table implementation whose cellSet() is under test.
    abstract Table<String, Integer, Character> createTable();

    @Override
    @SuppressWarnings("unchecked")
    public Cell<String, Integer, Character>[] createArray(int length) {
      return (Cell<String, Integer, Character>[]) new Cell<?, ?, ?>[length];
    }

    @Override
    public List<Cell<String, Integer, Character>> order(
        List<Cell<String, Integer, Character>> insertionOrder) {
      return insertionOrder;
    }
  }

  // Shared base for the row/column map-view MapInterfaceTest subclasses below;
  // fixes the key/value "not in map" probes used by the generic map tests.
  private static abstract class MapTests
      extends MapInterfaceTest<String, Integer> {

    MapTests(boolean allowsNullValues, boolean supportsPut,
        boolean supportsRemove, boolean supportsClear,
        boolean supportsIteratorRemove) {
      super(false, allowsNullValues, supportsPut, supportsRemove,
          supportsClear, supportsIteratorRemove);
    }

    @Override protected String getKeyNotInPopulatedMap() {
      return "four";
    }

    @Override protected Integer getValueNotInPopulatedMap() {
      return 4;
    }
  }

  // Tests the Map view returned by Table.row('a'); row 'b' holds an extra
  // entry that must stay invisible through the row view.
  private static abstract class RowTests extends MapTests {
    RowTests(boolean allowsNullValues, boolean supportsPut,
        boolean supportsRemove, boolean supportsClear,
        boolean supportsIteratorRemove) {
      super(allowsNullValues, supportsPut, supportsRemove, supportsClear,
          supportsIteratorRemove);
    }

    abstract Table<Character, String, Integer> makeTable();

    @Override protected Map<String, Integer> makeEmptyMap() {
      return makeTable().row('a');
    }

    @Override protected Map<String, Integer> makePopulatedMap() {
      Table<Character, String, Integer> table = makeTable();
      table.put('a', "one", 1);
      table.put('a', "two", 2);
      table.put('a', "three", 3);
      table.put('b', "four", 4);
      return table.row('a');
    }
  }

  @GwtIncompatible("TODO(hhchan): ArrayTable")
  public static class ArrayRowTests extends RowTests {
    public ArrayRowTests() {
      super(true, true, false,
          false, false);
    }

    // ArrayTable has a fixed key universe, so "key not in map" and a truly
    // empty map are not constructible; the generic tests skip these cases.
    @Override protected String getKeyNotInPopulatedMap() {
      throw new UnsupportedOperationException();
    }

    @Override protected Map<String, Integer> makeEmptyMap() {
      throw new UnsupportedOperationException();
    }

    @Override protected Table<Character, String, Integer> makeTable() {
      return ArrayTable.create(Arrays.asList('a', 'b', 'c'),
          Arrays.asList("one", "two", "three", "four"));
    }
  }

  public static class HashRowTests extends RowTests {
    public HashRowTests() {
      super(false, true, true, true, true);
    }

    @Override Table<Character, String, Integer> makeTable() {
      return HashBasedTable.create();
    }
  }

  public static class TreeRowTests extends RowTests {
    public TreeRowTests() {
      super(false, true, true, true, true);
    }

    @Override Table<Character, String, Integer> makeTable() {
      return TreeBasedTable.create();
    }
  }

  // Row view of a transposed table (i.e. a column view of the original).
  public static class TransposeRowTests extends RowTests {
    public TransposeRowTests() {
      super(false, true, true, true, false);
    }

    @Override Table<Character, String, Integer> makeTable() {
      Table<String, Character, Integer> original = TreeBasedTable.create();
      return Tables.transpose(original);
    }
  }

  // Integer halving used by the transformValues tests; null maps to null.
  private static final Function<Integer, Integer> DIVIDE_BY_2
      = new Function<Integer, Integer>() {
        @Override public Integer apply(Integer input) {
          return (input == null) ?
              null : input / 2;
        }
      };

  public static class TransformValueRowTests extends RowTests {
    public TransformValueRowTests() {
      super(false, false, true, true, true);
    }

    @Override Table<Character, String, Integer> makeTable() {
      Table<Character, String, Integer> table = HashBasedTable.create();
      return Tables.transformValues(table, DIVIDE_BY_2);
    }

    @Override protected Map<String, Integer> makePopulatedMap() {
      Table<Character, String, Integer> table = HashBasedTable.create();
      // Pre-transform values are doubled so the transformed row('a') view
      // contains the conventional fixture values 1, 2, 3.
      table.put('a', "one", 2);
      table.put('a', "two", 4);
      table.put('a', "three", 6);
      table.put('b', "four", 8);
      return Tables.transformValues(table, DIVIDE_BY_2).row('a');
    }
  }

  public static class UnmodifiableHashRowTests extends RowTests {
    public UnmodifiableHashRowTests() {
      super(false, false, false, false, false);
    }

    @Override Table<Character, String, Integer> makeTable() {
      Table<Character, String, Integer> table = HashBasedTable.create();
      return Tables.unmodifiableTable(table);
    }

    @Override protected Map<String, Integer> makePopulatedMap() {
      // Populate the backing table first; the wrapper rejects mutation.
      Table<Character, String, Integer> table = HashBasedTable.create();
      table.put('a', "one", 1);
      table.put('a', "two", 2);
      table.put('a', "three", 3);
      table.put('b', "four", 4);
      return Tables.unmodifiableTable(table).row('a');
    }
  }

  public static class UnmodifiableTreeRowTests extends RowTests {
    public UnmodifiableTreeRowTests() {
      super(false, false, false, false, false);
    }

    @Override Table<Character, String, Integer> makeTable() {
      RowSortedTable<Character, String, Integer> table = TreeBasedTable.create();
      return Tables.unmodifiableRowSortedTable(table);
    }

    @Override protected Map<String, Integer> makePopulatedMap() {
      RowSortedTable<Character, String, Integer> table = TreeBasedTable.create();
      table.put('a', "one", 1);
      table.put('a', "two", 2);
      table.put('a', "three", 3);
      table.put('b', "four", 4);
      return Tables.unmodifiableRowSortedTable(table).row('a');
    }
  }

  // Tests the Map view returned by Table.column('a'); column 'b' holds an
  // extra entry that must stay invisible through the column view.
  private static abstract class ColumnTests extends MapTests {
    ColumnTests(boolean allowsNullValues, boolean supportsPut,
        boolean supportsRemove, boolean supportsClear,
        boolean supportsIteratorRemove) {
      super(allowsNullValues, supportsPut, supportsRemove, supportsClear,
          supportsIteratorRemove);
    }

    abstract Table<String, Character, Integer> makeTable();

    @Override protected Map<String, Integer> makeEmptyMap() {
      return makeTable().column('a');
    }

    @Override protected Map<String, Integer> makePopulatedMap() {
      Table<String, Character, Integer> table = makeTable();
      table.put("one", 'a', 1);
      table.put("two", 'a', 2);
      table.put("three", 'a', 3);
      table.put("four", 'b', 4);
      return table.column('a');
    }
  }

  @GwtIncompatible("TODO(hhchan): ArrayTable")
  public static class ArrayColumnTests extends ColumnTests {
    public ArrayColumnTests() {
      super(true, true, false, false, false);
    }

    // ArrayTable has a fixed key universe, so "key not in map" and a truly
    // empty map are not constructible; the generic tests skip these cases.
    @Override protected String getKeyNotInPopulatedMap() {
      throw new UnsupportedOperationException();
    }

    @Override protected Map<String, Integer> makeEmptyMap() {
      throw new UnsupportedOperationException();
    }

    @Override Table<String, Character, Integer> makeTable() {
      return ArrayTable.create(
          Arrays.asList("one", "two", "three", "four"),
          Arrays.asList('a', 'b', 'c'));
    }
  }

  public static class HashColumnTests extends ColumnTests {
    public HashColumnTests() {
      super(false, true, true, true, false);
    }

    @Override Table<String, Character, Integer> makeTable() {
      return HashBasedTable.create();
    }
  }

  public static class TreeColumnTests extends ColumnTests {
    public TreeColumnTests() {
      super(false, true, true, true, false);
    }

    @Override Table<String, Character, Integer> makeTable() {
      return TreeBasedTable.create();
    }
  }

  // Column view of a transposed table (i.e. a row view of the original, which
  // is why iterator removal is supported here unlike the other column tests).
  public static class TransposeColumnTests extends ColumnTests {
    public TransposeColumnTests() {
      super(false, true, true, true, true);
    }

    @Override Table<String, Character, Integer> makeTable() {
      Table<Character, String, Integer> original = TreeBasedTable.create();
      return Tables.transpose(original);
    }
  }

  public static class TransformValueColumnTests extends ColumnTests {
    public TransformValueColumnTests() {
      super(false, false, true, true,
false); } @Override Table<String, Character, Integer> makeTable() { Table<String, Character, Integer> table = HashBasedTable.create(); return Tables.transformValues(table, DIVIDE_BY_2); } @Override protected Map<String, Integer> makePopulatedMap() { Table<String, Character, Integer> table = HashBasedTable.create(); table.put("one", 'a', 1); table.put("two", 'a', 2); table.put("three", 'a', 3); table.put("four", 'b', 4); return Tables.transformValues(table, DIVIDE_BY_2).column('a'); } } public static class UnmodifiableHashColumnTests extends ColumnTests { public UnmodifiableHashColumnTests() { super(false, false, false, false, false); } @Override Table<String, Character, Integer> makeTable() { Table<String, Character, Integer> table = HashBasedTable.create(); return Tables.unmodifiableTable(table); } @Override protected Map<String, Integer> makePopulatedMap() { Table<String, Character, Integer> table = HashBasedTable.create(); table.put("one", 'a', 1); table.put("two", 'a', 2); table.put("three", 'a', 3); table.put("four", 'b', 4); return Tables.unmodifiableTable(table).column('a'); } } public static class UnmodifiableTreeColumnTests extends ColumnTests { public UnmodifiableTreeColumnTests() { super(false, false, false, false, false); } @Override Table<String, Character, Integer> makeTable() { RowSortedTable<String, Character, Integer> table = TreeBasedTable.create(); return Tables.unmodifiableRowSortedTable(table); } @Override protected Map<String, Integer> makePopulatedMap() { RowSortedTable<String, Character, Integer> table = TreeBasedTable.create(); table.put("one", 'a', 1); table.put("two", 'a', 2); table.put("three", 'a', 3); table.put("four", 'b', 4); return Tables.unmodifiableRowSortedTable(table).column('a'); } } private static abstract class MapMapTests extends MapInterfaceTest<String, Map<Integer, Character>> { MapMapTests(boolean allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(false, 
allowsNullValues, false, supportsRemove, supportsClear, supportsIteratorRemove); } @Override protected String getKeyNotInPopulatedMap() { return "cat"; } @Override protected Map<Integer, Character> getValueNotInPopulatedMap() { return ImmutableMap.of(); } /** * The version of this test supplied by {@link MapInterfaceTest} fails for * this particular map implementation, because {@code map.get()} returns a * view collection that changes in the course of a call to {@code remove()}. * Thus, the expectation doesn't hold that {@code map.remove(x)} returns the * same value which {@code map.get(x)} did immediately beforehand. */ @Override public void testRemove() { final Map<String, Map<Integer, Character>> map; final String keyToRemove; try { map = makePopulatedMap(); } catch (UnsupportedOperationException e) { return; } keyToRemove = map.keySet().iterator().next(); if (supportsRemove) { int initialSize = map.size(); map.get(keyToRemove); map.remove(keyToRemove); // This line doesn't hold - see the Javadoc comments above. // assertEquals(expectedValue, oldValue); assertFalse(map.containsKey(keyToRemove)); assertEquals(initialSize - 1, map.size()); } else { try { map.remove(keyToRemove); fail("Expected UnsupportedOperationException."); } catch (UnsupportedOperationException e) { // Expected. 
} } assertInvariants(map); } } private static abstract class RowMapTests extends MapMapTests { RowMapTests(boolean allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<String, Integer, Character> makeTable(); @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<String, Integer, Character> table = makeTable(); populateTable(table); return table.rowMap(); } void populateTable(Table<String, Integer, Character> table) { table.put("foo", 1, 'a'); table.put("bar", 1, 'b'); table.put("foo", 3, 'c'); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().rowMap(); } } @GwtIncompatible("TODO(hhchan): ArrayTable") public static class ArrayRowMapTests extends RowMapTests { public ArrayRowMapTests() { super(true, false, false, false); } @Override Table<String, Integer, Character> makeTable() { return ArrayTable.create(Arrays.asList("foo", "bar", "dog"), Arrays.asList(1, 2, 3)); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { throw new UnsupportedOperationException(); } } public static class HashRowMapTests extends RowMapTests { public HashRowMapTests() { super(false, true, true, true); } @Override Table<String, Integer, Character> makeTable() { return HashBasedTable.create(); } } public static class TreeRowMapTests extends RowMapTests { public TreeRowMapTests() { super(false, true, true, true); } @Override Table<String, Integer, Character> makeTable() { return TreeBasedTable.create(); } } public static class TreeRowMapHeadMapTests extends RowMapTests { public TreeRowMapHeadMapTests() { super(false, true, true, true); } @Override TreeBasedTable<String, Integer, Character> makeTable() { TreeBasedTable<String, Integer, Character> table = TreeBasedTable.create(); table.put("z", 1, 'a'); return table; } @Override protected Map<String, 
Map<Integer, Character>> makePopulatedMap() { TreeBasedTable<String, Integer, Character> table = makeTable(); populateTable(table); return table.rowMap().headMap("x"); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().rowMap().headMap("x"); } @Override protected String getKeyNotInPopulatedMap() { return "z"; } } public static class TreeRowMapTailMapTests extends RowMapTests { public TreeRowMapTailMapTests() { super(false, true, true, true); } @Override TreeBasedTable<String, Integer, Character> makeTable() { TreeBasedTable<String, Integer, Character> table = TreeBasedTable.create(); table.put("a", 1, 'a'); return table; } @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { TreeBasedTable<String, Integer, Character> table = makeTable(); populateTable(table); return table.rowMap().tailMap("b"); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().rowMap().tailMap("b"); } @Override protected String getKeyNotInPopulatedMap() { return "a"; } } public static class TreeRowMapSubMapTests extends RowMapTests { public TreeRowMapSubMapTests() { super(false, true, true, true); } @Override TreeBasedTable<String, Integer, Character> makeTable() { TreeBasedTable<String, Integer, Character> table = TreeBasedTable.create(); table.put("a", 1, 'a'); table.put("z", 1, 'a'); return table; } @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { TreeBasedTable<String, Integer, Character> table = makeTable(); populateTable(table); return table.rowMap().subMap("b", "x"); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().rowMap().subMap("b", "x"); } @Override protected String getKeyNotInPopulatedMap() { return "z"; } } private static final Function<String, Character> FIRST_CHARACTER = new Function<String, Character>() { @Override public Character apply(String input) { return input == null ? 
null : input.charAt(0); } }; public static class TransformValueRowMapTests extends RowMapTests { public TransformValueRowMapTests() { super(false, true, true, true); } @Override Table<String, Integer, Character> makeTable() { Table<String, Integer, String> original = HashBasedTable.create(); return Tables.transformValues(original, FIRST_CHARACTER); } @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<String, Integer, String> table = HashBasedTable.create(); table.put("foo", 1, "apple"); table.put("bar", 1, "banana"); table.put("foo", 3, "cat"); return Tables.transformValues(table, FIRST_CHARACTER).rowMap(); } } public static class UnmodifiableHashRowMapTests extends RowMapTests { public UnmodifiableHashRowMapTests() { super(false, false, false, false); } @Override Table<String, Integer, Character> makeTable() { Table<String, Integer, Character> original = HashBasedTable.create(); return Tables.unmodifiableTable(original); } @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<String, Integer, Character> table = HashBasedTable.create(); table.put("foo", 1, 'a'); table.put("bar", 1, 'b'); table.put("foo", 3, 'c'); return Tables.unmodifiableTable(table).rowMap(); } } public static class UnmodifiableTreeRowMapTests extends RowMapTests { public UnmodifiableTreeRowMapTests() { super(false, false, false, false); } @Override RowSortedTable<String, Integer, Character> makeTable() { RowSortedTable<String, Integer, Character> original = TreeBasedTable.create(); return Tables.unmodifiableRowSortedTable(original); } @Override protected SortedMap<String, Map<Integer, Character>> makePopulatedMap() { RowSortedTable<String, Integer, Character> table = TreeBasedTable.create(); table.put("foo", 1, 'a'); table.put("bar", 1, 'b'); table.put("foo", 3, 'c'); return Tables.unmodifiableRowSortedTable(table).rowMap(); } } private static abstract class ColumnMapTests extends MapMapTests { ColumnMapTests(boolean 
allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<Integer, String, Character> makeTable(); @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<Integer, String, Character> table = makeTable(); table.put(1, "foo", 'a'); table.put(1, "bar", 'b'); table.put(3, "foo", 'c'); return table.columnMap(); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().columnMap(); } } @GwtIncompatible("TODO(hhchan): ArrayTable") public static class ArrayColumnMapTests extends ColumnMapTests { public ArrayColumnMapTests() { super(true, false, false, false); } @Override Table<Integer, String, Character> makeTable() { return ArrayTable.create(Arrays.asList(1, 2, 3), Arrays.asList("foo", "bar", "dog")); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { throw new UnsupportedOperationException(); } } public static class HashColumnMapTests extends ColumnMapTests { public HashColumnMapTests() { super(false, true, true, false); } @Override Table<Integer, String, Character> makeTable() { return HashBasedTable.create(); } } public static class TreeColumnMapTests extends ColumnMapTests { public TreeColumnMapTests() { super(false, true, true, false); } @Override Table<Integer, String, Character> makeTable() { return TreeBasedTable.create(); } } public static class TransformValueColumnMapTests extends ColumnMapTests { public TransformValueColumnMapTests() { super(false, true, true, false); } @Override Table<Integer, String, Character> makeTable() { Table<Integer, String, String> original = HashBasedTable.create(); return Tables.transformValues(original, FIRST_CHARACTER); } @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<Integer, String, String> table = HashBasedTable.create(); table.put(1, "foo", "apple"); table.put(1, 
"bar", "banana"); table.put(3, "foo", "cat"); return Tables.transformValues(table, FIRST_CHARACTER).columnMap(); } } public static class UnmodifiableHashColumnMapTests extends ColumnMapTests { public UnmodifiableHashColumnMapTests() { super(false, false, false, false); } @Override Table<Integer, String, Character> makeTable() { Table<Integer, String, Character> original = HashBasedTable.create(); return Tables.unmodifiableTable(original); } @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<Integer, String, Character> table = HashBasedTable.create(); table.put(1, "foo", 'a'); table.put(1, "bar", 'b'); table.put(3, "foo", 'c'); return Tables.unmodifiableTable(table).columnMap(); } } public static class UnmodifiableTreeColumnMapTests extends ColumnMapTests { public UnmodifiableTreeColumnMapTests() { super(false, false, false, false); } @Override Table<Integer, String, Character> makeTable() { RowSortedTable<Integer, String, Character> original = TreeBasedTable.create(); return Tables.unmodifiableRowSortedTable(original); } @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { RowSortedTable<Integer, String, Character> table = TreeBasedTable.create(); table.put(1, "foo", 'a'); table.put(1, "bar", 'b'); table.put(3, "foo", 'c'); return Tables.unmodifiableRowSortedTable(table).columnMap(); } } }
package org.adligo.models.core.shared.util;

import java.util.Date;

import org.adligo.i.util.shared.ClassUtils;
import org.adligo.i.util.shared.DateTime;
import org.adligo.i.util.shared.I_TextFormatter;
import org.adligo.i.util.shared.TextFormatter;
import org.adligo.models.core.shared.InvalidParameterException;
import org.adligo.models.core.shared.ModelsCoreConstantsObtainer;
import org.adligo.models.core.shared.ValidationException;

/**
 * A mutable date range expressed as two epoch-millisecond bounds
 * ({@link #getStart()} and {@link #getEnd()}).  Either bound may be null,
 * meaning that side of the range is open.  The public Long setters enforce
 * start &lt;= end; the protected Date accessors exist only for hibernate
 * and deliberately bypass that check, which is why {@link #isValid()} can
 * still fail after a db load.
 */
public class DateRangeMutant implements I_DateRange, I_DateRangeMutant {
	public static final String IS_VALID_WITHOUT_NULLS = "isValidWithoutNulls";
	public static final String SET_ENDED = "setEnded";
	/**
	 * Method name reported by {@link #setStart(Long)} on rejection.
	 * Previously setStart reported SET_ENDED (a copy/paste defect).
	 */
	public static final String SET_STARTED = "setStart";

	// epoch millis; null means the bound is not set (open-ended)
	private Long start;
	private Long end;

	public DateRangeMutant() {}

	/**
	 * Copy constructor.
	 * @param p the range to copy bounds from
	 * @throws InvalidParameterException if p's bounds violate start &lt;= end
	 */
	public DateRangeMutant(I_DateRange p) throws InvalidParameterException {
		setStart(p.getStart());
		setEnd(p.getEnd());
	}

	/* (non-Javadoc)
	 * @see org.adligo.models.core.client.util.I_DateRange#getStarted()
	 */
	public Long getStart() {
		return start;
	}

	/**
	 * Sets the start of the range.
	 * @param p epoch millis, or null to leave the start open
	 * @throws InvalidParameterException if an end is already set and p is after it
	 */
	public void setStart(Long p) throws InvalidParameterException {
		if (end != null && p != null && p.longValue() > end.longValue()) {
			// bug fix: this exception used to name SET_ENDED as the method
			throw new InvalidParameterException(ModelsCoreConstantsObtainer.getConstants()
					.getStartOfDateRangeMustBeBeforeEnd(), SET_STARTED);
		}
		start = p;
	}

	/**
	 * protected only for hibernate
	 * @return the start as a Date, or null when the start is unset
	 */
	protected Date getStartDate() {
		if (start == null) {
			return null;
		}
		return new Date(start.longValue());
	}

	/**
	 * protected only for hibernate; note this bypasses the start &lt;= end check.
	 */
	protected void setStartDate(Date p) {
		if (p == null) {
			// allow null to come from the db
			return;
		}
		start = Long.valueOf(p.getTime());
	}

	/* (non-Javadoc)
	 * @see org.adligo.models.core.client.util.I_DateRange#getEnded()
	 */
	public Long getEnd() {
		return end;
	}

	/**
	 * Sets the end of the range.
	 * @param p epoch millis, or null to leave the end open
	 * @throws InvalidParameterException if a start is already set and p is before it
	 */
	public void setEnd(Long p) throws InvalidParameterException {
		if (start != null && p != null && p.longValue() < start.longValue()) {
			throw new InvalidParameterException(ModelsCoreConstantsObtainer.getConstants()
					.getEndOfDateRangeMustBeAfterStart(), SET_ENDED);
		}
		end = p;
	}

	/**
	 * protected only for hibernate
	 * @return the end as a Date, or null when the end is unset
	 */
	protected Date getEndDate() {
		if (end == null) {
			return null;
		}
		return new Date(end.longValue());
	}

	/**
	 * protected only for hibernate; note this bypasses the start &lt;= end check.
	 */
	protected void setEndDate(Date p) {
		if (p == null) {
			// allow null to come from the db
			return;
		}
		end = Long.valueOf(p.getTime());
	}

	/**
	 * Checks only that the start is not after the end; either bound may be
	 * null.  Use {@link #isValidWithoutNulls()} when both bounds are required.
	 * @throws ValidationException if start &gt; end
	 */
	public void isValid() throws ValidationException {
		try {
			// re-run the setter ordering checks against a scratch instance
			DateRangeMutant other = new DateRangeMutant();
			other.setEnd(end);
			other.setStart(start);
		} catch (InvalidParameterException ipe) {
			// reachable when the hibernate Date setters loaded an inverted
			// range from the db, since those bypass the ordering check
			throw new ValidationException(ipe);
		}
	}

	/**
	 * Like {@link #isValid()} but additionally requires both bounds.
	 * @throws ValidationException if either bound is null or start &gt; end
	 */
	public void isValidWithoutNulls() throws ValidationException {
		if (start == null) {
			throw new ValidationException(ModelsCoreConstantsObtainer.getConstants()
					.getDateRangeRequiresStartValue(), IS_VALID_WITHOUT_NULLS);
		}
		if (end == null) {
			throw new ValidationException(ModelsCoreConstantsObtainer.getConstants()
					.getDateRangeRequiresEndValue(), IS_VALID_WITHOUT_NULLS);
		}
		isValid();
	}

	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((end == null) ? 0 : end.hashCode());
		result = prime * result + ((start == null) ? 0 : start.hashCode());
		return result;
	}

	/**
	 * Equality against any I_DateRange with the same start and end
	 * (null bounds compare equal to null bounds).
	 */
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (!(obj instanceof I_DateRange)) {
			// also covers null; replaces the old catch of ClassCastException
			return false;
		}
		I_DateRange other = (I_DateRange) obj;
		if (end == null) {
			if (other.getEnd() != null) {
				return false;
			}
		} else if (!end.equals(other.getEnd())) {
			return false;
		}
		if (start == null) {
			if (other.getStart() != null) {
				return false;
			}
		} else if (!start.equals(other.getStart())) {
			return false;
		}
		return true;
	}

	/**
	 * Returns true if this range and dr share at least one instant.
	 * Null bounds on dr are treated as open (-infinity / +infinity).
	 * @param dr the other range
	 * @return true when the closed intervals intersect; false when this
	 *         range has a null bound (consistent with contains)
	 */
	public boolean overlaps(I_DateRange dr) {
		if (start == null || end == null) {
			// previously this threw a NullPointerException on open ranges;
			// mirror the guard already present in contains(I_DateRange)
			return false;
		}
		long drEnd = Long.MAX_VALUE;
		Long dre = dr.getEnd();
		if (dre != null) {
			drEnd = dre.longValue();
		}
		long drStart = Long.MIN_VALUE;
		Long drs = dr.getStart();
		if (drs != null) {
			drStart = drs.longValue();
		}
		long sl = start.longValue();
		long el = end.longValue();
		// two closed intervals intersect iff each starts no later than the
		// other ends; the old endpoint-in-range test missed the case where
		// dr fully contains this range
		return drStart <= el && drEnd >= sl;
	}

	/**
	 * @param dr the candidate sub-range; null bounds on dr are treated as
	 *           open (-infinity / +infinity)
	 * @return true if this range fully contains dr (this starts at or before
	 *         dr and ends at or after it); false when this has a null bound
	 */
	public boolean contains(I_DateRange dr) {
		if (start == null || end == null) {
			return false;
		}
		long drEnd = Long.MAX_VALUE;
		Long dre = dr.getEnd();
		if (dre != null) {
			drEnd = dre.longValue();
		}
		long drStart = Long.MIN_VALUE;
		Long drs = dr.getStart();
		if (drs != null) {
			drStart = drs.longValue();
		}
		return start.longValue() <= drStart && end.longValue() >= drEnd;
	}

	public String toString() {
		return toString(DateRangeMutant.class);
	}

	/**
	 * @param c class whose short name labels the output
	 * @return e.g. "DateRangeMutant [start-end]"; an unset bound prints as "null"
	 */
	public String toString(Class c) {
		I_TextFormatter formatter = TextFormatter.getInstance();
		String startedString = null;
		if (start != null) {
			startedString = formatter.formatDate(DateTime.DEFAULT_DATE_TIME_FORMAT, start.longValue());
		}
		String endedString = null;
		if (end != null) {
			endedString = formatter.formatDate(DateTime.DEFAULT_DATE_TIME_FORMAT, end.longValue());
		}
		return "" + ClassUtils.getClassShortName(c) + " [" + startedString + "-" + endedString + "]";
	}

	/**
	 * @return end - start in millis, or null when either bound is unset
	 */
	public Long getDuration() {
		if (start == null || end == null) {
			return null;
		}
		return Long.valueOf(end.longValue() - start.longValue());
	}

	/**
	 * @param time epoch millis to test
	 * @return true when time falls inside the closed range; false when any
	 *         of time, start, or end is null (previously this threw a NPE)
	 */
	public boolean contains(Long time) {
		if (time == null || start == null || end == null) {
			return false;
		}
		long tl = time.longValue();
		return tl >= start.longValue() && tl <= end.longValue();
	}
}
package com.hubspot.singularity; import static com.google.common.base.Preconditions.checkNotNull; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; public class SingularityRequestBuilder { private String id; private RequestType requestType; private Optional<List<String>> owners; private Optional<Integer> numRetriesOnFailure; private Optional<String> schedule; private Optional<String> quartzSchedule; private Optional<String> scheduleTimeZone; private Optional<ScheduleType> scheduleType; private Optional<Long> killOldNonLongRunningTasksAfterMillis; private Optional<Long> taskExecutionTimeLimitMillis; private Optional<Long> scheduledExpectedRuntimeMillis; private Optional<Long> waitAtLeastMillisAfterTaskFinishesForReschedule; private Optional<Integer> instances; private Optional<Boolean> skipHealthchecks; private Optional<Boolean> rackSensitive; private Optional<List<String>> rackAffinity; private Optional<AgentPlacement> agentPlacement; private Optional<Map<String, String>> requiredAgentAttributes; private Optional<Map<String, String>> allowedAgentAttributes; private Optional<Map<String, Map<String, Integer>>> agentAttributeMinimums; private Optional<Boolean> loadBalanced; private Optional<String> requiredRole; private Optional<String> group; private Optional<Set<String>> readWriteGroups; private Optional<Set<String>> readOnlyGroups; private Optional<Map<String, Set<SingularityUserFacingAction>>> actionPermissions; private Optional<Boolean> bounceAfterScale; private Optional<Map<SingularityEmailType, List<SingularityEmailDestination>>> emailConfigurationOverrides; private Optional<Boolean> hideEvenNumberAcrossRacksHint; private Optional<String> taskLogErrorRegex; private Optional<Boolean> taskLogErrorRegexCaseSensitive; private Optional<Double> taskPriorityLevel; private Optional<Integer> maxTasksPerOffer; private Optional<Boolean> allowBounceToSameHost; @Deprecated private Optional<String> 
dataCenter; public SingularityRequestBuilder(String id, RequestType requestType) { this.id = checkNotNull(id, "id cannot be null"); this.requestType = checkNotNull(requestType, "requestType cannot be null"); this.owners = Optional.empty(); this.numRetriesOnFailure = Optional.empty(); this.schedule = Optional.empty(); this.scheduleType = Optional.empty(); this.killOldNonLongRunningTasksAfterMillis = Optional.empty(); this.taskExecutionTimeLimitMillis = Optional.empty(); this.instances = Optional.empty(); this.rackSensitive = Optional.empty(); this.loadBalanced = Optional.empty(); this.quartzSchedule = Optional.empty(); this.scheduleTimeZone = Optional.empty(); this.rackAffinity = Optional.empty(); this.agentPlacement = Optional.empty(); this.requiredAgentAttributes = Optional.empty(); this.allowedAgentAttributes = Optional.empty(); this.agentAttributeMinimums = Optional.empty(); this.scheduledExpectedRuntimeMillis = Optional.empty(); this.waitAtLeastMillisAfterTaskFinishesForReschedule = Optional.empty(); this.group = Optional.empty(); this.readWriteGroups = Optional.empty(); this.readOnlyGroups = Optional.empty(); this.actionPermissions = Optional.empty(); this.bounceAfterScale = Optional.empty(); this.emailConfigurationOverrides = Optional.empty(); this.skipHealthchecks = Optional.empty(); this.hideEvenNumberAcrossRacksHint = Optional.empty(); this.taskLogErrorRegex = Optional.empty(); this.taskLogErrorRegexCaseSensitive = Optional.empty(); this.taskPriorityLevel = Optional.empty(); this.maxTasksPerOffer = Optional.empty(); this.allowBounceToSameHost = Optional.empty(); this.requiredRole = Optional.empty(); this.dataCenter = Optional.empty(); } public SingularityRequest build() { return new SingularityRequest( id, requestType, owners, numRetriesOnFailure, schedule, instances, rackSensitive, loadBalanced, killOldNonLongRunningTasksAfterMillis, taskExecutionTimeLimitMillis, scheduleType, quartzSchedule, scheduleTimeZone, rackAffinity, Optional.empty(), 
Optional.empty(), Optional.empty(), Optional.empty(), scheduledExpectedRuntimeMillis, waitAtLeastMillisAfterTaskFinishesForReschedule, group, readWriteGroups, readOnlyGroups, actionPermissions, bounceAfterScale, skipHealthchecks, emailConfigurationOverrides, Optional.<Boolean>empty(), hideEvenNumberAcrossRacksHint, taskLogErrorRegex, taskLogErrorRegexCaseSensitive, taskPriorityLevel, maxTasksPerOffer, allowBounceToSameHost, requiredRole, dataCenter, requiredAgentAttributes, allowedAgentAttributes, agentAttributeMinimums, agentPlacement ); } public Optional<Boolean> getSkipHealthchecks() { return skipHealthchecks; } public SingularityRequestBuilder setSkipHealthchecks( Optional<Boolean> skipHealthchecks ) { this.skipHealthchecks = skipHealthchecks; return this; } public Optional<Boolean> getLoadBalanced() { return loadBalanced; } public SingularityRequestBuilder setLoadBalanced(Optional<Boolean> loadBalanced) { this.loadBalanced = loadBalanced; return this; } public String getId() { return id; } public SingularityRequestBuilder setId(String id) { this.id = id; return this; } public Optional<List<String>> getOwners() { return owners; } public SingularityRequestBuilder setOwners(Optional<List<String>> owners) { this.owners = owners; return this; } public Optional<Integer> getNumRetriesOnFailure() { return numRetriesOnFailure; } public SingularityRequestBuilder setNumRetriesOnFailure( Optional<Integer> numRetriesOnFailure ) { this.numRetriesOnFailure = numRetriesOnFailure; return this; } public Optional<String> getSchedule() { return schedule; } public SingularityRequestBuilder setSchedule(Optional<String> schedule) { this.schedule = schedule; return this; } public Optional<Integer> getInstances() { return instances; } public SingularityRequestBuilder setInstances(Optional<Integer> instances) { this.instances = instances; return this; } public SingularityRequestBuilder setRequiredRole(Optional<String> requiredRole) { this.requiredRole = requiredRole; return this; } 
public Optional<Boolean> getRackSensitive() { return rackSensitive; } public SingularityRequestBuilder setRackSensitive(Optional<Boolean> rackSensitive) { this.rackSensitive = rackSensitive; return this; } public Optional<Long> getKillOldNonLongRunningTasksAfterMillis() { return killOldNonLongRunningTasksAfterMillis; } public SingularityRequestBuilder setKillOldNonLongRunningTasksAfterMillis( Optional<Long> killOldNonLongRunningTasksAfterMillis ) { this.killOldNonLongRunningTasksAfterMillis = killOldNonLongRunningTasksAfterMillis; return this; } public Optional<Long> getTaskExecutionTimeLimitMillis() { return taskExecutionTimeLimitMillis; } public SingularityRequestBuilder setTaskExecutionTimeLimitMillis( Optional<Long> taskExecutionTimeLimitMillis ) { this.taskExecutionTimeLimitMillis = taskExecutionTimeLimitMillis; return this; } public Optional<ScheduleType> getScheduleType() { return scheduleType; } public SingularityRequestBuilder setScheduleType(Optional<ScheduleType> scheduleType) { this.scheduleType = scheduleType; return this; } public Optional<String> getQuartzSchedule() { return quartzSchedule; } public SingularityRequestBuilder setQuartzSchedule(Optional<String> quartzSchedule) { this.quartzSchedule = quartzSchedule; return this; } public Optional<String> getScheduleTimeZone() { return scheduleTimeZone; } public SingularityRequestBuilder setScheduleTimeZone( Optional<String> scheduleTimeZone ) { this.scheduleTimeZone = scheduleTimeZone; return this; } public Optional<List<String>> getRackAffinity() { return rackAffinity; } public SingularityRequestBuilder setRackAffinity(Optional<List<String>> rackAffinity) { this.rackAffinity = rackAffinity; return this; } public Optional<AgentPlacement> getAgentPlacement() { return agentPlacement; } public SingularityRequestBuilder setAgentPlacement( Optional<AgentPlacement> agentPlacement ) { this.agentPlacement = agentPlacement; return this; } @Deprecated public Optional<SlavePlacement> getSlavePlacement() { return 
agentPlacement.map(a -> SlavePlacement.valueOf(a.name())); } @Deprecated public SingularityRequestBuilder setSlavePlacement( Optional<SlavePlacement> agentPlacement ) { this.agentPlacement = agentPlacement.map(s -> AgentPlacement.valueOf(s.name())); return this; } public Optional<Long> getScheduledExpectedRuntimeMillis() { return scheduledExpectedRuntimeMillis; } public SingularityRequestBuilder setScheduledExpectedRuntimeMillis( Optional<Long> scheduledExpectedRuntimeMillis ) { this.scheduledExpectedRuntimeMillis = scheduledExpectedRuntimeMillis; return this; } public RequestType getRequestType() { return requestType; } public SingularityRequestBuilder setRequestType(RequestType requestType) { this.requestType = requestType; return this; } public Optional<Long> getWaitAtLeastMillisAfterTaskFinishesForReschedule() { return waitAtLeastMillisAfterTaskFinishesForReschedule; } public SingularityRequestBuilder setWaitAtLeastMillisAfterTaskFinishesForReschedule( Optional<Long> waitAtLeastMillisAfterTaskFinishesForReschedule ) { this.waitAtLeastMillisAfterTaskFinishesForReschedule = waitAtLeastMillisAfterTaskFinishesForReschedule; return this; } public Optional<String> getGroup() { return group; } public SingularityRequestBuilder setGroup(Optional<String> group) { this.group = group; return this; } public Optional<Set<String>> getReadWriteGroups() { return readWriteGroups; } public SingularityRequestBuilder setReadWriteGroups( Optional<Set<String>> readWriteGroups ) { this.readWriteGroups = readWriteGroups; return this; } public Optional<Map<String, Set<SingularityUserFacingAction>>> getActionPermissions() { return actionPermissions; } public SingularityRequestBuilder setActionPermissions( Optional<Map<String, Set<SingularityUserFacingAction>>> getActionPermissions ) { this.actionPermissions = getActionPermissions; return this; } public SingularityRequestBuilder setRequiredAgentAttributes( Optional<Map<String, String>> requiredAgentAttributes ) { 
this.requiredAgentAttributes = requiredAgentAttributes; return this; } public SingularityRequestBuilder setAllowedAgentAttributes( Optional<Map<String, String>> allowedAgentAttributes ) { this.allowedAgentAttributes = allowedAgentAttributes; return this; } public SingularityRequestBuilder setAgentAttributeMinimums( Optional<Map<String, Map<String, Integer>>> agentAttributeMinimums ) { this.agentAttributeMinimums = agentAttributeMinimums; return this; } @Deprecated public SingularityRequestBuilder setRequiredSlaveAttributes( Optional<Map<String, String>> requiredAgentAttributes ) { this.requiredAgentAttributes = requiredAgentAttributes; return this; } @Deprecated public SingularityRequestBuilder setAllowedSlaveAttributes( Optional<Map<String, String>> allowedAgentAttributes ) { this.allowedAgentAttributes = allowedAgentAttributes; return this; } @Deprecated public SingularityRequestBuilder setSlaveAttributeMinimums( Optional<Map<String, Map<String, Integer>>> agentAttributeMinimums ) { this.agentAttributeMinimums = agentAttributeMinimums; return this; } public Optional<Set<String>> getReadOnlyGroups() { return readOnlyGroups; } public SingularityRequestBuilder setReadOnlyGroups( Optional<Set<String>> readOnlyGroups ) { this.readOnlyGroups = readOnlyGroups; return this; } public Optional<Boolean> getBounceAfterScale() { return bounceAfterScale; } public SingularityRequestBuilder setBounceAfterScale( Optional<Boolean> bounceAfterScale ) { this.bounceAfterScale = bounceAfterScale; return this; } public Optional<Map<SingularityEmailType, List<SingularityEmailDestination>>> getEmailConfigurationOverrides() { return emailConfigurationOverrides; } public SingularityRequestBuilder setEmailConfigurationOverrides( Optional<Map<SingularityEmailType, List<SingularityEmailDestination>>> emailConfigurationOverrides ) { this.emailConfigurationOverrides = emailConfigurationOverrides; return this; } public Optional<Boolean> getHideEvenNumberAcrossRacksHint() { return 
hideEvenNumberAcrossRacksHint; } public SingularityRequestBuilder setHideEvenNumberAcrossRacksHint( Optional<Boolean> hideEvenNumberAcrossRacksHint ) { this.hideEvenNumberAcrossRacksHint = hideEvenNumberAcrossRacksHint; return this; } public Optional<String> getTaskLogErrorRegex() { return taskLogErrorRegex; } public SingularityRequestBuilder setTaskLogErrorRegex( Optional<String> taskLogErrorRegex ) { this.taskLogErrorRegex = taskLogErrorRegex; return this; } public Optional<Boolean> getTaskLogErrorRegexCaseSensitive() { return taskLogErrorRegexCaseSensitive; } public SingularityRequestBuilder setTaskLogErrorRegexCaseSensitive( Optional<Boolean> taskLogErrorRegexCaseSensitive ) { this.taskLogErrorRegexCaseSensitive = taskLogErrorRegexCaseSensitive; return this; } public Optional<Double> getTaskPriorityLevel() { return taskPriorityLevel; } public SingularityRequestBuilder setTaskPriorityLevel( Optional<Double> taskPriorityLevel ) { this.taskPriorityLevel = taskPriorityLevel; return this; } public Optional<Integer> getMaxTasksPerOffer() { return maxTasksPerOffer; } public SingularityRequestBuilder setMaxTasksPerOffer( Optional<Integer> maxTasksPerOffer ) { this.maxTasksPerOffer = maxTasksPerOffer; return this; } public Optional<Boolean> getAllowBounceToSameHost() { return allowBounceToSameHost; } public SingularityRequestBuilder setAllowBounceToSameHost( Optional<Boolean> allowBounceToSameHost ) { this.allowBounceToSameHost = allowBounceToSameHost; return this; } @Deprecated public Optional<String> getDataCenter() { return dataCenter; } @Deprecated public SingularityRequestBuilder setDataCenter(Optional<String> dataCenter) { this.dataCenter = dataCenter; return this; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } SingularityRequestBuilder that = (SingularityRequestBuilder) o; return ( Objects.equals(id, that.id) && requestType == that.requestType && Objects.equals(owners, 
that.owners) && Objects.equals(numRetriesOnFailure, that.numRetriesOnFailure) && Objects.equals(schedule, that.schedule) && Objects.equals(quartzSchedule, that.quartzSchedule) && Objects.equals(scheduleTimeZone, that.scheduleTimeZone) && Objects.equals(scheduleType, that.scheduleType) && Objects.equals( killOldNonLongRunningTasksAfterMillis, that.killOldNonLongRunningTasksAfterMillis ) && Objects.equals(taskExecutionTimeLimitMillis, that.taskExecutionTimeLimitMillis) && Objects.equals( scheduledExpectedRuntimeMillis, that.scheduledExpectedRuntimeMillis ) && Objects.equals( waitAtLeastMillisAfterTaskFinishesForReschedule, that.waitAtLeastMillisAfterTaskFinishesForReschedule ) && Objects.equals(instances, that.instances) && Objects.equals(skipHealthchecks, that.skipHealthchecks) && Objects.equals(rackSensitive, that.rackSensitive) && Objects.equals(rackAffinity, that.rackAffinity) && Objects.equals(agentPlacement, that.agentPlacement) && Objects.equals(requiredAgentAttributes, that.requiredAgentAttributes) && Objects.equals(allowedAgentAttributes, that.allowedAgentAttributes) && Objects.equals(agentAttributeMinimums, that.agentAttributeMinimums) && Objects.equals(loadBalanced, that.loadBalanced) && Objects.equals(requiredRole, that.requiredRole) && Objects.equals(group, that.group) && Objects.equals(readWriteGroups, that.readWriteGroups) && Objects.equals(readOnlyGroups, that.readOnlyGroups) && Objects.equals(actionPermissions, that.actionPermissions) && Objects.equals(bounceAfterScale, that.bounceAfterScale) && Objects.equals(emailConfigurationOverrides, that.emailConfigurationOverrides) && Objects.equals(hideEvenNumberAcrossRacksHint, that.hideEvenNumberAcrossRacksHint) && Objects.equals(taskLogErrorRegex, that.taskLogErrorRegex) && Objects.equals( taskLogErrorRegexCaseSensitive, that.taskLogErrorRegexCaseSensitive ) && Objects.equals(taskPriorityLevel, that.taskPriorityLevel) && Objects.equals(maxTasksPerOffer, that.maxTasksPerOffer) && 
Objects.equals(allowBounceToSameHost, that.allowBounceToSameHost) && Objects.equals(dataCenter, that.dataCenter) ); } @Override public int hashCode() { return Objects.hash( id, requestType, owners, numRetriesOnFailure, schedule, quartzSchedule, scheduleTimeZone, scheduleType, killOldNonLongRunningTasksAfterMillis, taskExecutionTimeLimitMillis, scheduledExpectedRuntimeMillis, waitAtLeastMillisAfterTaskFinishesForReschedule, instances, skipHealthchecks, rackSensitive, rackAffinity, agentPlacement, requiredAgentAttributes, allowedAgentAttributes, agentAttributeMinimums, loadBalanced, requiredRole, group, readWriteGroups, readOnlyGroups, actionPermissions, bounceAfterScale, emailConfigurationOverrides, hideEvenNumberAcrossRacksHint, taskLogErrorRegex, taskLogErrorRegexCaseSensitive, taskPriorityLevel, maxTasksPerOffer, allowBounceToSameHost, dataCenter ); } @Override public String toString() { return ( "SingularityRequestBuilder{" + "id='" + id + '\'' + ", requestType=" + requestType + ", owners=" + owners + ", numRetriesOnFailure=" + numRetriesOnFailure + ", schedule=" + schedule + ", quartzSchedule=" + quartzSchedule + ", scheduleTimeZone=" + scheduleTimeZone + ", scheduleType=" + scheduleType + ", killOldNonLongRunningTasksAfterMillis=" + killOldNonLongRunningTasksAfterMillis + ", taskExecutionTimeLimitMillis=" + taskExecutionTimeLimitMillis + ", scheduledExpectedRuntimeMillis=" + scheduledExpectedRuntimeMillis + ", waitAtLeastMillisAfterTaskFinishesForReschedule=" + waitAtLeastMillisAfterTaskFinishesForReschedule + ", instances=" + instances + ", skipHealthchecks=" + skipHealthchecks + ", rackSensitive=" + rackSensitive + ", rackAffinity=" + rackAffinity + ", agentPlacement=" + agentPlacement + ", requiredAgentAttributes=" + requiredAgentAttributes + ", allowedAgentAttributes=" + allowedAgentAttributes + ", agentAttributeMinimums=" + agentAttributeMinimums + ", loadBalanced=" + loadBalanced + ", requiredRole=" + requiredRole + ", group=" + group + ", 
readWriteGroups=" + readWriteGroups + ", readOnlyGroups=" + readOnlyGroups + ", actionPermissions=" + actionPermissions + ", bounceAfterScale=" + bounceAfterScale + ", emailConfigurationOverrides=" + emailConfigurationOverrides + ", hideEvenNumberAcrossRacksHint=" + hideEvenNumberAcrossRacksHint + ", taskLogErrorRegex=" + taskLogErrorRegex + ", taskLogErrorRegexCaseSensitive=" + taskLogErrorRegexCaseSensitive + ", taskPriorityLevel=" + taskPriorityLevel + ", maxTasksPerOffer=" + maxTasksPerOffer + ", allowBounceToSameHost=" + allowBounceToSameHost + ", dataCenter=" + dataCenter + '}' ); } }
package org.wso2.carbon.identity.tests.passive.sts;

import org.apache.catalina.LifecycleException;
import org.apache.catalina.core.StandardHost;
import org.apache.catalina.startup.Tomcat;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.carbon.automation.api.clients.identity.application.mgt.ApplicationManagementServiceClient;
import org.wso2.carbon.automation.core.utils.LoginLogoutUtil;
import org.wso2.carbon.identity.application.common.model.xsd.*;
import org.wso2.carbon.identity.tests.ISIntegrationTest;
import org.wso2.carbon.identity.tests.utils.DataExtractUtil;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Integration test for the WS-Federation Passive STS flow of WSO2 Identity Server.
 * <p>
 * The test deploys the {@code PassiveSTSSampleApp} WAR on an embedded Tomcat
 * (port 8080), registers a service provider with passive-STS inbound
 * authentication and claim mappings, then drives a browser-style login through
 * {@code /commonauth} and asserts that the requested claims (given name, email)
 * appear in the final result page. Test methods are chained with
 * {@code dependsOnMethods} and share state ({@code sessionDataKey},
 * {@code resultPage}, one HTTP client) across the class.
 */
public class TestPassiveSTS extends ISIntegrationTest {

    private static final String ADMIN_EMAIL = "admin@wso2.com";
    private static final String SERVICE_PROVIDER_NAME = "PassiveSTSSampleApp";
    private static final String SERVICE_PROVIDER_Desc = "PassiveSTS Service Provider";
    private static final String PASSIVE_STS_SAMPLE_APP_NAME = "/PassiveSTSSampleApp";
    private static final String EMAIL_CLAIM_URI = "http://wso2.org/claims/emailaddress";
    private static final String GIVEN_NAME_CLAIM_URI = "http://wso2.org/claims/givenname";
    private static final String PASSIVE_STS_SAMPLE_APP_URL =
            "http://localhost:8080/PassiveSTSSampleApp";
    private static final String COMMON_AUTH_URL = "https://localhost:9443/commonauth";
    private static final String HTTP_RESPONSE_HEADER_LOCATION = "location";

    private String adminUsername;
    private String adminPassword;
    // Extracted from the login page; carried into the POST to /commonauth.
    private String sessionDataKey;
    // Content of the page returned after login; checked for claims at the end.
    private String resultPage;
    private Tomcat tomcat;
    private LoginLogoutUtil logManger;
    private ApplicationManagementServiceClient appMgtclient;
    private ServiceProvider serviceProvider;
    private DefaultHttpClient client;

    /**
     * Logs in to the management services, creates the app-management client and
     * the shared HTTP client, and configures the JVM trust store.
     */
    @BeforeClass(alwaysRun = true)
    public void testInit() throws Exception {
        super.init(0);
        logManger = new LoginLogoutUtil(isServer.getBackEndUrl());
        adminUsername = userInfo.getUserName();
        adminPassword = userInfo.getPassword();
        logManger.login(adminUsername, adminPassword, isServer.getBackEndUrl());
        appMgtclient = new ApplicationManagementServiceClient(isServer.getSessionCookie(),
                isServer.getBackEndUrl(), null);
        client = new DefaultHttpClient();
        setSystemProperties();
    }

    /** Shuts down the embedded Tomcat if one was started. */
    @AfterClass(alwaysRun = true)
    public void atEnd() throws Exception {
        if (tomcat != null) {
            tomcat.stop();
            tomcat.destroy();
        }
    }

    /** Deploys the sample WAR from the test classpath onto embedded Tomcat. */
    @Test(alwaysRun = true, description = "Deploy PassiveSTSSampleApp")
    public void testDeployPassiveSTSSampleApp() {
        try {
            // BUG FIX: Class.getResource() always uses '/' as the name separator,
            // independent of the platform. The original built the name with
            // File.separator, which is '\' on Windows and makes getResource()
            // return null (NPE below). Also avoids shadowing the 'tomcat' field
            // with a local of the same name.
            URL resourceUrl = getClass().getResource("/samples/PassiveSTSSampleApp.war");
            startTomcat(getTomcat(), PASSIVE_STS_SAMPLE_APP_NAME, resourceUrl.getPath());
        } catch (Exception e) {
            Assert.fail("PassiveSTSSampleApp application deployment failed.", e);
        }
    }

    /** Registers the service provider and re-reads it to obtain server-side state. */
    @Test(alwaysRun = true, description = "Add service provider")
    public void testAddSP() throws Exception {
        serviceProvider = new ServiceProvider();
        serviceProvider.setApplicationName(SERVICE_PROVIDER_NAME);
        serviceProvider.setDescription(SERVICE_PROVIDER_Desc);
        appMgtclient.createApplication(serviceProvider);
        serviceProvider = appMgtclient.getApplication(SERVICE_PROVIDER_NAME);
        Assert.assertNotNull(serviceProvider, "Service provider registration failed.");
    }

    /**
     * Adds a passive-STS inbound authentication config (realm = SP name) to the
     * service provider and verifies the update took effect.
     */
    @Test(alwaysRun = true, description = "Update service provider with passiveSTS configs",
            dependsOnMethods = "testAddSP")
    public void testUpdateSP() throws Exception {
        serviceProvider.setOutboundProvisioningConfig(new OutboundProvisioningConfig());
        List<InboundAuthenticationRequestConfig> authRequestList =
                new ArrayList<InboundAuthenticationRequestConfig>();
        String passiveSTSRealm = SERVICE_PROVIDER_NAME;
        if (passiveSTSRealm != null) {
            InboundAuthenticationRequestConfig opicAuthenticationRequest =
                    new InboundAuthenticationRequestConfig();
            opicAuthenticationRequest.setInboundAuthKey(passiveSTSRealm);
            opicAuthenticationRequest.setInboundAuthType("passivests");
            authRequestList.add(opicAuthenticationRequest);
        }
        if (authRequestList.size() > 0) {
            serviceProvider.getInboundAuthenticationConfig()
                    .setInboundAuthenticationRequestConfigs(
                            authRequestList.toArray(
                                    new InboundAuthenticationRequestConfig[authRequestList.size()]));
        }
        appMgtclient.updateApplicationData(serviceProvider);
        Assert.assertNotEquals(appMgtclient.getApplication(SERVICE_PROVIDER_NAME)
                        .getInboundAuthenticationConfig()
                        .getInboundAuthenticationRequestConfigs().length, 0,
                "Fail to update service provider with passiveSTS configs");
    }

    /** Maps the given-name and email claims on the SP and verifies the mapping order. */
    @Test(alwaysRun = true, description = "Update service provider with claim configurations",
            dependsOnMethods = "testUpdateSP")
    public void testAddClaimConfiguration() throws Exception {
        serviceProvider.getClaimConfig().setClaimMappings(getClaimMappings());
        appMgtclient.updateApplicationData(serviceProvider);
        ServiceProvider updatedServiceProvider =
                appMgtclient.getApplication(SERVICE_PROVIDER_NAME);
        ClaimConfig updatedClaimConfig = updatedServiceProvider.getClaimConfig();
        Assert.assertEquals(updatedClaimConfig.getClaimMappings()[0].getLocalClaim().getClaimUri(),
                GIVEN_NAME_CLAIM_URI, "Failed update given name claim uri");
        Assert.assertEquals(updatedClaimConfig.getClaimMappings()[1].getLocalClaim().getClaimUri(),
                EMAIL_CLAIM_URI, "Failed update email claim uri");
    }

    /**
     * Opens the sample app, which redirects through the IS login page, and
     * scrapes the {@code sessionDataKey} hidden field from the response.
     */
    @Test(alwaysRun = true, description = "Invoke PassiveSTSSampleApp",
            dependsOnMethods = "testAddClaimConfiguration")
    public void testInvokePassiveSTSSampleApp() throws IOException {
        HttpGet request = new HttpGet(PASSIVE_STS_SAMPLE_APP_URL);
        HttpResponse response = client.execute(request);
        Assert.assertNotNull(response, "PassiveSTSSampleApp invoke response is null");
        int responseCode = response.getStatusLine().getStatusCode();
        Assert.assertEquals(responseCode, 200, "Invalid Response");
        Map<String, Integer> keyPositionMap = new HashMap<String, Integer>(1);
        keyPositionMap.put("name=\"sessionDataKey\"", 1);
        List<DataExtractUtil.KeyValue> keyValues =
                DataExtractUtil.extractDataFromResponse(response, keyPositionMap);
        Assert.assertNotNull(keyValues, "sessionDataKey key value is null");
        sessionDataKey = keyValues.get(0).getValue();
        Assert.assertNotNull(sessionDataKey, "Session data key is null.");
        EntityUtils.consume(response.getEntity());
    }

    /**
     * Posts the admin credentials plus sessionDataKey to /commonauth, follows the
     * 302 redirect, and stores the final page body in {@code resultPage}.
     */
    @Test(alwaysRun = true, description = "Send login post request",
            dependsOnMethods = "testInvokePassiveSTSSampleApp")
    public void testSendLoginRequestPost() throws Exception {
        HttpPost request = new HttpPost(COMMON_AUTH_URL);
        List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
        urlParameters.add(new BasicNameValuePair("username", adminUsername));
        urlParameters.add(new BasicNameValuePair("password", adminPassword));
        urlParameters.add(new BasicNameValuePair("sessionDataKey", sessionDataKey));
        request.setEntity(new UrlEncodedFormEntity(urlParameters));
        HttpResponse response = client.execute(request);
        Assert.assertNotNull(response, "Login response is null.");
        Assert.assertEquals(response.getStatusLine().getStatusCode(), 302, "Invalid Response");
        Header locationHeader = response.getFirstHeader(HTTP_RESPONSE_HEADER_LOCATION);
        Assert.assertNotNull(locationHeader, "Login response header is null");
        HttpGet getRequest = new HttpGet(locationHeader.getValue());
        // Release the POST response entity before reusing the shared client.
        EntityUtils.consume(response.getEntity());
        response = client.execute(getRequest);
        resultPage = DataExtractUtil.getContentData(response);
        EntityUtils.consume(response.getEntity());
    }

    /** Asserts that both claim URIs and their expected values appear in the result page. */
    @Test(alwaysRun = true, description = "Test PassiveSTS Claims",
            dependsOnMethods = "testSendLoginRequestPost")
    public void testPassiveSTSClaims() {
        Assert.assertTrue(resultPage.contains(GIVEN_NAME_CLAIM_URI),
                "Claim givenname is expected");
        Assert.assertTrue(resultPage.contains(adminUsername),
                "Claim value givenname is expected");
        Assert.assertTrue(resultPage.contains(EMAIL_CLAIM_URI), "Claim email is expected");
        Assert.assertTrue(resultPage.contains(ADMIN_EMAIL), "Claim value email is expected");
    }

    /** Adds the web app at {@code webAppUrl} from {@code webAppPath} and starts Tomcat. */
    private void startTomcat(Tomcat tomcat, String webAppUrl, String webAppPath)
            throws LifecycleException {
        tomcat.addWebapp(tomcat.getHost(), webAppUrl, webAppPath);
        tomcat.start();
    }

    /** Creates and configures the embedded Tomcat instance (also stored in the field). */
    private Tomcat getTomcat() {
        tomcat = new Tomcat();
        tomcat.getService().setContainer(tomcat.getEngine());
        tomcat.setPort(8080);
        tomcat.setBaseDir(".");
        StandardHost stdHost = (StandardHost) tomcat.getHost();
        stdHost.setAppBase(".");
        stdHost.setAutoDeploy(true);
        stdHost.setDeployOnStartup(true);
        stdHost.setUnpackWARs(true);
        tomcat.setHost(stdHost);
        return tomcat;
    }

    /** Points the JVM trust store at the test wso2carbon.jks so HTTPS to :9443 works. */
    private void setSystemProperties() {
        // BUG FIX: resource names use '/' regardless of platform (see
        // testDeployPassiveSTSSampleApp); File.separator broke this on Windows.
        URL resourceUrl = getClass().getResource("/keystores/products/wso2carbon.jks");
        System.setProperty("javax.net.ssl.trustStore", resourceUrl.getPath());
        System.setProperty("javax.net.ssl.trustStorePassword", "wso2carbon");
        System.setProperty("javax.net.ssl.trustStoreType", "JKS");
    }

    /**
     * Builds the claim mappings for the SP: givenname first, email second (the
     * assertion in testAddClaimConfiguration relies on this order). Each mapping
     * is requested and maps the local claim to itself as the remote claim.
     */
    private ClaimMapping[] getClaimMappings() {
        List<ClaimMapping> claimMappingList = new ArrayList<ClaimMapping>();

        Claim givenNameClaim = new Claim();
        givenNameClaim.setClaimUri(GIVEN_NAME_CLAIM_URI);
        ClaimMapping givenNameClaimMapping = new ClaimMapping();
        givenNameClaimMapping.setRequested(true);
        givenNameClaimMapping.setLocalClaim(givenNameClaim);
        givenNameClaimMapping.setRemoteClaim(givenNameClaim);
        claimMappingList.add(givenNameClaimMapping);

        Claim emailClaim = new Claim();
        emailClaim.setClaimUri(EMAIL_CLAIM_URI);
        ClaimMapping emailClaimMapping = new ClaimMapping();
        emailClaimMapping.setRequested(true);
        emailClaimMapping.setLocalClaim(emailClaim);
        emailClaimMapping.setRemoteClaim(emailClaim);
        claimMappingList.add(emailClaimMapping);

        return claimMappingList.toArray(new ClaimMapping[claimMappingList.size()]);
    }
}
package io.aif.language.sentence.splitters;

import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Guice;
import org.apache.log4j.Logger;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import io.aif.language.common.ISplitter;
import io.aif.language.common.settings.ISettings;
import io.aif.language.common.settings.SettingsModule;
import io.aif.language.sentence.separators.classificators.ISeparatorGroupsClassifier;
import io.aif.language.sentence.separators.extractors.ISeparatorExtractor;
import io.aif.language.sentence.separators.groupers.ISeparatorsGrouper;

/**
 * Template base class for sentence splitters: it extracts candidate sentence
 * separators from the token stream, groups and classifies them, asks the
 * concrete subclass (via the abstract {@code split(target, splitters)}) for a
 * per-token end-of-sentence boolean mask, and finally cuts the token list into
 * sentences and trims separator characters off the edges of each token.
 */
public abstract class AbstractSentenceSplitter implements ISplitter<List<String>, List<String>> {

    private static final Logger logger = Logger.getLogger(AbstractSentenceSplitter.class);

    private final ISeparatorExtractor sentenceSeparatorExtractor;
    private final ISeparatorsGrouper sentenceSeparatorsGrouper;
    private final ISeparatorGroupsClassifier sentenceSeparatorGroupsClassificatory;

    /**
     * @param sentenceSeparatorExtractor            finds separator characters in the input
     * @param sentenceSeparatorsGrouper             groups separators into character sets
     * @param sentenceSeparatorGroupsClassificatory classifies the groups (e.g. sentence-ending vs not)
     */
    protected AbstractSentenceSplitter(final ISeparatorExtractor sentenceSeparatorExtractor,
                                       final ISeparatorsGrouper sentenceSeparatorsGrouper,
                                       final ISeparatorGroupsClassifier sentenceSeparatorGroupsClassificatory) {
        this.sentenceSeparatorExtractor = sentenceSeparatorExtractor;
        this.sentenceSeparatorsGrouper = sentenceSeparatorsGrouper;
        this.sentenceSeparatorGroupsClassificatory = sentenceSeparatorGroupsClassificatory;
    }

    /**
     * Applies {@link #prepareToken} to every token of a sentence and flattens
     * the results into one list (so a token like "end." becomes "end", ".").
     */
    @VisibleForTesting
    static List<String> prepareSentences(final List<String> sentence, final List<Character> separators) {
        final List<String> preparedTokens = new ArrayList<>();
        for (String token : sentence) {
            preparedTokens.addAll(prepareToken(token, separators));
        }
        return preparedTokens;
    }

    /**
     * Splits a single token into up to three pieces: a leading run of
     * separator characters (if any), the core, and a trailing run of
     * separator characters (if any). The concatenation of the returned
     * pieces always equals the original token.
     * <p>
     * NOTE(review): an empty token throws StringIndexOutOfBoundsException in
     * the position helpers (charAt(0)); callers presumably never pass empty
     * tokens — confirm upstream tokenizer guarantees this.
     */
    @VisibleForTesting
    static List<String> prepareToken(final String token, final List<Character> separators) {
        final List<String> tokens = new ArrayList<>(3);
        final int lastPosition = lastNonSeparatorPosition(token, separators);
        final int firstPosition = firstNonSeparatorPosition(token, separators);
        if (firstPosition != 0) {
            // Leading separator run, e.g. the opening quote of "\"word".
            tokens.add(token.substring(0, firstPosition));
        }
        tokens.add(token.substring(firstPosition, lastPosition));
        if (lastPosition != token.length()) {
            // Trailing separator run, e.g. the "." of "word.".
            tokens.add(token.substring(lastPosition, token.length()));
        }
        return tokens;
    }

    /**
     * Index of the first character that is not a separator. Returns 0 both
     * when the token does not start with a separator and — deliberately, via
     * the {@code i == token.length()} check — when the token consists entirely
     * of separators (so no empty "core" substring is produced upstream).
     * <p>
     * NOTE(review): the parameter name "separarors" is a typo for "separators";
     * left untouched here since this edit changes documentation only.
     */
    @VisibleForTesting
    static int firstNonSeparatorPosition(final String token, final List<Character> separarors) {
        if (!separarors.contains(token.charAt(0))) {
            return 0;
        }
        int i = 0;
        while (i < token.length() && separarors.contains(token.charAt(i))) {
            i++;
        }
        if (i == token.length()) {
            // All characters are separators: fall back to 0 rather than length.
            return 0;
        }
        return i;
    }

    /**
     * Index one past the last character that is not a separator, i.e. the
     * exclusive end of the token "core". Returns {@code token.length()} when
     * the token does not end with a separator. For an all-separator token the
     * loop stops at index 0 and the result is 1 (the {@code i == 0} guard is
     * unreachable after the {@code i++}) — the first character is then treated
     * as the core.
     */
    @VisibleForTesting
    static int lastNonSeparatorPosition(final String token, final List<Character> separators) {
        if (!separators.contains(token.charAt(token.length() - 1))) {
            return token.length();
        }
        int i = token.length() - 1;
        while (i > 0 && separators.contains(token.charAt(i))) {
            i--;
        }
        i++;
        if (i == 0) {
            return token.length();
        }
        return i;
    }

    /**
     * Splits a flat token list into sentences.
     * <p>
     * Pipeline: warn on too-small inputs, extract separator characters (if none
     * are found the whole input is returned as a single "sentence"), group and
     * classify them, obtain the per-token end-of-sentence mask from the
     * subclass, cut the tokens with {@link SentenceIterator}, and finally trim
     * separators off each token in parallel.
     * <p>
     * NOTE(review): a new Guice injector is created on every call just to read
     * settings — consider injecting ISettings once instead.
     *
     * @param tokens the full token stream of the text
     * @return the list of sentences, each a list of tokens
     */
    public List<List<String>> split(final List<String> tokens) {
        final ISettings settings = Guice.createInjector(new SettingsModule()).getInstance(ISettings.class);
        if (tokens.size() <= settings.recommendedMinimumTokensInputCount()) {
            logger.warn(
                String.format("Tokens input count is too low: %d, recommend count is: %d. Don't expect " +
                    "good quality of output", tokens.size(), settings.recommendedMinimumTokensInputCount()));
        }
        logger.debug(String.format("Starting sentence extraction for tokens: %d", tokens.size()));
        final Optional<List<Character>> optionalSeparators = sentenceSeparatorExtractor.extract(tokens);
        if (!optionalSeparators.isPresent() || optionalSeparators.get().size() == 0) {
            logger.error("Fail to extract any sentence separators, returning tokens");
            // No separators found: degrade gracefully to one sentence = whole input.
            return new ArrayList<List<String>>() {{
                add(tokens);
            }};
        }
        final List<Character> separators = optionalSeparators.get();
        logger.debug(
            String.format("Sentences separators in this text: %s", Arrays.toString(separators.toArray())));
        final List<Set<Character>> separatorsGroups = sentenceSeparatorsGrouper.group(tokens, separators);
        final Map<ISeparatorGroupsClassifier.Group, Set<Character>> separatorsGroupsClassified =
            sentenceSeparatorGroupsClassificatory.classify(tokens, separatorsGroups);
        // Subclass decides, per token, whether a sentence ends there.
        final List<Boolean> booleans = split(tokens, separatorsGroupsClassified);
        final SentenceIterator sentenceIterator = new SentenceIterator(tokens, booleans);
        final List<List<String>> sentences = new ArrayList<>();
        while (sentenceIterator.hasNext()) {
            sentences.add(sentenceIterator.next());
        }
        logger.debug(String.format("Founded %d sentences", sentences.size()));
        return sentences
            .parallelStream()
            .map(sentence -> prepareSentences(sentence, separators))
            .collect(Collectors.toList());
    }

    /**
     * Strategy hook: given the tokens and the classified separator groups,
     * return one Boolean per token — true where a sentence ends.
     */
    public abstract List<Boolean> split(final List<String> target,
                                        final Map<ISeparatorGroupsClassifier.Group, Set<Character>> splitters);

    /** Enumerates the bundled splitter implementations as shared singletons. */
    public enum Type {
        SIMPLE(new SimpleSentenceSplitter()),
        HEURISTIC(new HeuristicSentenceSplitter());

        private final AbstractSentenceSplitter instance;

        Type(AbstractSentenceSplitter instance) {
            this.instance = instance;
        }

        public AbstractSentenceSplitter getInstance() {
            return instance;
        }
    }

    /**
     * Iterates over a token list, yielding one sentence at a time according to
     * a parallel list of end-of-sentence flags. A sentence runs from the
     * current position up to and including the next token flagged true;
     * {@code tokens.subList} views are returned, so callers must not mutate
     * the backing list while iterating.
     */
    @VisibleForTesting
    static class SentenceIterator implements Iterator<List<String>> {
        private final List<String> tokens;
        // endTokens.get(i) == true means token i is the last token of a sentence.
        private final List<Boolean> endTokens;
        private int currentPosition = 0;

        public SentenceIterator(List<String> tokens, List<Boolean> endTokens) {
            assert tokens != null;
            assert endTokens != null;
            assert tokens.size() == endTokens.size();
            this.tokens = tokens;
            this.endTokens = endTokens;
        }

        @Override
        public boolean hasNext() {
            return currentPosition != tokens.size();
        }

        @Override
        public List<String> next() {
            final List<String> sentence = getNextSentence();
            return sentence;
        }

        /** Advances currentPosition past the next sentence and returns it as a subList view. */
        private List<String> getNextSentence() {
            final int oldIndex = currentPosition;
            currentPosition = getNextTrueIndex();
            return this.tokens.subList(oldIndex, currentPosition);
        }

        /**
         * Returns the exclusive end index of the current sentence: one past the
         * next true flag. Edge handling: at the very end of the list the index
         * is returned unchanged (empty tail), one-before-the-end yields a
         * single-token sentence, and when no true flag is found before the
         * last position the remainder of the list is treated as one sentence
         * (the final {@code startIndex + 1}).
         */
        private int getNextTrueIndex() {
            int startIndex = currentPosition;
            if (endTokens.size() == startIndex) {
                return startIndex;
            }
            if (endTokens.size() == startIndex + 1) {
                return startIndex + 1;
            }
            do {
                if (endTokens.get(startIndex)) {
                    startIndex++;
                    return startIndex;
                }
                startIndex++;
            } while (startIndex < endTokens.size() - 1);
            return startIndex + 1;
        }
    }
}
/*
 * Copyright 2017-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.evpnopenflow.rsc.baseport.impl;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.Sets;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.util.KryoNamespace;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.evpnopenflow.rsc.BasePort;
import org.onosproject.evpnopenflow.rsc.BasePortId;
import org.onosproject.evpnopenflow.rsc.DefaultBasePort;
import org.onosproject.evpnopenflow.rsc.baseport.BasePortEvent;
import org.onosproject.evpnopenflow.rsc.baseport.BasePortListener;
import org.onosproject.evpnopenflow.rsc.baseport.BasePortService;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Host;
import org.onosproject.store.serializers.KryoNamespaces;
import org.onosproject.store.service.EventuallyConsistentMap;
import org.onosproject.store.service.MultiValuedTimestamp;
import org.onosproject.store.service.StorageService;
import org.onosproject.store.service.WallClockTimestamp;
import org.onosproject.vtnrsc.AllowedAddressPair;
import org.onosproject.vtnrsc.BindingHostId;
import org.onosproject.vtnrsc.DefaultFloatingIp;
import org.onosproject.vtnrsc.FixedIp;
import org.onosproject.vtnrsc.FloatingIp;
import org.onosproject.vtnrsc.FloatingIpId;
import org.onosproject.vtnrsc.RouterId;
import org.onosproject.vtnrsc.SecurityGroup;
import org.onosproject.vtnrsc.SubnetId;
import org.onosproject.vtnrsc.TenantId;
import org.onosproject.vtnrsc.TenantNetwork;
import org.onosproject.vtnrsc.TenantNetworkId;
import org.onosproject.vtnrsc.TenantRouter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkNotNull;
import static org.onosproject.evpnopenflow.rsc.EvpnConstants.APP_ID;
import static org.onosproject.evpnopenflow.rsc.EvpnConstants.BASE_PORT_STORE;
import static org.onosproject.evpnopenflow.rsc.EvpnConstants.LISTENER_NOT_NULL;
import static org.onosproject.evpnopenflow.rsc.EvpnConstants.RESPONSE_NOT_NULL;

/**
 * Provides implementation of the BasePort APIs, backed by an eventually
 * consistent distributed map keyed by {@link BasePortId}. Also translates
 * Gluon/Etcd JSON port configuration into {@link BasePort} CRUD operations.
 */
@Component(immediate = true)
@Service
public class BasePortManager implements BasePortService {

    private final Set<BasePortListener> listeners = Sets
            .newCopyOnWriteArraySet();
    private final Logger log = LoggerFactory.getLogger(getClass());
    private static final String BASEPORT_ID_NULL = "BasePort ID cannot be " +
            "null";
    private static final String BASEPORT_NOT_NULL = "BasePort cannot be " +
            "null";
    private static final String TENANTID_NOT_NULL = "TenantId cannot be null";
    private static final String NETWORKID_NOT_NULL = "NetworkId cannot be null";
    private static final String DEVICEID_NOT_NULL = "DeviceId cannot be null";
    private static final String FIXEDIP_NOT_NULL = "FixedIp cannot be null";
    private static final String MAC_NOT_NULL = "Mac address cannot be null";
    private static final String IP_NOT_NULL = "Ip cannot be null";
    private static final String EVENT_NOT_NULL = "event cannot be null";
    private static final String SET = "set";
    private static final String UPDATE = "update";
    private static final String DELETE = "delete";
    private static final String SLASH = "/";
    private static final String PROTON_BASE_PORT = "Port";
    private static final String JSON_NOT_NULL = "JsonNode can not be null";

    // Distributed, eventually consistent store of all base ports.
    protected EventuallyConsistentMap<BasePortId, BasePort> vPortStore;
    protected ApplicationId appId;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected StorageService storageService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected CoreService coreService;

    /** Registers the application and builds the serializer + distributed store. */
    @Activate
    public void activate() {
        appId = coreService.registerApplication(APP_ID);
        KryoNamespace.Builder serializer = KryoNamespace.newBuilder()
                .register(KryoNamespaces.API)
                .register(MultiValuedTimestamp.class)
                .register(TenantNetworkId.class)
                .register(Host.class)
                .register(TenantNetwork.class)
                // NOTE(review): TenantNetworkId is registered twice; left as-is
                // because dropping a registration could shift Kryo type IDs and
                // break wire compatibility with already-serialized data.
                .register(TenantNetworkId.class)
                .register(TenantId.class)
                .register(SubnetId.class)
                .register(BasePortId.class)
                .register(BasePort.State.class)
                .register(AllowedAddressPair.class)
                .register(FixedIp.class)
                .register(FloatingIp.class)
                .register(FloatingIpId.class)
                .register(FloatingIp.Status.class)
                .register(UUID.class)
                .register(DefaultFloatingIp.class)
                .register(BindingHostId.class)
                .register(SecurityGroup.class)
                .register(IpAddress.class)
                .register(DefaultBasePort.class)
                .register(RouterId.class)
                .register(TenantRouter.class)
                .register(BasePort.class);
        vPortStore = storageService
                .<BasePortId, BasePort>eventuallyConsistentMapBuilder()
                .withName(BASE_PORT_STORE).withSerializer(serializer)
                .withTimestampProvider((k, v) -> new WallClockTimestamp())
                .build();
        log.info("Started");
    }

    /** Destroys the distributed store on component shutdown. */
    @Deactivate
    public void deactivate() {
        vPortStore.destroy();
        // BUG FIX: log message typo "Stoppped" corrected.
        log.info("Stopped");
    }

    @Override
    public boolean exists(BasePortId vPortId) {
        checkNotNull(vPortId, BASEPORT_ID_NULL);
        return vPortStore.containsKey(vPortId);
    }

    @Override
    public BasePort getPort(BasePortId vPortId) {
        checkNotNull(vPortId, BASEPORT_ID_NULL);
        return vPortStore.get(vPortId);
    }

    /** Returns the first port that carries the given fixed IP, or null. */
    @Override
    public BasePort getPort(FixedIp fixedIP) {
        checkNotNull(fixedIP, FIXEDIP_NOT_NULL);
        List<BasePort> vPorts = new ArrayList<>();
        vPortStore.values().forEach(p -> {
            for (FixedIp fixedIp : p.fixedIps()) {
                if (fixedIp.equals(fixedIP)) {
                    vPorts.add(p);
                    break;
                }
            }
        });
        if (vPorts.size() == 0) {
            return null;
        }
        return vPorts.get(0);
    }

    /** Returns the first port with the given MAC address, or null. */
    @Override
    public BasePort getPort(MacAddress mac) {
        checkNotNull(mac, MAC_NOT_NULL);
        List<BasePort> vPorts = new ArrayList<>();
        vPortStore.values().forEach(p -> {
            if (p.macAddress().equals(mac)) {
                vPorts.add(p);
            }
        });
        if (vPorts.size() == 0) {
            return null;
        }
        return vPorts.get(0);
    }

    /** Returns the first port in the given network carrying the given IP, or null. */
    @Override
    public BasePort getPort(TenantNetworkId networkId, IpAddress ip) {
        checkNotNull(networkId, NETWORKID_NOT_NULL);
        checkNotNull(ip, IP_NOT_NULL);
        List<BasePort> vPorts = new ArrayList<>();
        vPortStore.values().stream().filter(p -> p.networkId().equals(networkId))
                .forEach(p -> {
                    for (FixedIp fixedIp : p.fixedIps()) {
                        if (fixedIp.ip().equals(ip)) {
                            vPorts.add(p);
                            break;
                        }
                    }
                });
        if (vPorts.size() == 0) {
            return null;
        }
        return vPorts.get(0);
    }

    @Override
    public Collection<BasePort> getPorts() {
        return Collections.unmodifiableCollection(vPortStore.values());
    }

    @Override
    public Collection<BasePort> getPorts(TenantNetworkId networkId) {
        checkNotNull(networkId, NETWORKID_NOT_NULL);
        return vPortStore.values().stream().filter(d -> d.networkId().equals(networkId))
                .collect(Collectors.toList());
    }

    @Override
    public Collection<BasePort> getPorts(TenantId tenantId) {
        checkNotNull(tenantId, TENANTID_NOT_NULL);
        return vPortStore.values().stream().filter(d -> d.tenantId().equals(tenantId))
                .collect(Collectors.toList());
    }

    @Override
    public Collection<BasePort> getPorts(DeviceId deviceId) {
        checkNotNull(deviceId, DEVICEID_NOT_NULL);
        return vPortStore.values().stream().filter(d -> d.deviceId().equals(deviceId))
                .collect(Collectors.toList());
    }

    /** Stores each port and verifies it landed in the store; false on first failure. */
    @Override
    public boolean createPorts(Iterable<BasePort> vPorts) {
        checkNotNull(vPorts, BASEPORT_NOT_NULL);
        for (BasePort vPort : vPorts) {
            log.info("vPortId is  {} ", vPort.portId().toString());
            vPortStore.put(vPort.portId(), vPort);
            if (!vPortStore.containsKey(vPort.portId())) {
                log.info("The basePort is created failed whose identifier is" +
                                 " {} ", vPort.portId().toString());
                return false;
            }
        }
        return true;
    }

    /** Overwrites each port in the store and verifies the stored value matches. */
    @Override
    public boolean updatePorts(Iterable<BasePort> vPorts) {
        checkNotNull(vPorts, BASEPORT_NOT_NULL);
        for (BasePort vPort : vPorts) {
            vPortStore.put(vPort.portId(), vPort);
            if (!vPortStore.containsKey(vPort.portId())) {
                log.info("The basePort is not exist whose identifier is {}",
                         vPort.portId().toString());
                return false;
            }
            // BUG FIX: the original issued a second, redundant put() of the same
            // key/value pair here before re-reading it; the duplicate write is
            // removed — the value is already in the store from the put above.
            if (!vPort.equals(vPortStore.get(vPort.portId()))) {
                log.info("The basePort is updated failed whose identifier " +
                                 "is {}", vPort.portId().toString());
                return false;
            }
        }
        return true;
    }

    /** Removes each port and verifies it is gone; false on first failure. */
    @Override
    public boolean removePorts(Iterable<BasePortId> vPortIds) {
        checkNotNull(vPortIds, BASEPORT_ID_NULL);
        for (BasePortId vPortId : vPortIds) {
            vPortStore.remove(vPortId);
            if (vPortStore.containsKey(vPortId)) {
                log.info("The basePort is removed failed whose identifier is" +
                                 " {}", vPortId.toString());
                return false;
            }
        }
        return true;
    }

    /**
     * Returns a collection of basePorts from subnetNodes.
     * <p>
     * Builds a single {@link BasePort} from the Gluon JSON node; fixed IPs and
     * the network id are inherited from any previously stored port with the
     * same id (they are not present in this JSON shape).
     * NOTE(review): each {@code vPortNodes.get(...)} assumes the field exists;
     * a missing field would NPE — confirm the Gluon schema guarantees them.
     *
     * @param vPortNodes the basePort json node
     * @return BasePort collection of vpn ports
     */
    private Collection<BasePort> changeJsonToSub(JsonNode vPortNodes) {
        checkNotNull(vPortNodes, JSON_NOT_NULL);
        Set<FixedIp> fixedIps = null;
        TenantNetworkId tenantNetworkId = null;
        Map<BasePortId, BasePort> vportMap = new HashMap<>();
        Map<String, String> strMap = new HashMap<>();
        BasePortId basePortId = BasePortId.portId(vPortNodes.get("id").asText());
        String name = vPortNodes.get("name").asText();
        TenantId tenantId = TenantId
                .tenantId(vPortNodes.get("tenant_id").asText());
        Boolean adminStateUp = vPortNodes.get("admin_state_up").asBoolean();
        String state = vPortNodes.get("status").asText();
        MacAddress macAddress = MacAddress
                .valueOf(vPortNodes.get("mac_address").asText());
        DeviceId deviceId = DeviceId
                .deviceId(vPortNodes.get("device_id").asText());
        String deviceOwner = vPortNodes.get("device_owner").asText();
        BindingHostId bindingHostId = BindingHostId
                .bindingHostId(vPortNodes.get("host_id").asText());
        String bindingVnicType = vPortNodes.get("vnic_type").asText();
        String bindingVifType = vPortNodes.get("vif_type").asText();
        String bindingVifDetails = vPortNodes.get("vif_details").asText();
        strMap.put("name", name);
        strMap.put("deviceOwner", deviceOwner);
        strMap.put("bindingVnicType", bindingVnicType);
        strMap.put("bindingVifType", bindingVifType);
        strMap.put("bindingVifDetails", bindingVifDetails);
        // Carry over attributes only known from an earlier version of the port.
        BasePort prevBasePort = getPort(basePortId);
        if (prevBasePort != null) {
            fixedIps = prevBasePort.fixedIps();
            tenantNetworkId = prevBasePort.networkId();
        }
        BasePort vPort = new DefaultBasePort(basePortId,
                                             tenantNetworkId,
                                             adminStateUp,
                                             strMap, state,
                                             macAddress, tenantId, deviceId,
                                             fixedIps, bindingHostId,
                                             null, null);
        vportMap.put(basePortId, vPort);
        return Collections.unmodifiableCollection(vportMap.values());
    }

    /**
     * Returns BasePort State.
     * NOTE(review): currently unreferenced within this class; kept for API parity.
     *
     * @param state the base port state
     * @return the basePort state
     */
    private BasePort.State isState(String state) {
        if (state.equals("ACTIVE")) {
            return BasePort.State.ACTIVE;
        } else {
            return BasePort.State.DOWN;
        }
    }

    /**
     * process Etcd response for port information.
     *
     * @param action can be either update or delete
     * @param key    can contain the id and also target information
     * @param value  content of the port configuration
     */
    @Override
    public void processGluonConfig(String action, String key, JsonNode value) {
        Collection<BasePort> basePorts;
        switch (action) {
            case DELETE:
                // Port id is the last path segment of the Etcd key.
                String[] list = key.split(SLASH);
                BasePortId basePortId
                        = BasePortId.portId(list[list.length - 1]);
                Set<BasePortId> basePortIds = Sets.newHashSet(basePortId);
                removePorts(basePortIds);
                break;
            case SET:
                checkNotNull(value, RESPONSE_NOT_NULL);
                basePorts = changeJsonToSub(value);
                createPorts(basePorts);
                break;
            case UPDATE:
                checkNotNull(value, RESPONSE_NOT_NULL);
                basePorts = changeJsonToSub(value);
                updatePorts(basePorts);
                break;
            default:
                log.info("Invalid action is received while processing VPN " +
                                 "port configuration");
        }
    }

    /**
     * Dispatches an Etcd response to {@link #processGluonConfig} when the key's
     * second-to-last path segment identifies a Proton base port.
     */
    private void parseEtcdResponse(JsonNode jsonNode,
                                   String key,
                                   String action) {
        JsonNode modifyValue = null;
        if (action.equals(SET)) {
            modifyValue = jsonNode.get(key);
        }
        String[] list = key.split(SLASH);
        String target = list[list.length - 2];
        if (target.equals(PROTON_BASE_PORT)) {
            processGluonConfig(action, key, modifyValue);
        }
    }

    @Override
    public void addListener(BasePortListener listener) {
        checkNotNull(listener, LISTENER_NOT_NULL);
        listeners.add(listener);
    }

    @Override
    public void removeListener(BasePortListener listener) {
        checkNotNull(listener, LISTENER_NOT_NULL);
        listeners.remove(listener);
    }

    /**
     * Notifies specify event to all listeners.
     *
     * @param event vpn af config event
     */
    private void notifyListeners(BasePortEvent event) {
        checkNotNull(event, EVENT_NOT_NULL);
        listeners.forEach(listener -> listener.event(event));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.work.filter; import io.netty.buffer.DrillBuf; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.exec.ops.AccountingDataTunnel; import org.apache.drill.exec.ops.Consumer; import org.apache.drill.exec.ops.DataTunnelStatusHandler; import org.apache.drill.exec.ops.SendingAccountor; import org.apache.drill.exec.proto.BitData; import org.apache.drill.exec.proto.CoordinationProtos; import org.apache.drill.exec.proto.UserBitShared; import org.apache.drill.exec.rpc.RpcException; import org.apache.drill.exec.rpc.RpcOutcomeListener; import org.apache.drill.exec.rpc.data.DataTunnel; import org.apache.drill.exec.server.DrillbitContext; import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; /** * This sink receives the RuntimeFilters from the netty thread, * aggregates them in an async thread, 
 * broadcast the final aggregated
 * one to the RuntimeFilterRecordBatch.
 */
public class RuntimeFilterSink implements Closeable {

  // Hand-off queue between the netty producer thread (add) and the single
  // AsyncAggregateWorker consumer; guarded by synchronized(rfQueue) + wait/notify.
  private BlockingQueue<RuntimeFilterWritable> rfQueue = new LinkedBlockingQueue<>();

  // HashJoin node's major fragment id to the number of filters still expected
  // from its build side; decremented per received filter. Injected via setter
  // before filters arrive — see NOTE in aggregate().
  private Map<Integer, Integer> joinMjId2rfNumber;

  //HashJoin node's major fragment id to its corresponding probe side nodes's endpoints
  private Map<Integer, List<CoordinationProtos.DrillbitEndpoint>> joinMjId2probeScanEps = new HashMap<>();

  //HashJoin node's major fragment id to its corresponding probe side scan node's belonging major fragment id
  private Map<Integer, Integer> joinMjId2ScanMjId = new HashMap<>();

  //HashJoin node's major fragment id to its aggregated RuntimeFilterWritable
  private Map<Integer, RuntimeFilterWritable> joinMjId2AggregatedRF = new HashMap<>();

  //for debug usage: per-join stopwatch started on first filter, read when flushed
  private Map<Integer, Stopwatch> joinMjId2Stopwatch = new HashMap<>();

  private DrillbitContext drillbitContext;

  private SendingAccountor sendingAccountor;

  private AsyncAggregateWorker asyncAggregateWorker;

  // Flips to false on close() or on aggregation failure; both producer and
  // consumer consult it so pending buffers get released instead of leaked.
  private AtomicBoolean running = new AtomicBoolean(true);

  private static final Logger logger = LoggerFactory.getLogger(RuntimeFilterSink.class);

  /**
   * Creates the sink and immediately submits the aggregation worker to the
   * drillbit executor.
   *
   * @param drillbitContext  source of the executor and data connection pool
   * @param sendingAccountor tracks in-flight sends so callers can await completion
   */
  public RuntimeFilterSink(DrillbitContext drillbitContext, SendingAccountor sendingAccountor) {
    this.drillbitContext = drillbitContext;
    this.sendingAccountor = sendingAccountor;
    asyncAggregateWorker = new AsyncAggregateWorker();
    drillbitContext.getExecutor().submit(asyncAggregateWorker);
  }

  /**
   * Enqueues a runtime filter received from the network for async aggregation.
   * Retains the underlying buffers before publishing; if the sink has already
   * been closed the filter is released immediately instead of queued.
   * The running flag is re-checked inside the lock so a filter cannot slip into
   * the queue after close() started draining it.
   *
   * @param runtimeFilterWritable filter to aggregate; ownership of one buffer
   *                              reference passes to the sink
   */
  public void add(RuntimeFilterWritable runtimeFilterWritable) {
    if (!running.get()) {
      runtimeFilterWritable.close();
      return;
    }
    runtimeFilterWritable.retainBuffers(1);
    int joinMjId = runtimeFilterWritable.getRuntimeFilterBDef().getMajorFragmentId();
    if (joinMjId2Stopwatch.get(joinMjId) == null) {
      // First filter for this join: start timing until the aggregate is flushed.
      Stopwatch stopwatch = Stopwatch.createStarted();
      joinMjId2Stopwatch.put(joinMjId, stopwatch);
    }
    synchronized (rfQueue) {
      if (!running.get()) {
        runtimeFilterWritable.close();
        return;
      }
      rfQueue.add(runtimeFilterWritable);
      rfQueue.notify();
    }
  }

  /**
   * Stops the worker, waits for it to finish, then releases any partially
   * aggregated filters that were never flushed.
   *
   * NOTE(review): the wait is a 100 ms sleep-poll on the worker's `over` flag
   * rather than a join/latch; functional, but worth confirming this matches the
   * project's shutdown conventions.
   */
  @Override
  public void close() {
    running.set(false);
    if (asyncAggregateWorker != null) {
      synchronized (rfQueue) {
        // Wake the worker if it is parked in wait() so it can observe !running.
        rfQueue.notify();
      }
    }
    while (!asyncAggregateWorker.over.get()) {
      try {
        Thread.sleep(100);
      } catch (InterruptedException e) {
        logger.error("interrupted while sleeping to wait for the aggregating worker thread to exit", e);
      }
    }
    for (RuntimeFilterWritable runtimeFilterWritable : joinMjId2AggregatedRF.values()) {
      runtimeFilterWritable.close();
    }
  }

  /**
   * Folds one incoming filter into the per-join aggregate; when the expected
   * count for that join reaches zero, routes the final aggregate to the probe
   * side and clears the join's bookkeeping. Runs only on the worker thread.
   *
   * NOTE(review): joinMjId2rfNumber.get(joinMajorId) will NPE if the setter has
   * not populated this join id yet — assumes setup happens before any filter
   * arrives; TODO confirm ordering guarantee at the call sites.
   */
  private void aggregate(RuntimeFilterWritable srcRuntimeFilterWritable) {
    BitData.RuntimeFilterBDef runtimeFilterB = srcRuntimeFilterWritable.getRuntimeFilterBDef();
    int joinMajorId = runtimeFilterB.getMajorFragmentId();
    int buildSideRfNumber;
    RuntimeFilterWritable toAggregated = null;
    buildSideRfNumber = joinMjId2rfNumber.get(joinMajorId);
    buildSideRfNumber--;
    joinMjId2rfNumber.put(joinMajorId, buildSideRfNumber);
    toAggregated = joinMjId2AggregatedRF.get(joinMajorId);
    if (toAggregated == null) {
      // First filter for this join becomes the accumulator; take an extra
      // reference since the caller closes its own reference in finally.
      toAggregated = srcRuntimeFilterWritable;
      toAggregated.retainBuffers(1);
    } else {
      toAggregated.aggregate(srcRuntimeFilterWritable);
    }
    joinMjId2AggregatedRF.put(joinMajorId, toAggregated);
    if (buildSideRfNumber == 0) {
      joinMjId2AggregatedRF.remove(joinMajorId);
      route(toAggregated);
      joinMjId2rfNumber.remove(joinMajorId);
      Stopwatch stopwatch = joinMjId2Stopwatch.get(joinMajorId);
      logger.info(
        "received all the RFWs belonging to the majorId {}'s HashJoin nodes and flushed aggregated RFW out elapsed {} ms",
        joinMajorId,
        stopwatch.elapsed(TimeUnit.MILLISECONDS)
      );
    }
  }

  /**
   * Broadcasts a fully aggregated filter to every minor fragment of the probe
   * side scan, one copy per endpoint. The shared DrillBufs are retained
   * (size - 1) extra times so each outgoing RuntimeFilterWritable owns one
   * reference; the accounting tunnel/status handler releases them after send.
   */
  private void route(RuntimeFilterWritable srcRuntimeFilterWritable) {
    BitData.RuntimeFilterBDef runtimeFilterB = srcRuntimeFilterWritable.getRuntimeFilterBDef();
    int joinMajorId = runtimeFilterB.getMajorFragmentId();
    UserBitShared.QueryId queryId = runtimeFilterB.getQueryId();
    List<String> probeFields = runtimeFilterB.getProbeFieldsList();
    List<Integer> sizeInBytes = runtimeFilterB.getBloomFilterSizeInBytesList();
    long rfIdentifier = runtimeFilterB.getRfIdentifier();
    DrillBuf[] data = srcRuntimeFilterWritable.getData();
    List<CoordinationProtos.DrillbitEndpoint> scanNodeEps = joinMjId2probeScanEps.get(joinMajorId);
    int scanNodeSize = scanNodeEps.size();
    srcRuntimeFilterWritable.retainBuffers(scanNodeSize - 1);
    int scanNodeMjId = joinMjId2ScanMjId.get(joinMajorId);
    for (int minorId = 0; minorId < scanNodeEps.size(); minorId++) {
      // Rebuild the protobuf header addressed to this scan minor fragment.
      BitData.RuntimeFilterBDef.Builder builder = BitData.RuntimeFilterBDef.newBuilder();
      for (String probeField : probeFields) {
        builder.addProbeFields(probeField);
      }
      BitData.RuntimeFilterBDef runtimeFilterBDef = builder.setQueryId(queryId)
        .setMajorFragmentId(scanNodeMjId)
        .setMinorFragmentId(minorId)
        .setToForeman(false)
        .setRfIdentifier(rfIdentifier)
        .addAllBloomFilterSizeInBytes(sizeInBytes)
        .build();
      RuntimeFilterWritable runtimeFilterWritable = new RuntimeFilterWritable(runtimeFilterBDef, data);
      CoordinationProtos.DrillbitEndpoint drillbitEndpoint = scanNodeEps.get(minorId);
      DataTunnel dataTunnel = drillbitContext.getDataConnectionsPool().getTunnel(drillbitEndpoint);
      Consumer<RpcException> exceptionConsumer = new Consumer<RpcException>() {
        @Override
        public void accept(final RpcException e) {
          logger.warn("fail to broadcast a runtime filter to the probe side scan node", e);
        }

        @Override
        public void interrupt(final InterruptedException e) {
          logger.warn("fail to broadcast a runtime filter to the probe side scan node", e);
        }
      };
      RpcOutcomeListener<BitData.AckWithCredit> statusHandler = new DataTunnelStatusHandler(exceptionConsumer, sendingAccountor);
      AccountingDataTunnel accountingDataTunnel = new AccountingDataTunnel(dataTunnel, sendingAccountor, statusHandler);
      accountingDataTunnel.sendRuntimeFilter(runtimeFilterWritable);
    }
  }

  /** Injects the expected filter count per HashJoin major fragment id. */
  public void setJoinMjId2rfNumber(Map<Integer, Integer> joinMjId2rfNumber) {
    this.joinMjId2rfNumber = joinMjId2rfNumber;
  }

  /** Injects the probe-side scan endpoints per HashJoin major fragment id. */
  public void setJoinMjId2probeScanEps(Map<Integer, List<CoordinationProtos.DrillbitEndpoint>> joinMjId2probeScanEps) {
    this.joinMjId2probeScanEps = joinMjId2probeScanEps;
  }

  /** Injects the probe-side scan major fragment id per HashJoin major fragment id. */
  public void setJoinMjId2ScanMjId(Map<Integer, Integer> joinMjId2ScanMjId) {
    this.joinMjId2ScanMjId = joinMjId2ScanMjId;
  }

  /**
   * Single consumer thread: drains rfQueue and aggregates until every expected
   * filter has been handled or the sink stops running. `over` signals close()
   * that the thread has fully exited and the queue is drained.
   */
  private class AsyncAggregateWorker implements Runnable {

    private AtomicBoolean over = new AtomicBoolean(false);

    @Override
    public void run() {
      // joinMjId2rfNumber may still be null before setup; keep looping then.
      while ((joinMjId2rfNumber == null || !joinMjId2rfNumber.isEmpty()
      ) && running.get()) {
        RuntimeFilterWritable toAggregate = null;
        synchronized (rfQueue) {
          try {
            toAggregate = rfQueue.poll();
            while (toAggregate == null && running.get()) {
              rfQueue.wait();
              toAggregate = rfQueue.poll();
            }
          } catch (InterruptedException ex) {
            logger.error("RFW_Aggregator thread being interrupted", ex);
            continue;
          }
        }

        if (toAggregate == null) {
          continue;
        }

        // perform aggregate outside the sync block.
        try {
          aggregate(toAggregate);
        } catch (Exception ex) {
          logger.error("Failed to aggregate or route the RFW", ex);

          // Set running to false and cleanup pending RFW in queue. This will make sure producer
          // thread is also indicated to stop and queue is cleaned up properly in failure cases
          synchronized (rfQueue) {
            running.set(false);
          }
          cleanupQueue();
          throw new DrillRuntimeException(ex);
        } finally {
          // Release the reference taken in add(); the accumulator keeps its own.
          toAggregate.close();
        }
      }
      cleanupQueue();
    }

    // Drains and closes anything still queued (only when stopped), then marks over.
    private void cleanupQueue() {
      if (!running.get()) {
        RuntimeFilterWritable toClose;
        while ((toClose = rfQueue.poll()) != null) {
          toClose.close();
        }
      }
      over.set(true);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.transport; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.nio.ByteBuffer; import java.util.Collections; import java.util.Map; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.apache.cassandra.cql3.BatchQueryOptions; import org.apache.cassandra.cql3.CQLStatement; import org.apache.cassandra.cql3.CQLTester; import org.apache.cassandra.cql3.QueryHandler; import org.apache.cassandra.cql3.QueryOptions; import org.apache.cassandra.cql3.QueryProcessor; import org.apache.cassandra.cql3.statements.BatchStatement; import org.apache.cassandra.cql3.statements.ParsedStatement; import org.apache.cassandra.exceptions.RequestExecutionException; import org.apache.cassandra.exceptions.RequestValidationException; import org.apache.cassandra.service.ClientState; import org.apache.cassandra.service.QueryState; import org.apache.cassandra.transport.messages.BatchMessage; import org.apache.cassandra.transport.messages.ExecuteMessage; import org.apache.cassandra.transport.messages.PrepareMessage; import org.apache.cassandra.transport.messages.QueryMessage; import 
org.apache.cassandra.transport.messages.ResultMessage;
import org.apache.cassandra.utils.MD5Digest;

import static org.apache.cassandra.utils.ByteBufferUtil.bytes;

/**
 * Verifies that custom payload maps round-trip through the native protocol:
 * accepted and echoed on v4+ (QUERY/PREPARE/EXECUTE/BATCH), rejected with a
 * ProtocolException on v3. A TestQueryHandler is swapped into ClientState via
 * reflection to capture request payloads and inject response payloads.
 */
public class MessagePayloadTest extends CQLTester {
    // Captured by TestQueryHandler from the incoming request.
    public static Map<String, ByteBuffer> requestPayload;
    // Staged for TestQueryHandler to attach to the next response (one-shot).
    public static Map<String, ByteBuffer> responsePayload;

    private static Field cqlQueryHandlerField;
    private static boolean modifiersAccessible;

    /**
     * Strips the `final` modifier from ClientState.cqlQueryHandler so tests can
     * substitute their own handler.
     *
     * NOTE(review): mutating Field.modifiers this way only works on JDKs where
     * that reflective backdoor is still open (pre-12 behavior) — confirm against
     * the project's supported JDK.
     */
    @BeforeClass
    public static void makeCqlQueryHandlerAccessible() {
        try {
            cqlQueryHandlerField = ClientState.class.getDeclaredField("cqlQueryHandler");
            cqlQueryHandlerField.setAccessible(true);

            Field modifiersField = Field.class.getDeclaredField("modifiers");
            modifiersAccessible = modifiersField.isAccessible();
            modifiersField.setAccessible(true);
            modifiersField.setInt(cqlQueryHandlerField, cqlQueryHandlerField.getModifiers() & ~Modifier.FINAL);
        } catch (IllegalAccessException | NoSuchFieldException e) {
            throw new RuntimeException(e);
        }
    }

    /** Restores the `final` modifier and the accessibility flags touched above. */
    @AfterClass
    public static void resetCqlQueryHandlerField() {
        if (cqlQueryHandlerField == null)
            return;
        try {
            Field modifiersField = Field.class.getDeclaredField("modifiers");
            modifiersField.setAccessible(true);
            modifiersField.setInt(cqlQueryHandlerField, cqlQueryHandlerField.getModifiers() | Modifier.FINAL);

            cqlQueryHandlerField.setAccessible(false);
            modifiersField.setAccessible(modifiersAccessible);
        } catch (IllegalAccessException | NoSuchFieldException e) {
            throw new RuntimeException(e);
        }
    }

    /** Best-effort cleanup of the table each test creates; failures are ignored. */
    @After
    public void dropCreatedTable() {
        try {
            QueryProcessor.executeOnceInternal("DROP TABLE " + KEYSPACE + ".atable");
        } catch (Throwable t) {
            // ignore
        }
    }

    /**
     * Current-protocol path: each message type carries a distinct payload and
     * the response must echo the payload staged in responsePayload.
     * The original query handler is always restored in the outer finally.
     */
    @Test
    public void testMessagePayload() throws Throwable {
        QueryHandler queryHandler = (QueryHandler) cqlQueryHandlerField.get(null);
        cqlQueryHandlerField.set(null, new TestQueryHandler());
        try {
            requireNetwork();

            Assert.assertSame(TestQueryHandler.class, ClientState.getCQLQueryHandler().getClass());

            SimpleClient client = new SimpleClient(nativeAddr.getHostAddress(), nativePort);
            try {
                client.connect(false);

                Map<String, ByteBuffer> reqMap;
                Map<String, ByteBuffer> respMap;

                QueryMessage queryMessage = new QueryMessage(
                                                            "CREATE TABLE " + KEYSPACE + ".atable (pk int PRIMARY KEY, v text)",
                                                            QueryOptions.DEFAULT
                );
                PrepareMessage prepareMessage = new PrepareMessage("SELECT * FROM " + KEYSPACE + ".atable");

                // QUERY
                reqMap = Collections.singletonMap("foo", bytes(42));
                responsePayload = respMap = Collections.singletonMap("bar", bytes(42));
                queryMessage.setCustomPayload(reqMap);
                Message.Response queryResponse = client.execute(queryMessage);
                payloadEquals(reqMap, requestPayload);
                payloadEquals(respMap, queryResponse.getCustomPayload());

                // PREPARE
                reqMap = Collections.singletonMap("foo", bytes(43));
                responsePayload = respMap = Collections.singletonMap("bar", bytes(43));
                prepareMessage.setCustomPayload(reqMap);
                ResultMessage.Prepared prepareResponse = (ResultMessage.Prepared) client.execute(prepareMessage);
                payloadEquals(reqMap, requestPayload);
                payloadEquals(respMap, prepareResponse.getCustomPayload());

                // EXECUTE
                ExecuteMessage executeMessage = new ExecuteMessage(prepareResponse.statementId, QueryOptions.DEFAULT);
                reqMap = Collections.singletonMap("foo", bytes(44));
                responsePayload = respMap = Collections.singletonMap("bar", bytes(44));
                executeMessage.setCustomPayload(reqMap);
                Message.Response executeResponse = client.execute(executeMessage);
                payloadEquals(reqMap, requestPayload);
                payloadEquals(respMap, executeResponse.getCustomPayload());

                // BATCH
                BatchMessage batchMessage = new BatchMessage(BatchStatement.Type.UNLOGGED,
                                                             Collections.<Object>singletonList("INSERT INTO " + KEYSPACE + ".atable (pk,v) VALUES (1, 'foo')"),
                                                             Collections.singletonList(Collections.<ByteBuffer>emptyList()),
                                                             QueryOptions.DEFAULT);
                reqMap = Collections.singletonMap("foo", bytes(45));
                responsePayload = respMap = Collections.singletonMap("bar", bytes(45));
                batchMessage.setCustomPayload(reqMap);
                Message.Response batchResponse = client.execute(batchMessage);
                payloadEquals(reqMap, requestPayload);
                payloadEquals(respMap, batchResponse.getCustomPayload());
            } finally {
                client.close();
            }
        } finally {
            cqlQueryHandlerField.set(null, queryHandler);
        }
    }

    /**
     * Protocol v3 path: custom payloads are not supported, so every message
     * carrying one must surface a ProtocolException (wrapped in a
     * RuntimeException by SimpleClient); the same message succeeds once the
     * payload is cleared.
     */
    @Test
    public void testMessagePayloadVersion3() throws Throwable {
        QueryHandler queryHandler = (QueryHandler) cqlQueryHandlerField.get(null);
        cqlQueryHandlerField.set(null, new TestQueryHandler());
        try {
            requireNetwork();

            Assert.assertSame(TestQueryHandler.class, ClientState.getCQLQueryHandler().getClass());

            SimpleClient client = new SimpleClient(nativeAddr.getHostAddress(), nativePort, Server.VERSION_3);
            try {
                client.connect(false);

                Map<String, ByteBuffer> reqMap;

                QueryMessage queryMessage = new QueryMessage(
                                                            "CREATE TABLE " + KEYSPACE + ".atable (pk int PRIMARY KEY, v text)",
                                                            QueryOptions.DEFAULT
                );
                PrepareMessage prepareMessage = new PrepareMessage("SELECT * FROM " + KEYSPACE + ".atable");

                // QUERY with payload must be rejected on v3...
                reqMap = Collections.singletonMap("foo", bytes(42));
                responsePayload = Collections.singletonMap("bar", bytes(42));
                queryMessage.setCustomPayload(reqMap);
                try {
                    client.execute(queryMessage);
                    Assert.fail();
                } catch (RuntimeException e) {
                    Assert.assertTrue(e.getCause() instanceof ProtocolException);
                }
                // ...and succeed without one.
                queryMessage.setCustomPayload(null);
                client.execute(queryMessage);

                // PREPARE: same rejection, then succeed to obtain a statement id.
                reqMap = Collections.singletonMap("foo", bytes(43));
                responsePayload = Collections.singletonMap("bar", bytes(43));
                prepareMessage.setCustomPayload(reqMap);
                try {
                    client.execute(prepareMessage);
                    Assert.fail();
                } catch (RuntimeException e) {
                    Assert.assertTrue(e.getCause() instanceof ProtocolException);
                }
                prepareMessage.setCustomPayload(null);
                ResultMessage.Prepared prepareResponse = (ResultMessage.Prepared) client.execute(prepareMessage);

                // EXECUTE with payload must be rejected.
                ExecuteMessage executeMessage = new ExecuteMessage(prepareResponse.statementId, QueryOptions.DEFAULT);
                reqMap = Collections.singletonMap("foo", bytes(44));
                responsePayload = Collections.singletonMap("bar", bytes(44));
                executeMessage.setCustomPayload(reqMap);
                try {
                    client.execute(executeMessage);
                    Assert.fail();
                } catch (RuntimeException e) {
                    Assert.assertTrue(e.getCause() instanceof ProtocolException);
                }

                // BATCH with payload must be rejected.
                BatchMessage batchMessage = new BatchMessage(BatchStatement.Type.UNLOGGED,
                                                             Collections.<Object>singletonList("INSERT INTO " + KEYSPACE + ".atable (pk,v) VALUES (1, 'foo')"),
                                                             Collections.singletonList(Collections.<ByteBuffer>emptyList()),
                                                             QueryOptions.DEFAULT);
                reqMap = Collections.singletonMap("foo", bytes(45));
                responsePayload = Collections.singletonMap("bar", bytes(45));
                batchMessage.setCustomPayload(reqMap);
                try {
                    client.execute(batchMessage);
                    Assert.fail();
                } catch (RuntimeException e) {
                    Assert.assertTrue(e.getCause() instanceof ProtocolException);
                }
            } finally {
                client.close();
            }
        } finally {
            cqlQueryHandlerField.set(null, queryHandler);
        }
    }

    /** Asserts both maps are non-null with identical key sets and equal values. */
    private static void payloadEquals(Map<String, ByteBuffer> map1, Map<String, ByteBuffer> map2) {
        Assert.assertNotNull(map1);
        Assert.assertNotNull(map2);
        Assert.assertEquals(map1.keySet(), map2.keySet());
        for (Map.Entry<String, ByteBuffer> e : map1.entrySet())
            Assert.assertEquals(e.getValue(), map2.get(e.getKey()));
    }

    /**
     * Delegates everything to QueryProcessor but records the incoming custom
     * payload into requestPayload and attaches (then clears) the staged
     * responsePayload on each response.
     */
    public static class TestQueryHandler implements QueryHandler {
        public ParsedStatement.Prepared getPrepared(MD5Digest id) {
            return QueryProcessor.instance.getPrepared(id);
        }

        public ParsedStatement.Prepared getPreparedForThrift(Integer id) {
            return QueryProcessor.instance.getPreparedForThrift(id);
        }

        public ResultMessage.Prepared prepare(String query,
                                              QueryState state,
                                              Map<String, ByteBuffer> customPayload)
                                                      throws RequestValidationException {
            if (customPayload != null)
                requestPayload = customPayload;
            ResultMessage.Prepared result = QueryProcessor.instance.prepare(query, state, customPayload);
            if (customPayload != null) {
                result.setCustomPayload(responsePayload);
                responsePayload = null;
            }
            return result;
        }

        public ResultMessage process(String query,
                                     QueryState state,
                                     QueryOptions options,
                                     Map<String, ByteBuffer> customPayload)
                                             throws RequestExecutionException, RequestValidationException {
            if (customPayload != null)
                requestPayload = customPayload;
            ResultMessage result = QueryProcessor.instance.process(query, state, options, customPayload);
            if (customPayload != null) {
                result.setCustomPayload(responsePayload);
                responsePayload = null;
            }
            return result;
        }

        public ResultMessage processBatch(BatchStatement statement,
                                          QueryState state,
                                          BatchQueryOptions options,
                                          Map<String, ByteBuffer> customPayload)
                                                  throws RequestExecutionException, RequestValidationException {
            if (customPayload != null)
                requestPayload = customPayload;
            ResultMessage result = QueryProcessor.instance.processBatch(statement, state, options, customPayload);
            if (customPayload != null) {
                result.setCustomPayload(responsePayload);
                responsePayload = null;
            }
            return result;
        }

        public ResultMessage processPrepared(CQLStatement statement,
                                             QueryState state,
                                             QueryOptions options,
                                             Map<String, ByteBuffer> customPayload)
                                                     throws RequestExecutionException, RequestValidationException {
            if (customPayload != null)
                requestPayload = customPayload;
            ResultMessage result = QueryProcessor.instance.processPrepared(statement, state, options, customPayload);
            if (customPayload != null) {
                result.setCustomPayload(responsePayload);
                responsePayload = null;
            }
            return result;
        }
    }
}
/* * Copyright 2002-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.util.xml; import java.util.Iterator; import javax.xml.namespace.NamespaceContext; import javax.xml.namespace.QName; import javax.xml.stream.Location; import javax.xml.stream.XMLEventReader; import javax.xml.stream.XMLStreamException; import javax.xml.stream.events.Attribute; import javax.xml.stream.events.Comment; import javax.xml.stream.events.Namespace; import javax.xml.stream.events.ProcessingInstruction; import javax.xml.stream.events.StartDocument; import javax.xml.stream.events.XMLEvent; import org.springframework.lang.Nullable; /** * Implementation of the {@link javax.xml.stream.XMLStreamReader} interface that * wraps a {@link XMLEventReader}. Useful because the StAX * {@link javax.xml.stream.XMLInputFactory} allows one to create a event reader * from a stream reader, but not vice-versa. 
* * @author Arjen Poutsma * @since 3.0 * @see StaxUtils#createEventStreamReader(javax.xml.stream.XMLEventReader) */ class XMLEventStreamReader extends AbstractXMLStreamReader { private XMLEvent event; private final XMLEventReader eventReader; public XMLEventStreamReader(XMLEventReader eventReader) throws XMLStreamException { this.eventReader = eventReader; this.event = eventReader.nextEvent(); } @Override public QName getName() {Thread.dumpStack(); if (this.event.isStartElement()) { return this.event.asStartElement().getName(); } else if (this.event.isEndElement()) { return this.event.asEndElement().getName(); } else { throw new IllegalStateException(); } } @Override public Location getLocation() {Thread.dumpStack(); return this.event.getLocation(); } @Override public int getEventType() {Thread.dumpStack(); return this.event.getEventType(); } @Override @Nullable public String getVersion() {Thread.dumpStack(); if (this.event.isStartDocument()) { return ((StartDocument) this.event).getVersion(); } else { return null; } } @Override public Object getProperty(String name) throws IllegalArgumentException {Thread.dumpStack(); return this.eventReader.getProperty(name); } @Override public boolean isStandalone() {Thread.dumpStack(); if (this.event.isStartDocument()) { return ((StartDocument) this.event).isStandalone(); } else { throw new IllegalStateException(); } } @Override public boolean standaloneSet() {Thread.dumpStack(); if (this.event.isStartDocument()) { return ((StartDocument) this.event).standaloneSet(); } else { throw new IllegalStateException(); } } @Override @Nullable public String getEncoding() {Thread.dumpStack(); return null; } @Override @Nullable public String getCharacterEncodingScheme() {Thread.dumpStack(); return null; } @Override public String getPITarget() {Thread.dumpStack(); if (this.event.isProcessingInstruction()) { return ((ProcessingInstruction) this.event).getTarget(); } else { throw new IllegalStateException(); } } @Override public String 
getPIData() {Thread.dumpStack(); if (this.event.isProcessingInstruction()) { return ((ProcessingInstruction) this.event).getData(); } else { throw new IllegalStateException(); } } @Override public int getTextStart() {Thread.dumpStack(); return 0; } @Override public String getText() {Thread.dumpStack(); if (this.event.isCharacters()) { return this.event.asCharacters().getData(); } else if (this.event.getEventType() == XMLEvent.COMMENT) { return ((Comment) this.event).getText(); } else { throw new IllegalStateException(); } } @Override @SuppressWarnings("rawtypes") public int getAttributeCount() {Thread.dumpStack(); if (!this.event.isStartElement()) { throw new IllegalStateException(); } Iterator attributes = this.event.asStartElement().getAttributes(); return countIterator(attributes); } @Override public boolean isAttributeSpecified(int index) {Thread.dumpStack(); return getAttribute(index).isSpecified(); } @Override public QName getAttributeName(int index) {Thread.dumpStack(); return getAttribute(index).getName(); } @Override public String getAttributeType(int index) {Thread.dumpStack(); return getAttribute(index).getDTDType(); } @Override public String getAttributeValue(int index) {Thread.dumpStack(); return getAttribute(index).getValue(); } @SuppressWarnings("rawtypes") private Attribute getAttribute(int index) {Thread.dumpStack(); if (!this.event.isStartElement()) { throw new IllegalStateException(); } int count = 0; Iterator attributes = this.event.asStartElement().getAttributes(); while (attributes.hasNext()) { Attribute attribute = (Attribute) attributes.next(); if (count == index) { return attribute; } else { count++; } } throw new IllegalArgumentException(); } @Override public NamespaceContext getNamespaceContext() {Thread.dumpStack(); if (this.event.isStartElement()) { return this.event.asStartElement().getNamespaceContext(); } else { throw new IllegalStateException(); } } @Override @SuppressWarnings("rawtypes") public int getNamespaceCount() 
{Thread.dumpStack(); Iterator namespaces; if (this.event.isStartElement()) { namespaces = this.event.asStartElement().getNamespaces(); } else if (this.event.isEndElement()) { namespaces = this.event.asEndElement().getNamespaces(); } else { throw new IllegalStateException(); } return countIterator(namespaces); } @Override public String getNamespacePrefix(int index) {Thread.dumpStack(); return getNamespace(index).getPrefix(); } @Override public String getNamespaceURI(int index) {Thread.dumpStack(); return getNamespace(index).getNamespaceURI(); } @SuppressWarnings("rawtypes") private Namespace getNamespace(int index) {Thread.dumpStack(); Iterator namespaces; if (this.event.isStartElement()) { namespaces = this.event.asStartElement().getNamespaces(); } else if (this.event.isEndElement()) { namespaces = this.event.asEndElement().getNamespaces(); } else { throw new IllegalStateException(); } int count = 0; while (namespaces.hasNext()) { Namespace namespace = (Namespace) namespaces.next(); if (count == index) { return namespace; } else { count++; } } throw new IllegalArgumentException(); } @Override public int next() throws XMLStreamException {Thread.dumpStack(); this.event = this.eventReader.nextEvent(); return this.event.getEventType(); } @Override public void close() throws XMLStreamException {Thread.dumpStack(); this.eventReader.close(); } @SuppressWarnings("rawtypes") private static int countIterator(Iterator iterator) {Thread.dumpStack(); int count = 0; while (iterator.hasNext()) { iterator.next(); count++; } return count; } }
/* * Copyright 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.example.spline.viewmodel; import android.content.Context; import android.databinding.BaseObservable; import android.databinding.Bindable; import android.support.v7.widget.PopupMenu; import android.view.MenuItem; import android.view.View; import com.android.example.spline.BR; import com.android.example.spline.R; import com.android.example.spline.model.Document; import com.android.example.spline.model.Layer; import com.android.example.spline.model.LayerGroup; import com.android.example.spline.model.OvalLayer; import com.android.example.spline.model.RectLayer; import com.android.example.spline.model.SelectionGroup; import com.android.example.spline.model.ShapeLayer; import com.android.example.spline.model.TriangleLayer; import com.android.example.spline.persistence.DocumentRepository; import com.android.example.spline.util.FileUtils; import java.util.ArrayList; import java.util.List; /** * The ViewModel companion to Document, DocumentViewModel encapsulates Document's layers and * currentLayer with accessor methods as well as storing other values that don't need to be * persisted, such as the current visibility of the right panel. 
*/ public class DocumentViewModel extends BaseObservable { private Document document; private String fileName; private String rectString; private String triangleString; private String ovalString; private List<Layer> rectLayers; private List<Layer> triangleLayers; private List<Layer> ovalLayers; private int viewportWidth; private int viewportHeight; private Context context; private DocumentRepository repository; private PopupMenu.OnMenuItemClickListener onMenuItemClickListener; public DocumentViewModel(Context context, String fileName) { this.context = context; this.fileName = fileName; repository = DocumentRepository.getInstance(); rectString = context.getString(R.string.rect); triangleString = context.getString(R.string.triangle); ovalString = context.getString(R.string.oval); rectLayers = new ArrayList<>(); triangleLayers = new ArrayList<>(); ovalLayers = new ArrayList<>(); onMenuItemClickListener = new PopupMenu.OnMenuItemClickListener() { @Override public boolean onMenuItemClick(MenuItem item) { String title = (String) item.getTitle(); if (title.equals(rectString)) { addRectLayer(); } else if (title.equals(triangleString)) { addTriangleLayer(); } else if (title.equals(ovalString)) { addOvalLayer(); } return true; } }; this.document = new Document(); } @Bindable public int getRightPanelVisibility() { boolean hasCurrentLayer = document.getCurrentLayer() != null; boolean hasChildren = true; if (hasCurrentLayer && document.getCurrentLayer() instanceof LayerGroup) { LayerGroup group = (LayerGroup) document.getCurrentLayer(); hasChildren = group.getLayers().size() > 0; } return hasCurrentLayer && hasChildren ? 
View.VISIBLE : View.GONE; } @Bindable public String getFileName() { return fileName; } @Bindable public String getPrettyFileName() { return FileUtils.getPrettyFilename(fileName); } public Document getDocument() { return document; } public void saveDocument() { repository.save(fileName, document, context); } public void loadDocument() { Document newDoc = repository.load(fileName, context); if (newDoc != null) { this.document = newDoc; } } @Bindable public LayerGroup getRoot() { return document.getRoot(); } @Bindable public Layer getCurrentLayer() { return document.getCurrentLayer(); } public void setCurrentLayer(Layer layer) { if (layer != getCurrentLayer()) { document.setCurrentLayer(layer); notifyPropertyChanged(BR.currentLayer); notifyPropertyChanged(BR.rightPanelVisibility); } } public boolean hasClipboardContents() { return document.getClipboardLayer() != null; } public void deleteCurrentLayer() { Layer l = getCurrentLayer(); if (l != null) { if (l instanceof SelectionGroup) { SelectionGroup selection = (SelectionGroup) l; List<Layer> selectedLayers = selection.getLayers(); for (Layer layer : selectedLayers) { document.removeLayer(layer); } } document.removeLayer(l); l = null; setCurrentLayer(null); } } public void cutCurrentLayer() { Layer l = getCurrentLayer(); if (l != null) { document.removeLayer(l); document.setClipboardLayer(l); setCurrentLayer(null); } } public void copyCurrentLayer() { Layer l = getCurrentLayer(); if (l != null) { document.setClipboardLayer(l); } } public void pasteClipboard() { Layer l = document.getClipboardLayer(); pasteLayerCopy(l); } private void pasteLayerCopy(Layer l) { if (l != null) { Layer copy = l.copy(); addLayer(copy); } } public void duplicateCurrentLayer() { // Duplicate copies current layer, doesn't put this layer on the clipboard pasteLayerCopy(getCurrentLayer()); } @Bindable public float getViewportX() { return document.getViewportX(); } public void setViewportX(float viewportX) { if (viewportX != getViewportX()) { 
document.setViewportX(viewportX); notifyPropertyChanged(BR.viewportX); } } @Bindable public float getViewportY() { return document.getViewportY(); } public void setViewportY(float viewportY) { if (viewportY != getViewportY()) { document.setViewportY(viewportY); notifyPropertyChanged(BR.viewportY); } } @Bindable public int getViewportWidth() { return viewportWidth; } public void setViewportWidth(int viewportWidth) { if (viewportWidth != getViewportWidth()) { this.viewportWidth = viewportWidth; notifyPropertyChanged(BR.viewportWidth); } } @Bindable public int getViewportHeight() { return viewportHeight; } public void setViewportHeight(int viewportHeight) { if (viewportHeight != getViewportHeight()) { this.viewportHeight = viewportHeight; notifyPropertyChanged(BR.viewportHeight); } } public PopupMenu.OnMenuItemClickListener getOnMenuItemClickListener() { return onMenuItemClickListener; } // The following add methods generate shape layers with default position and sizes for those // layer types public void addRectLayer() { ShapeLayer layer = new RectLayer(); layer.setWidth(600); layer.setHeight(300); layer.setName(rectString + " " + (rectLayers.size() + 1)); addLayer(layer); rectLayers.add(layer); } public void addTriangleLayer() { ShapeLayer layer = new TriangleLayer(); float width = (float) (400 / Math.sqrt(3f) * 2f); width = Math.round(width * 10f) / 10f; layer.setWidth(width); layer.setHeight(400); layer.setName(triangleString + " " + (triangleLayers.size() + 1)); addLayer(layer); triangleLayers.add(layer); } public void addOvalLayer() { ShapeLayer layer = new OvalLayer(); layer.setWidth(400); layer.setHeight(400); layer.setName(ovalString + " " + (ovalLayers.size() + 1)); addLayer(layer); ovalLayers.add(layer); } private void addLayer(Layer layer) { // Center the layer in the current viewport if a majority of the layer in its // current position falls outside of the viewport. 
This effectively centers new layers, who // are given an initial x, y values of Integer MIN_VALUE. if (layer.getMidX() < -getViewportX() || layer.getMidX() > -getViewportX() + getViewportWidth() || layer.getMidY() < -getViewportY() || layer.getMidY() > -getViewportY() + getViewportHeight()) { layer.setX(-getViewportX() + getViewportWidth() / 2 - layer.getWidth() / 2); layer.setY(-getViewportY() + getViewportHeight() / 2 - layer.getHeight() / 2); } layer.setSelected(true); document.addLayer(layer); setCurrentLayer(layer); } public void convertSelectionToGroup() { Layer l = getCurrentLayer(); if (l != null) { LayerGroup g; if (l instanceof SelectionGroup) { g = ((SelectionGroup) l).copy(); deleteCurrentLayer(); } else { document.removeLayer(l); g = new LayerGroup(); g.addLayer(l); } addLayer(g); } } }
/*
 * Copyright (c) 2010-2019 Evolveum and contributors
 *
 * This work is dual-licensed under the Apache License 2.0
 * and European Union Public License. See LICENSE file for details.
 */
package com.evolveum.midpoint.web.page.admin.resources;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.xml.namespace.QName;

import com.evolveum.midpoint.prism.path.ItemPath;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.PropertyColumn;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.RepeatingView;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.util.ListModel;
import org.apache.wicket.request.mapper.parameter.PageParameters;

import com.evolveum.midpoint.common.SynchronizationUtils;
import com.evolveum.midpoint.common.refinery.RefinedObjectClassDefinition;
import com.evolveum.midpoint.common.refinery.RefinedResourceSchema;
import com.evolveum.midpoint.common.refinery.RefinedResourceSchemaImpl;
import com.evolveum.midpoint.gui.api.model.LoadableModel;
import com.evolveum.midpoint.gui.api.page.PageBase;
import com.evolveum.midpoint.gui.api.util.WebComponentUtil;
import com.evolveum.midpoint.gui.api.util.WebModelServiceUtils;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.PrismProperty;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.processor.ObjectClassComplexTypeDefinition;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.logging.LoggingUtils;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.web.component.box.BasicInfoBoxPanel;
import com.evolveum.midpoint.web.component.box.InfoBoxPanel;
import com.evolveum.midpoint.web.component.box.InfoBoxType;
import com.evolveum.midpoint.web.component.data.BoxedTablePanel;
import com.evolveum.midpoint.web.component.data.column.ColumnTypeDto;
import com.evolveum.midpoint.web.component.data.column.ColumnUtils;
import com.evolveum.midpoint.web.component.data.column.LinkPanel;
import com.evolveum.midpoint.web.component.util.ListDataProvider;
import com.evolveum.midpoint.web.component.util.SelectableBean;
import com.evolveum.midpoint.web.page.admin.resources.dto.ResourceConfigurationDto;
import com.evolveum.midpoint.web.page.admin.server.PageTaskEdit;
import com.evolveum.midpoint.web.util.OnePageParameterEncoder;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AvailabilityStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ConnectorType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectSynchronizationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationalStateType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceActivationDefinitionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceAttributeDefinitionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceObjectTypeDefinitionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourcePasswordDefinitionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.TaskType;

/**
 * "Details" tab of the resource page. Builds the summary info boxes (last
 * availability status, source/target mapping direction, schema status), the
 * capabilities panel, and a table of the resource's object-type configurations
 * with the synchronization tasks defined for each of them.
 */
public class ResourceDetailsTabPanel extends Panel {

    private static final Trace LOGGER = TraceManager.getTrace(ResourceDetailsTabPanel.class);

    private static final String DOT_CLASS = ResourceDetailsTabPanel.class.getName() + ".";
    private static final String OPERATION_SEARCH_TASKS_FOR_RESOURCE = DOT_CLASS + "seachTasks";

    // Wicket component ids
    public static final String ID_LAST_AVAILABILITY_STATUS = "lastStatus";
    private static final String ID_SOURCE_TARGET = "sourceTarget";
    private static final String ID_SCHEMA_STATUS = "schemaStatus";
    private static final String PANEL_CAPABILITIES = "capabilities";

    private static final long serialVersionUID = 1L;

    // Lazily loads the resource's connector capabilities from the panel model.
    LoadableModel<CapabilitiesDto> capabilitiesModel;

    private PageBase parentPage;

    /**
     * @param id         Wicket component id
     * @param model      model holding a {@code PrismObject<ResourceType>}
     * @param parentPage owning page, used for localization and service access
     */
    public ResourceDetailsTabPanel(String id, final IModel<?> model, PageBase parentPage) {
        super(id, model);
        this.parentPage = parentPage;

        capabilitiesModel = new LoadableModel<CapabilitiesDto>() {
            private static final long serialVersionUID = 1L;

            @Override
            protected CapabilitiesDto load() {
                PrismObject<ResourceType> resource = (PrismObject<ResourceType>) model.getObject();
                return new CapabilitiesDto(resource.asObjectable());
            }
        };

        initLayout(model, parentPage);
    }

    // Builds all child components: info boxes, capabilities panel, and the
    // object-type configuration table (with a custom "tasks" link column).
    protected void initLayout(IModel model, PageBase parentPage) {

        PrismObject<ResourceType> resourceObject = (PrismObject<ResourceType>) model.getObject();
        ResourceType resource = resourceObject.asObjectable();

        add(createLastAvailabilityStatusInfo(resource));

        add(createSourceTargetInfo(resource));

        add(createSchemaStatusInfo(resource));

        CapabilitiesPanel capabilities = new CapabilitiesPanel(PANEL_CAPABILITIES, capabilitiesModel);
        add(capabilities);

        List<ResourceConfigurationDto> resourceConfigList = createResourceConfigList(resource);

        ListDataProvider<ResourceConfigurationDto> resourceConfigProvider = new ListDataProvider<>(
                ResourceDetailsTabPanel.this, new ListModel<>(resourceConfigList));

        List<ColumnTypeDto<String>> columns = Arrays.asList(
                new ColumnTypeDto<String>("ShadowType.kind", "objectTypeDefinition.kind",
                        ShadowType.F_KIND.getLocalPart()),
                new ColumnTypeDto<String>("ShadowType.objectClass",
                        "objectTypeDefinition.objectClass.localPart",
                        ShadowType.F_OBJECT_CLASS.getLocalPart()),
                new ColumnTypeDto<String>("ShadowType.intent", "objectTypeDefinition.intent",
                        ShadowType.F_INTENT.getLocalPart()),
                new ColumnTypeDto<String>("ResourceType.isSync", "sync", null));

        List<IColumn<SelectableBean<ResourceType>, String>> tableColumns = ColumnUtils.createColumns(columns);

        // Extra column rendering one click-through link per synchronization task.
        PropertyColumn tasksColumn = new PropertyColumn(
                PageBase.createStringResourceStatic(this, "ResourceType.tasks"), "definedTasks") {

            @Override
            public void populateItem(Item item, String componentId, final IModel rowModel) {
                ResourceConfigurationDto conf = (ResourceConfigurationDto) rowModel.getObject();
                RepeatingView repeater = new RepeatingView(componentId);
                for (final TaskType task : conf.getDefinedTasks()) {
                    repeater.add(new LinkPanel(repeater.newChildId(), new Model<>(task.getName().getOrig())) {

                        @Override
                        public void onClick(AjaxRequestTarget target) {
                            ResourceDetailsTabPanel.this.taskDetailsPerformed(target, task.getOid());
                        }
                    });
                }

                item.add(repeater);
            }
        };

        tableColumns.add(tasksColumn);

        BoxedTablePanel<ResourceConfigurationDto> resourceConfig = new BoxedTablePanel("resourceConfig",
                resourceConfigProvider, tableColumns);
        resourceConfig.setAdditionalBoxCssClasses("box-success");
        add(resourceConfig);
    }

    /**
     * Builds one {@link ResourceConfigurationDto} per object-type definition in
     * the resource's schema handling, pairing each definition with its matching
     * synchronization policy (if any) and the root-level tasks that apply to it.
     * Returns an empty list when schema handling or object types are absent.
     */
    private List<ResourceConfigurationDto> createResourceConfigList(ResourceType resource) {
        OperationResult result = new OperationResult(OPERATION_SEARCH_TASKS_FOR_RESOURCE);

        // Only root tasks (no parent) referencing this resource are considered.
        List<PrismObject<TaskType>> tasks = WebModelServiceUtils.searchObjects(TaskType.class,
                parentPage.getPrismContext().queryFor(TaskType.class)
                        .item(TaskType.F_OBJECT_REF).ref(resource.getOid())
                        .and()
                        .item(TaskType.F_PARENT).isNull()
                        .build(),
                result, parentPage);

        List<ResourceConfigurationDto> configs = new ArrayList<>();

        if (resource.getSchemaHandling() == null) {
            return configs;
        }

        List<ResourceObjectTypeDefinitionType> objectTypes = resource.getSchemaHandling().getObjectType();

        if (objectTypes == null) {
            return configs;
        }

        try {
            for (ResourceObjectTypeDefinitionType objectType : objectTypes) {
                ObjectSynchronizationType obejctSynchronization = null;
                if (resource.getSynchronization() != null
                        && resource.getSynchronization().getObjectSynchronization() != null) {

                    obejctSynchronization = getSynchronizationFor(objectType,
                            resource.getSynchronization().getObjectSynchronization(),
                            resource.asPrismObject());

                }
                List<TaskType> syncTask = new ArrayList<>();
                if (obejctSynchronization != null) {
                    syncTask = getTaskFor(tasks, obejctSynchronization, resource.asPrismObject());
                }

                ResourceConfigurationDto resourceConfig = new ResourceConfigurationDto(objectType,
                        obejctSynchronization != null, syncTask);
                configs.add(resourceConfig);
            }
        } catch (SchemaException ex) {
            LoggingUtils.logUnexpectedException(LOGGER, "Could not determine resource configuration", ex);
        }

        return configs;
    }

    // Navigates to the task edit page for the clicked task link.
    private void taskDetailsPerformed(AjaxRequestTarget target, String taskOid) {
        PageParameters parameters = new PageParameters();
        parameters.add(OnePageParameterEncoder.PARAMETER, taskOid);
        ((PageBase) getPage()).navigateToNext(PageTaskEdit.class, parameters);
    }

    /**
     * Creates the "mappings" info box describing whether the resource acts as a
     * source, target, both, or has no mappings defined at all (gray box).
     */
    private BasicInfoBoxPanel createSourceTargetInfo(ResourceType resource) {

        String backgroundColor = "bg-aqua";
        SourceTarget sourceTarget = determineIfSourceOrTarget(resource);

        String numberKey = null;
        switch (sourceTarget) {
            case SOURCE:
                numberKey = "PageResource.resource.source";
                break;
            case TARGET:
                numberKey = "PageResource.resource.target";
                break;
            case SOURCE_TARGET:
                numberKey = "PageResource.resource.sourceAndTarget";
                break;

            default:
                backgroundColor = "bg-gray";
                numberKey = "PageResource.resource.noMappings";
                break;
        }

        InfoBoxType infoBoxType = new InfoBoxType(backgroundColor, sourceTarget.getCssClass(),
                parentPage.getString("PageResource.resource.mappings"));
        infoBoxType.setNumber(parentPage.getString(numberKey));

        if (isSynchronizationDefined(resource)) {
            infoBoxType.setDescription(parentPage.getString("PageResource.resource.sync"));
        }

        Model<InfoBoxType> boxModel = new Model<>(infoBoxType);

        return new BasicInfoBoxPanel(ID_SOURCE_TARGET, boxModel);

    }

    /**
     * Creates the availability info box (green/red/yellow/gray by last known
     * status) annotated with the connector's short type name and version.
     */
    private InfoBoxPanel createLastAvailabilityStatusInfo(ResourceType resource) {

        String messageKey = "PageResource.resource.availabilityUnknown";
        String backgroundColor = "bg-gray";
        String icon = "fa fa-question";

        OperationalStateType operationalState = resource.getOperationalState();
        if (operationalState != null) {
            AvailabilityStatusType lastAvailabilityStatus = operationalState.getLastAvailabilityStatus();
            if (lastAvailabilityStatus != null) {
                if (lastAvailabilityStatus == AvailabilityStatusType.UP) {
                    messageKey = "PageResource.resource.up";
                    backgroundColor = "bg-green";
                    icon = "fa fa-power-off";
                } else if (lastAvailabilityStatus == AvailabilityStatusType.DOWN) {
                    backgroundColor = "bg-red";
                    messageKey = "PageResource.resource.down";
                    icon = "fa fa-ban";
                } else if (lastAvailabilityStatus == AvailabilityStatusType.BROKEN) {
                    backgroundColor = "bg-yellow";
                    messageKey = "PageResource.resource.broken";
                    icon = "fa fa-warning";
                }
            }
        }

        InfoBoxType infoBoxType = new InfoBoxType(backgroundColor, icon, parentPage.getString(messageKey));

        // NOTE(review): this chain assumes connectorRef and its resolved object are
        // present; if getObject() returns null this throws NPE before the null check
        // below can help — TODO confirm whether the reference is always resolved here.
        ConnectorType connectorType = (ConnectorType) resource.getConnectorRef().asReferenceValue().getObject().asObjectable();

        if (connectorType == null) {
            // Connector not found. Probably bad connectorRef reference.
            infoBoxType.setNumber("--");
            infoBoxType.setDescription("--");
        } else {
            // Show only the last dotted segment of the connector type as its name.
            String connectorName = StringUtils.substringAfterLast(
                    WebComponentUtil.getEffectiveName(connectorType, ConnectorType.F_CONNECTOR_TYPE), ".");
            String connectorVersion = connectorType.getConnectorVersion();
            infoBoxType.setNumber(connectorName);
            infoBoxType.setDescription(connectorVersion);
        }

        Model<InfoBoxType> boxModel = new Model<>(infoBoxType);

        InfoBoxPanel lastAvailabilityStatus = new BasicInfoBoxPanel(ID_LAST_AVAILABILITY_STATUS, boxModel);
        lastAvailabilityStatus.setOutputMarkupId(true);

        return lastAvailabilityStatus;

    }

    /**
     * Creates the schema info box: number of kinded object types out of all
     * refined definitions (shown as a progress percentage), or an error box when
     * the refined schema cannot be parsed.
     */
    private InfoBoxPanel createSchemaStatusInfo(ResourceType resource) {

        String backgroundColor = "bg-gray";
        String icon = "fa fa-times";
        String numberMessage = null;
        String description = null;

        Integer progress = null;
        RefinedResourceSchema refinedSchema = null;
        try {
            refinedSchema = RefinedResourceSchemaImpl.getRefinedSchema(resource);
            if (refinedSchema != null) {
                backgroundColor = "bg-purple";
                icon = "fa fa-cubes";
                // Count definitions that declare a kind (i.e. real object types).
                int numObjectTypes = 0;
                List<? extends RefinedObjectClassDefinition> refinedDefinitions = refinedSchema
                        .getRefinedDefinitions();
                for (RefinedObjectClassDefinition refinedDefinition : refinedDefinitions) {
                    if (refinedDefinition.getKind() != null) {
                        numObjectTypes++;
                    }
                }

                int numAllDefinitions = refinedDefinitions.size();
                numberMessage = numObjectTypes + " " + parentPage.getString("PageResource.resource.objectTypes");
                if (numAllDefinitions != 0) {
                    progress = numObjectTypes * 100 / numAllDefinitions;
                    if (progress > 100) {
                        progress = 100;
                    }
                }
                description = numAllDefinitions + " " + parentPage.getString("PageResource.resource.schemaDefinitions");
            } else {
                numberMessage = parentPage.getString("PageResource.resource.noSchema");
            }
        } catch (SchemaException e) {
            backgroundColor = "bg-danger";
            icon = "fa fa-warning";
            numberMessage = parentPage.getString("PageResource.resource.schemaError");
        }

        InfoBoxType infoBoxType = new InfoBoxType(backgroundColor, icon,
                parentPage.getString("PageResource.resource.schema"));
        infoBoxType.setNumber(numberMessage);
        infoBoxType.setProgress(progress);
        infoBoxType.setDescription(description);

        Model<InfoBoxType> boxModel = new Model<>(infoBoxType);

        return new BasicInfoBoxPanel(ID_SCHEMA_STATUS, boxModel);
    }

    /**
     * Finds the first synchronization policy applicable to the given object-type
     * definition. As a side effect, a matching policy with an empty objectClass
     * list gets the definition's object class added to it. Returns null when no
     * policy matches.
     */
    private ObjectSynchronizationType getSynchronizationFor(
            ResourceObjectTypeDefinitionType obejctTypesDefinition,
            List<ObjectSynchronizationType> synchronizationPolicies, PrismObject<ResourceType> resource)
            throws SchemaException {
        for (ObjectSynchronizationType synchronizationPolicy : synchronizationPolicies) {
            if (SynchronizationUtils.isPolicyApplicable(obejctTypesDefinition.getObjectClass(),
                    obejctTypesDefinition.getKind(), obejctTypesDefinition.getIntent(), synchronizationPolicy,
                    resource)) {
                if (synchronizationPolicy.getObjectClass().isEmpty()) {
                    synchronizationPolicy.getObjectClass().add(obejctTypesDefinition.getObjectClass());
                }
                return synchronizationPolicy;
            }
        }

        return null;
    }

    /**
     * Filters the given tasks down to those whose extension (kind/intent/object
     * class) makes the given synchronization policy applicable. When the task
     * carries no explicit object class, one is derived from the refined schema
     * using the task's kind and intent (defaulting to ACCOUNT).
     */
    private List<TaskType> getTaskFor(List<PrismObject<TaskType>> tasks,
            ObjectSynchronizationType synchronizationPolicy, PrismObject<ResourceType> resource)
            throws SchemaException {
        List<TaskType> syncTasks = new ArrayList<>();
        for (PrismObject<TaskType> task : tasks) {
            PrismProperty<ShadowKindType> taskKind = task
                    .findProperty(ItemPath.create(TaskType.F_EXTENSION, SchemaConstants.MODEL_EXTENSION_KIND));
            ShadowKindType taskKindValue = null;
            if (taskKind != null) {
                taskKindValue = taskKind.getRealValue();
            }

            PrismProperty<String> taskIntent = task
                    .findProperty(ItemPath.create(TaskType.F_EXTENSION, SchemaConstants.MODEL_EXTENSION_INTENT));
            String taskIntentValue = null;
            if (taskIntent != null) {
                taskIntentValue = taskIntent.getRealValue();
            }

            PrismProperty<QName> taskObjectClass = task.findProperty(
                    ItemPath.create(TaskType.F_EXTENSION, SchemaConstants.MODEL_EXTENSION_OBJECTCLASS));
            QName taskObjectClassValue = null;
            if (taskObjectClass != null) {
                taskObjectClassValue = taskObjectClass.getRealValue();
            }

            // TODO: unify with determineObjectClass in Utils (model-impl, which
            // is not accessible in admin-gui)
            if (taskObjectClassValue == null) {
                ObjectClassComplexTypeDefinition taskObjectClassDef = null;
                RefinedResourceSchema schema = RefinedResourceSchemaImpl.getRefinedSchema(resource);
                if (schema == null) {
                    throw new SchemaException(
                            "No schema defined in resource. Possible configuration problem?");
                }
                if (taskKindValue == null && taskIntentValue == null) {
                    taskObjectClassDef = schema.findDefaultObjectClassDefinition(ShadowKindType.ACCOUNT);
                }

                if (taskKindValue != null) {
                    if (StringUtils.isEmpty(taskIntentValue)) {
                        taskObjectClassDef = schema.findDefaultObjectClassDefinition(taskKindValue);
                    } else {
                        taskObjectClassDef = schema.findObjectClassDefinition(taskKindValue, taskIntentValue);
                    }

                }

                if (taskObjectClassDef != null) {
                    taskObjectClassValue = taskObjectClassDef.getTypeName();
                }
            }

            if (SynchronizationUtils.isPolicyApplicable(taskObjectClassValue, taskKindValue, taskIntentValue,
                    synchronizationPolicy, resource, true)) {
                syncTasks.add(task.asObjectable());
            }
        }

        return syncTasks;
    }

    // TODO: ####### start of move to ResourceTypeUtil ###########

    // True when the attribute has an outbound mapping with a source or expression.
    private boolean isOutboundDefined(ResourceAttributeDefinitionType attr) {
        return attr.getOutbound() != null
                && (attr.getOutbound().getSource() != null || attr.getOutbound().getExpression() != null);
    }

    // True when the attribute has at least one inbound mapping whose first entry
    // defines a target or expression (only the first inbound entry is inspected).
    private boolean isInboundDefined(ResourceAttributeDefinitionType attr) {
        return attr.getInbound() != null && CollectionUtils.isNotEmpty(attr.getInbound())
                && (attr.getInbound().get(0).getTarget() != null
                        || attr.getInbound().get(0).getExpression() != null);
    }

    /**
     * True when the resource has at least one enabled synchronization policy
     * with a non-empty reaction list.
     */
    private boolean isSynchronizationDefined(ResourceType resource) {
        if (resource.getSynchronization() == null) {
            return false;
        }

        if (resource.getSynchronization().getObjectSynchronization().isEmpty()) {
            return false;
        }

        for (ObjectSynchronizationType syncType : resource.getSynchronization().getObjectSynchronization()) {
            if (syncType.isEnabled() != null && !syncType.isEnabled()) {
                continue;
            }

            if (CollectionUtils.isEmpty(syncType.getReaction())) {
                continue;
            }

            return true;

        }

        return false;

    }

    /**
     * Classifies the resource by its credential (password) mappings: outbound
     * makes it a TARGET, inbound a SOURCE, both SOURCE_TARGET.
     */
    private SourceTarget determineCredentialsMappings(ResourceType resource) {
        if (resource.getSchemaHandling() != null
                && CollectionUtils.isNotEmpty(resource.getSchemaHandling().getObjectType())) {

            boolean hasOutbound = false;
            boolean hasInbound = false;

            for (ResourceObjectTypeDefinitionType resourceObjectTypeDefinition : resource.getSchemaHandling()
                    .getObjectType()) {
                if (hasInbound && hasOutbound) {
                    return SourceTarget.SOURCE_TARGET;
                }

                if (resourceObjectTypeDefinition.getCredentials() == null) {
                    continue;
                }

                if (resourceObjectTypeDefinition.getCredentials().getPassword() == null) {
                    continue;
                }

                ResourcePasswordDefinitionType passwordDef = resourceObjectTypeDefinition.getCredentials()
                        .getPassword();
                if (!hasOutbound) {
                    hasOutbound = passwordDef.getOutbound() != null;
                }

                if (!hasInbound) {
                    hasInbound = CollectionUtils.isNotEmpty(passwordDef.getInbound());
                }
            }

            if (hasInbound) {
                return SourceTarget.SOURCE;
            }

            if (hasOutbound) {
                return SourceTarget.TARGET;
            }

        }

        return SourceTarget.NOT_DEFINED;
    }

    /**
     * Classifies the resource by its activation mappings (administrativeStatus,
     * validFrom, validTo, existence): any outbound makes it a TARGET, any
     * inbound a SOURCE, both SOURCE_TARGET.
     */
    private SourceTarget determineActivationMappings(ResourceType resource) {
        if (resource.getSchemaHandling() != null
                && CollectionUtils.isNotEmpty(resource.getSchemaHandling().getObjectType())) {

            boolean hasOutbound = false;
            boolean hasInbound = false;

            for (ResourceObjectTypeDefinitionType resourceObjectTypeDefinition : resource.getSchemaHandling()
                    .getObjectType()) {
                if (hasInbound && hasOutbound) {
                    return SourceTarget.SOURCE_TARGET;
                }

                if (resourceObjectTypeDefinition.getActivation() == null) {
                    continue;
                }

                if (!hasOutbound) {
                    ResourceActivationDefinitionType activationDef = resourceObjectTypeDefinition
                            .getActivation();
                    if (activationDef.getAdministrativeStatus() != null && CollectionUtils
                            .isNotEmpty(activationDef.getAdministrativeStatus().getOutbound())) {
                        hasOutbound = true;
                    }
                }

                if (!hasOutbound) {
                    ResourceActivationDefinitionType activationDef = resourceObjectTypeDefinition
                            .getActivation();
                    if (activationDef.getValidFrom() != null
                            && CollectionUtils.isNotEmpty(activationDef.getValidFrom().getOutbound())) {
                        hasOutbound = true;
                    }
                }

                if (!hasOutbound) {
                    ResourceActivationDefinitionType activationDef = resourceObjectTypeDefinition
                            .getActivation();
                    if (activationDef.getValidTo() != null
                            && CollectionUtils.isNotEmpty(activationDef.getValidTo().getOutbound())) {
                        hasOutbound = true;
                    }
                }

                if (!hasOutbound) {
                    ResourceActivationDefinitionType activationDef = resourceObjectTypeDefinition
                            .getActivation();
                    if (activationDef.getExistence() != null
                            && CollectionUtils.isNotEmpty(activationDef.getExistence().getOutbound())) {
                        hasOutbound = true;
                    }
                }

                if (!hasInbound) {
                    ResourceActivationDefinitionType activationDef = resourceObjectTypeDefinition
                            .getActivation();
                    if (activationDef.getAdministrativeStatus() != null && CollectionUtils
                            .isNotEmpty(activationDef.getAdministrativeStatus().getInbound())) {
                        hasInbound = true;
                    }
                }

                if (!hasInbound) {
                    ResourceActivationDefinitionType activationDef = resourceObjectTypeDefinition
                            .getActivation();
                    if (activationDef.getValidFrom() != null
                            && CollectionUtils.isNotEmpty(activationDef.getValidFrom().getInbound())) {
                        hasInbound = true;
                    }
                }

                if (!hasInbound) {
                    ResourceActivationDefinitionType activationDef = resourceObjectTypeDefinition
                            .getActivation();
                    if (activationDef.getValidTo() != null
                            && CollectionUtils.isNotEmpty(activationDef.getValidTo().getInbound())) {
                        hasInbound = true;
                    }
                }

                if (!hasInbound) {
                    ResourceActivationDefinitionType activationDef = resourceObjectTypeDefinition
                            .getActivation();
                    if (activationDef.getExistence() != null
                            && CollectionUtils.isNotEmpty(activationDef.getExistence().getInbound())) {
                        hasInbound = true;
                    }
                }

            }

            if (hasInbound) {
                return SourceTarget.SOURCE;
            }

            if (hasOutbound) {
                return SourceTarget.TARGET;
            }

        }

        return SourceTarget.NOT_DEFINED;
    }

    /**
     * Classifies the resource by its attribute mappings across all object types:
     * outbound-only resources are TARGET, inbound-only SOURCE, mixed
     * SOURCE_TARGET, neither NOT_DEFINED.
     */
    private SourceTarget determineIfSourceOrTarget(ResourceType resource) {

        if (resource.getSchemaHandling() != null
                && CollectionUtils.isNotEmpty(resource.getSchemaHandling().getObjectType())) {

            boolean hasOutbound = false;
            boolean hasInbound = false;

            for (ResourceObjectTypeDefinitionType resourceObjectTypeDefinition : resource.getSchemaHandling()
                    .getObjectType()) {
                if (CollectionUtils.isEmpty(resourceObjectTypeDefinition.getAttribute())) {
                    continue;
                }

                if (hasInbound && hasOutbound) {
                    return SourceTarget.SOURCE_TARGET;
                }

                for (ResourceAttributeDefinitionType attr : resourceObjectTypeDefinition.getAttribute()) {

                    if (hasInbound && hasOutbound) {
                        return SourceTarget.SOURCE_TARGET;
                    }

                    if (!hasOutbound) {
                        hasOutbound = isOutboundDefined(attr);
                    }

                    if (!hasInbound) {
                        hasInbound = isInboundDefined(attr);
                    }
                }

                // TODO: what about situation that we have only
            }

            if (hasOutbound) {
                return SourceTarget.TARGET;
            }

            if (hasInbound) {
                return SourceTarget.SOURCE;
            }

        }

        return SourceTarget.NOT_DEFINED;
    }

    // TODO: ####### end of move to ResourceTypeUtil ###########

    // Mapping direction of a resource, with the Font Awesome icon used in the UI.
    private enum SourceTarget {

        NOT_DEFINED("fa fa-square-o"), SOURCE("fa fa-sign-in"), TARGET("fa fa-sign-out"), SOURCE_TARGET(
                "fa fa-exchange");

        private String cssClass;

        SourceTarget(String cssClass) {
            this.cssClass = cssClass;
        }

        public String getCssClass() {
            return cssClass;
        }
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.query.groupby.epinephelinae; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import org.apache.druid.common.config.NullHandling; import org.apache.druid.data.input.MapBasedRow; import org.apache.druid.java.util.common.IAE; import org.apache.druid.query.aggregation.AggregatorAdapters; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.query.dimension.DefaultDimensionSpec; import org.apache.druid.query.groupby.orderby.DefaultLimitSpec; import org.apache.druid.query.groupby.orderby.OrderByColumnSpec; import org.apache.druid.query.ordering.StringComparator; import org.apache.druid.query.ordering.StringComparators; import org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; public class 
LimitedBufferHashGrouperTest extends InitializedNullHandlingTest { static final int LIMIT = 100; static final int KEY_BASE = 100000; static final int NUM_ROWS = 1000; @Rule public ExpectedException expectedException = ExpectedException.none(); @Test public void testLimitAndBufferSwapping() { final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); final LimitedBufferHashGrouper<Integer> grouper = makeGrouper(columnSelectorFactory, 20000); columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < NUM_ROWS; i++) { Assert.assertTrue(String.valueOf(i + KEY_BASE), grouper.aggregate(i + KEY_BASE).isOk()); } if (NullHandling.replaceWithDefault()) { // bucket size is hash(int) + key(int) + aggs(2 longs) + heap offset(int) = 28 bytes // limit is 100 so heap occupies 101 * 4 bytes = 404 bytes // buffer is 20000 bytes, so table arena size is 20000 - 404 = 19596 bytes // table arena is split in halves when doing push down, so each half is 9798 bytes // each table arena half can hold 9798 / 28 = 349 buckets, with load factor of 0.5 max buckets per half is 174 // First buffer swap occurs when we hit 174 buckets // Subsequent buffer swaps occur after every 74 buckets, since we keep 100 buckets due to the limit // With 1000 keys inserted, this results in one swap at the first 174 buckets, then 11 swaps afterwards. // After the last swap, we have 100 keys + 12 new keys inserted. 
Assert.assertEquals(12, grouper.getGrowthCount()); Assert.assertEquals(112, grouper.getSize()); Assert.assertEquals(349, grouper.getBuckets()); Assert.assertEquals(174, grouper.getMaxSize()); } else { // With Nullability enabled // bucket size is hash(int) + key(int) + aggs(2 longs + 1 bytes for Long Agg nullability) + heap offset(int) = 29 bytes // limit is 100 so heap occupies 101 * 4 bytes = 404 bytes // buffer is 20000 bytes, so table arena size is 20000 - 404 = 19596 bytes // table arena is split in halves when doing push down, so each half is 9798 bytes // each table arena half can hold 9798 / 29 = 337 buckets, with load factor of 0.5 max buckets per half is 168 // First buffer swap occurs when we hit 168 buckets // Subsequent buffer swaps occur after every 68 buckets, since we keep 100 buckets due to the limit // With 1000 keys inserted, this results in one swap at the first 169 buckets, then 12 swaps afterwards. // After the last swap, we have 100 keys + 16 new keys inserted. Assert.assertEquals(13, grouper.getGrowthCount()); Assert.assertEquals(116, grouper.getSize()); Assert.assertEquals(337, grouper.getBuckets()); Assert.assertEquals(168, grouper.getMaxSize()); } Assert.assertEquals(100, grouper.getLimit()); // Aggregate slightly different row // Since these keys are smaller, they will evict the previous 100 top entries // First 100 of these new rows will be the expected results. columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); for (int i = 0; i < NUM_ROWS; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } if (NullHandling.replaceWithDefault()) { // we added another 1000 unique keys // previous size is 112, so next swap occurs after 62 rows // after that, there are 1000 - 62 = 938 rows, 938 / 74 = 12 additional swaps after the first, // with 50 keys being added after the final swap. 
Assert.assertEquals(25, grouper.getGrowthCount()); Assert.assertEquals(150, grouper.getSize()); Assert.assertEquals(349, grouper.getBuckets()); Assert.assertEquals(174, grouper.getMaxSize()); } else { // With Nullable Aggregator // we added another 1000 unique keys // previous size is 116, so next swap occurs after 52 rows // after that, there are 1000 - 52 = 948 rows, 948 / 68 = 13 additional swaps after the first, // with 64 keys being added after the final swap. Assert.assertEquals(27, grouper.getGrowthCount()); Assert.assertEquals(164, grouper.getSize()); Assert.assertEquals(337, grouper.getBuckets()); Assert.assertEquals(168, grouper.getMaxSize()); } Assert.assertEquals(100, grouper.getLimit()); final List<Grouper.Entry<Integer>> expected = new ArrayList<>(); for (int i = 0; i < LIMIT; i++) { expected.add(new Grouper.Entry<>(i, new Object[]{11L, 1L})); } Assert.assertEquals(expected, Lists.newArrayList(grouper.iterator(true))); // iterate again, even though the min-max offset heap has been destroyed, it is replaced with a reverse sorted array Assert.assertEquals(expected, Lists.newArrayList(grouper.iterator(true))); } @Test public void testBufferTooSmall() { expectedException.expect(IAE.class); expectedException.expectMessage("LimitedBufferHashGrouper initialized with insufficient buffer capacity"); final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); makeGrouper(columnSelectorFactory, 10); } @Test public void testMinBufferSize() { final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); final LimitedBufferHashGrouper<Integer> grouper = makeGrouper(columnSelectorFactory, 12120); columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < NUM_ROWS; i++) { Assert.assertTrue(String.valueOf(i + KEY_BASE), grouper.aggregate(i + KEY_BASE).isOk()); } // With minimum buffer size, after the first swap, every new key added will result in a 
swap if (NullHandling.replaceWithDefault()) { Assert.assertEquals(224, grouper.getGrowthCount()); Assert.assertEquals(104, grouper.getSize()); Assert.assertEquals(209, grouper.getBuckets()); Assert.assertEquals(104, grouper.getMaxSize()); } else { Assert.assertEquals(899, grouper.getGrowthCount()); Assert.assertEquals(101, grouper.getSize()); Assert.assertEquals(202, grouper.getBuckets()); Assert.assertEquals(101, grouper.getMaxSize()); } Assert.assertEquals(100, grouper.getLimit()); // Aggregate slightly different row // Since these keys are smaller, they will evict the previous 100 top entries // First 100 of these new rows will be the expected results. columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); for (int i = 0; i < NUM_ROWS; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } if (NullHandling.replaceWithDefault()) { Assert.assertEquals(474, grouper.getGrowthCount()); Assert.assertEquals(104, grouper.getSize()); Assert.assertEquals(209, grouper.getBuckets()); Assert.assertEquals(104, grouper.getMaxSize()); } else { Assert.assertEquals(1899, grouper.getGrowthCount()); Assert.assertEquals(101, grouper.getSize()); Assert.assertEquals(202, grouper.getBuckets()); Assert.assertEquals(101, grouper.getMaxSize()); } Assert.assertEquals(100, grouper.getLimit()); final List<Grouper.Entry<Integer>> expected = new ArrayList<>(); for (int i = 0; i < LIMIT; i++) { expected.add(new Grouper.Entry<>(i, new Object[]{11L, 1L})); } Assert.assertEquals(expected, Lists.newArrayList(grouper.iterator(true))); // iterate again, even though the min-max offset heap has been destroyed, it is replaced with a reverse sorted array Assert.assertEquals(expected, Lists.newArrayList(grouper.iterator(true))); } @Test public void testAggregateAfterIterated() { expectedException.expect(IllegalStateException.class); expectedException.expectMessage("attempted to add offset after grouper was iterated"); final TestColumnSelectorFactory 
columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); final LimitedBufferHashGrouper<Integer> grouper = makeGrouper(columnSelectorFactory, 12120); columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < NUM_ROWS; i++) { Assert.assertTrue(String.valueOf(i + KEY_BASE), grouper.aggregate(i + KEY_BASE).isOk()); } List<Grouper.Entry<Integer>> iterated = Lists.newArrayList(grouper.iterator(true)); Assert.assertEquals(LIMIT, iterated.size()); // an attempt to aggregate with a new key will explode after the grouper has been iterated grouper.aggregate(KEY_BASE + NUM_ROWS + 1); } @Test public void testIteratorOrderByDim() { final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); final LimitedBufferHashGrouper<Integer> grouper = makeGrouperWithOrderBy( columnSelectorFactory, "value", OrderByColumnSpec.Direction.ASCENDING ); for (int i = 0; i < NUM_ROWS; i++) { // limited grouper iterator will always sort by keys in ascending order, even if the heap was sorted by values // so, we aggregate with keys and values both descending so that the results are not re-ordered by key columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", NUM_ROWS - i + KEY_BASE))); Assert.assertTrue(String.valueOf(NUM_ROWS - i + KEY_BASE), grouper.aggregate(NUM_ROWS - i + KEY_BASE).isOk()); } List<Grouper.Entry<Integer>> iterated = Lists.newArrayList(grouper.iterator(true)); Assert.assertEquals(LIMIT, iterated.size()); for (int i = 0; i < LIMIT; i++) { Assert.assertEquals(KEY_BASE + i + 1L, iterated.get(i).getValues()[0]); } } @Test public void testIteratorOrderByDimDesc() { final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); final LimitedBufferHashGrouper<Integer> grouper = makeGrouperWithOrderBy( columnSelectorFactory, "value", OrderByColumnSpec.Direction.DESCENDING ); for (int i = 0; i < NUM_ROWS; i++) { // limited grouper iterator will 
always sort by keys in ascending order, even if the heap was sorted by values // so, we aggregate with keys and values both ascending so that the results are not re-ordered by key columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", i + 1))); Assert.assertTrue(String.valueOf(i + KEY_BASE), grouper.aggregate(i + KEY_BASE).isOk()); } List<Grouper.Entry<Integer>> iterated = Lists.newArrayList(grouper.iterator(true)); Assert.assertEquals(LIMIT, iterated.size()); for (int i = 0; i < LIMIT; i++) { Assert.assertEquals((long) NUM_ROWS - i, iterated.get(i).getValues()[0]); } } @Test public void testIteratorOrderByAggs() { final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); final LimitedBufferHashGrouper<Integer> grouper = makeGrouperWithOrderBy( columnSelectorFactory, "valueSum", OrderByColumnSpec.Direction.ASCENDING ); for (int i = 0; i < NUM_ROWS; i++) { // limited grouper iterator will always sort by keys in ascending order, even if the heap was sorted by values // so, we aggregate with keys and values both descending so that the results are not re-ordered by key columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", NUM_ROWS - i))); Assert.assertTrue(String.valueOf(NUM_ROWS - i + KEY_BASE), grouper.aggregate(NUM_ROWS - i + KEY_BASE).isOk()); } List<Grouper.Entry<Integer>> iterated = Lists.newArrayList(grouper.iterator(true)); Assert.assertEquals(LIMIT, iterated.size()); for (int i = 0; i < LIMIT; i++) { Assert.assertEquals(i + 1L, iterated.get(i).getValues()[0]); } } @Test public void testIteratorOrderByAggsDesc() { final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory(); final LimitedBufferHashGrouper<Integer> grouper = makeGrouperWithOrderBy( columnSelectorFactory, "valueSum", OrderByColumnSpec.Direction.DESCENDING ); for (int i = 0; i < NUM_ROWS; i++) { // limited grouper iterator will always sort by keys in ascending order, even if the 
heap was sorted by values // so, we aggregate with keys descending and values asending so that the results are not re-ordered by key columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", i + 1))); Assert.assertTrue(String.valueOf(NUM_ROWS - i + KEY_BASE), grouper.aggregate(NUM_ROWS - i + KEY_BASE).isOk()); } List<Grouper.Entry<Integer>> iterated = Lists.newArrayList(grouper.iterator(true)); Assert.assertEquals(LIMIT, iterated.size()); for (int i = 0; i < LIMIT; i++) { Assert.assertEquals((long) NUM_ROWS - i, iterated.get(i).getValues()[0]); } } private static LimitedBufferHashGrouper<Integer> makeGrouper( TestColumnSelectorFactory columnSelectorFactory, int bufferSize ) { LimitedBufferHashGrouper<Integer> grouper = new LimitedBufferHashGrouper<>( Suppliers.ofInstance(ByteBuffer.allocate(bufferSize)), GrouperTestUtil.intKeySerde(), AggregatorAdapters.factorizeBuffered( columnSelectorFactory, ImmutableList.of( new LongSumAggregatorFactory("valueSum", "value"), new CountAggregatorFactory("count") ) ), Integer.MAX_VALUE, 0.5f, 2, LIMIT, false ); grouper.init(); return grouper; } private static LimitedBufferHashGrouper<Integer> makeGrouperWithOrderBy( TestColumnSelectorFactory columnSelectorFactory, String orderByColumn, OrderByColumnSpec.Direction direction ) { final StringComparator stringComparator = "value".equals(orderByColumn) ? 
StringComparators.LEXICOGRAPHIC : StringComparators.NUMERIC; final DefaultLimitSpec orderBy = DefaultLimitSpec.builder() .orderBy( new OrderByColumnSpec( orderByColumn, direction, stringComparator ) ) .limit(LIMIT) .build(); LimitedBufferHashGrouper<Integer> grouper = new LimitedBufferHashGrouper<>( Suppliers.ofInstance(ByteBuffer.allocate(12120)), new GroupByIshKeySerde(orderBy), AggregatorAdapters.factorizeBuffered( columnSelectorFactory, ImmutableList.of( new LongSumAggregatorFactory("valueSum", "value"), new CountAggregatorFactory("count") ) ), Integer.MAX_VALUE, 0.5f, 2, LIMIT, !orderBy.getColumns().get(0).getDimension().equals("value") ); grouper.init(); return grouper; } /** * key serde for more realistic ordering tests, similar to the {@link GroupByQueryEngineV2.GroupByEngineKeySerde} or * {@link RowBasedGrouperHelper.RowBasedKeySerde} which are likely to be used in practice by the group-by engine, * which also both use {@link GrouperBufferComparatorUtils} to make comparators */ private static class GroupByIshKeySerde extends IntKeySerde { private final DefaultLimitSpec orderBy; public GroupByIshKeySerde(DefaultLimitSpec orderBy) { this.orderBy = orderBy; } @Override public Grouper.BufferComparator bufferComparator() { return GrouperBufferComparatorUtils.bufferComparator( false, false, 1, new Grouper.BufferComparator[] {KEY_COMPARATOR} ); } @Override public Grouper.BufferComparator bufferComparatorWithAggregators( AggregatorFactory[] aggregatorFactories, int[] aggregatorOffsets ) { return GrouperBufferComparatorUtils.bufferComparatorWithAggregators( aggregatorFactories, aggregatorOffsets, orderBy, ImmutableList.of(DefaultDimensionSpec.of("value")), new Grouper.BufferComparator[] {KEY_COMPARATOR}, false, false, Integer.BYTES ); } } }
/* Derby - Class org.apache.derby.impl.store.raw.data.PhysicalUndoOperation Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.store.raw.data; import org.apache.derby.iapi.services.io.StoredFormatIds; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.iapi.store.raw.Compensation; import org.apache.derby.iapi.store.raw.Loggable; import org.apache.derby.iapi.store.raw.Transaction; import org.apache.derby.iapi.store.raw.Undoable; import org.apache.derby.iapi.store.raw.log.LogInstant; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.iapi.util.ByteArray; import java.io.IOException; import org.apache.derby.iapi.services.io.LimitObjectInput; /** PhysicalUndoOperation is a compensation operation that rolls back the change of an Undo-able operation. A PhysicalUndoOperation itself is not undo-able, i.e, it is loggable but not undoable. 
<PRE> @derby.formatId LOGOP_PAGE_PHYSICAL_UNDO the formatId is written by FormatIdOutputStream when this object is written out by writeObject @derby.purpose update a physical log operation @derby.upgrade @derby.diskLayout PageBasicOperation the super class OptionalData none (compensation operation never have optional data) @derby.endFormat </PRE> */ public final class PhysicalUndoOperation extends PageBasicOperation implements Compensation { /** The operation to be rolled back */ transient private PhysicalPageOperation undoOp; PhysicalUndoOperation(BasePage page) { super(page); } /** Set up a compensation operation during run time rollback */ PhysicalUndoOperation(BasePage page, PhysicalPageOperation op) { super(page); undoOp = op; } /** Return my format identifier. */ // no-arg constructor, required by Formatable public PhysicalUndoOperation() { super(); } public int getTypeFormatId() { return StoredFormatIds.LOGOP_PAGE_PHYSICAL_UNDO; } // no fields, therefore no writeExternal or readExternal /** Compensation methods */ /** Set up a PageUndoOperation during recovery redo. */ public void setUndoOp(Undoable op) { if (SanityManager.DEBUG) { SanityManager.ASSERT(op instanceof PhysicalPageOperation); } undoOp = (PhysicalPageOperation)op; } /** Loggable methods */ /** Apply the undo operation, in this implementation of the RawStore, it can only call the undoMe method of undoOp @param xact the Transaction that is doing the rollback @param instant the log instant of this undo operation @param in optional data @exception IOException Can be thrown by any of the methods of InputStream. @exception StandardException Standard Derby policy. 
*/ public final void doMe(Transaction xact, LogInstant instant, LimitObjectInput in) throws StandardException, IOException { long oldversion = 0; // sanity check LogInstant oldLogInstant = null; // sanity check if (SanityManager.DEBUG) { oldLogInstant = this.page.getLastLogInstant(); oldversion = this.page.getPageVersion(); SanityManager.ASSERT(oldversion == this.getPageVersion()); SanityManager.ASSERT(oldLogInstant == null || instant == null || oldLogInstant.lessThan(instant)); } // if this is called during runtime rollback, PageOp.generateUndo found // the page and have it latched there. // if this is called during recovery redo, this.needsRedo found the page and // have it latched here // // in either case, this.page is the correct page and is latched. // undoOp.undoMe(xact, this.page, instant, in); if (SanityManager.DEBUG) { if (oldversion >= this.page.getPageVersion()) { SanityManager.THROWASSERT( "oldversion = " + oldversion + ";page version = " + this.page.getPageVersion() + "page = " + page + "; my class name is " + getClass().getName() + " undoOp is " + undoOp.getClass().getName() ); } SanityManager.ASSERT( oldversion < this.page.getPageVersion()); if (instant != null && ! instant.equals(this.page.getLastLogInstant())) SanityManager.THROWASSERT( "my class name is " + getClass().getName() + " undoOp is " + undoOp.getClass().getName() ); } releaseResource(xact); } /* make sure resource found in undoOp is released */ public void releaseResource(Transaction xact) { if (undoOp != null) undoOp.releaseResource(xact); super.releaseResource(xact); } /* Undo operation is a COMPENSATION log operation */ public int group() { return super.group() | Loggable.COMPENSATION | Loggable.RAWSTORE; } public final ByteArray getPreparedLog() { // should never ever write optional data because this implementation of // the recovery system will never read this and pass this on to dome. 
// Instead, the optional data of the undoOp will be used - since // this.doMe can only call undoOP.undoMe, this has no use for any // optional data. return (ByteArray) null; } public void restoreMe(Transaction xact, BasePage undoPage, LogInstant CLRinstant, LimitObjectInput in) { // Not undoable if (SanityManager.DEBUG) SanityManager.THROWASSERT("cannot call restore me on PhysicalUndoOperation"); } /** DEBUG: Print self. */ public String toString() { if (SanityManager.DEBUG) { String str = "CLR (Physical Undo): " + super.toString(); if (undoOp != null) str += "\n" + undoOp.toString(); else str += "undo Operation not set"; return str; } else return null; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.standard; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.EventDriven; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.ReadsAttribute; import org.apache.nifi.annotation.behavior.WritesAttribute; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.expression.AttributeExpression; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractSessionFactoryProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.ProcessSessionFactory; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import 
org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processor.util.pattern.ErrorTypes; import org.apache.nifi.processor.util.pattern.ExceptionHandler; import org.apache.nifi.processor.util.pattern.PartialFunctions; import org.apache.nifi.processor.util.pattern.Put; import org.apache.nifi.processor.util.pattern.RollbackOnFailure; import org.apache.nifi.processor.util.pattern.RoutingResult; import org.apache.nifi.serialization.MalformedRecordException; import org.apache.nifi.serialization.RecordReader; import org.apache.nifi.serialization.RecordReaderFactory; import org.apache.nifi.serialization.record.Record; import org.apache.nifi.serialization.record.RecordField; import org.apache.nifi.serialization.record.RecordSchema; import java.io.IOException; import java.io.InputStream; import java.sql.BatchUpdateException; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLDataException; import java.sql.SQLException; import java.sql.SQLIntegrityConstraintViolationException; import java.sql.SQLNonTransientException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static java.lang.String.format; @EventDriven @InputRequirement(Requirement.INPUT_REQUIRED) @Tags({"sql", "record", "jdbc", "put", "database", "update", "insert", "delete"}) @CapabilityDescription("The PutDatabaseRecord processor uses a specified RecordReader to input (possibly multiple) records from an incoming flow file. These records are translated to SQL " + "statements and executed as a single batch. 
If any errors occur, the flow file is routed to failure or retry, and if the records are transmitted successfully, the incoming flow file is " + "routed to success. The type of statement executed by the processor is specified via the Statement Type property, which accepts some hard-coded values such as INSERT, UPDATE, and DELETE, " + "as well as 'Use statement.type Attribute', which causes the processor to get the statement type from a flow file attribute. IMPORTANT: If the Statement Type is UPDATE, then the incoming " + "records must not alter the value(s) of the primary keys (or user-specified Update Keys). If such records are encountered, the UPDATE statement issued to the database may do nothing " + "(if no existing records with the new primary key values are found), or could inadvertently corrupt the existing data (by changing records for which the new values of the primary keys " + "exist).") @ReadsAttribute(attribute = PutDatabaseRecord.STATEMENT_TYPE_ATTRIBUTE, description = "If 'Use statement.type Attribute' is selected for the Statement Type property, the value of this attribute " + "will be used to determine the type of statement (INSERT, UPDATE, DELETE, SQL, etc.) 
to generate and execute.") @WritesAttribute(attribute = PutDatabaseRecord.PUT_DATABASE_RECORD_ERROR, description = "If an error occurs during processing, the flow file will be routed to failure or retry, and this attribute " + "will be populated with the cause of the error.") public class PutDatabaseRecord extends AbstractSessionFactoryProcessor { static final String UPDATE_TYPE = "UPDATE"; static final String INSERT_TYPE = "INSERT"; static final String DELETE_TYPE = "DELETE"; static final String SQL_TYPE = "SQL"; // Not an allowable value in the Statement Type property, must be set by attribute static final String USE_ATTR_TYPE = "Use statement.type Attribute"; static final String STATEMENT_TYPE_ATTRIBUTE = "statement.type"; static final String PUT_DATABASE_RECORD_ERROR = "putdatabaserecord.error"; static final AllowableValue IGNORE_UNMATCHED_FIELD = new AllowableValue("Ignore Unmatched Fields", "Ignore Unmatched Fields", "Any field in the document that cannot be mapped to a column in the database is ignored"); static final AllowableValue FAIL_UNMATCHED_FIELD = new AllowableValue("Fail on Unmatched Fields", "Fail on Unmatched Fields", "If the document has any field that cannot be mapped to a column in the database, the FlowFile will be routed to the failure relationship"); static final AllowableValue IGNORE_UNMATCHED_COLUMN = new AllowableValue("Ignore Unmatched Columns", "Ignore Unmatched Columns", "Any column in the database that does not have a field in the document will be assumed to not be required. No notification will be logged"); static final AllowableValue WARNING_UNMATCHED_COLUMN = new AllowableValue("Warn on Unmatched Columns", "Warn on Unmatched Columns", "Any column in the database that does not have a field in the document will be assumed to not be required. 
A warning will be logged"); static final AllowableValue FAIL_UNMATCHED_COLUMN = new AllowableValue("Fail on Unmatched Columns", "Fail on Unmatched Columns", "A flow will fail if any column in the database that does not have a field in the document. An error will be logged"); // Relationships public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("Successfully created FlowFile from SQL query result set.") .build(); static final Relationship REL_RETRY = new Relationship.Builder() .name("retry") .description("A FlowFile is routed to this relationship if the database cannot be updated but attempting the operation again may succeed") .build(); static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("A FlowFile is routed to this relationship if the database cannot be updated and retrying the operation will also fail, " + "such as an invalid query or an integrity constraint violation") .build(); protected static Set<Relationship> relationships; // Properties static final PropertyDescriptor RECORD_READER_FACTORY = new PropertyDescriptor.Builder() .name("put-db-record-record-reader") .displayName("Record Reader") .description("Specifies the Controller Service to use for parsing incoming data and determining the data's schema.") .identifiesControllerService(RecordReaderFactory.class) .required(true) .build(); static final PropertyDescriptor STATEMENT_TYPE = new PropertyDescriptor.Builder() .name("put-db-record-statement-type") .displayName("Statement Type") .description("Specifies the type of SQL Statement to generate. If 'Use statement.type Attribute' is chosen, then the value is taken from the statement.type attribute in the " + "FlowFile. The 'Use statement.type Attribute' option is the only one that allows the 'SQL' statement type. 
If 'SQL' is specified, the value of the field specified by the " + "'Field Containing SQL' property is expected to be a valid SQL statement on the target database, and will be executed as-is.") .required(true) .allowableValues(UPDATE_TYPE, INSERT_TYPE, DELETE_TYPE, USE_ATTR_TYPE) .build(); static final PropertyDescriptor DBCP_SERVICE = new PropertyDescriptor.Builder() .name("put-db-record-dcbp-service") .displayName("Database Connection Pooling Service") .description("The Controller Service that is used to obtain a connection to the database for sending records.") .required(true) .identifiesControllerService(DBCPService.class) .build(); static final PropertyDescriptor CATALOG_NAME = new PropertyDescriptor.Builder() .name("put-db-record-catalog-name") .displayName("Catalog Name") .description("The name of the catalog that the statement should update. This may not apply for the database that you are updating. In this case, leave the field empty") .required(false) .expressionLanguageSupported(true) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); static final PropertyDescriptor SCHEMA_NAME = new PropertyDescriptor.Builder() .name("put-db-record-schema-name") .displayName("Schema Name") .description("The name of the schema that the table belongs to. This may not apply for the database that you are updating. 
In this case, leave the field empty") .required(false) .expressionLanguageSupported(true) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder() .name("put-db-record-table-name") .displayName("Table Name") .description("The name of the table that the statement should affect.") .required(true) .expressionLanguageSupported(true) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); static final PropertyDescriptor TRANSLATE_FIELD_NAMES = new PropertyDescriptor.Builder() .name("put-db-record-translate-field-names") .displayName("Translate Field Names") .description("If true, the Processor will attempt to translate field names into the appropriate column names for the table specified. " + "If false, the field names must match the column names exactly, or the column will not be updated") .allowableValues("true", "false") .defaultValue("true") .build(); static final PropertyDescriptor UNMATCHED_FIELD_BEHAVIOR = new PropertyDescriptor.Builder() .name("put-db-record-unmatched-field-behavior") .displayName("Unmatched Field Behavior") .description("If an incoming record has a field that does not map to any of the database table's columns, this property specifies how to handle the situation") .allowableValues(IGNORE_UNMATCHED_FIELD, FAIL_UNMATCHED_FIELD) .defaultValue(IGNORE_UNMATCHED_FIELD.getValue()) .build(); static final PropertyDescriptor UNMATCHED_COLUMN_BEHAVIOR = new PropertyDescriptor.Builder() .name("put-db-record-unmatched-column-behavior") .displayName("Unmatched Column Behavior") .description("If an incoming record does not have a field mapping for all of the database table's columns, this property specifies how to handle the situation") .allowableValues(IGNORE_UNMATCHED_COLUMN, WARNING_UNMATCHED_COLUMN, FAIL_UNMATCHED_COLUMN) .defaultValue(FAIL_UNMATCHED_COLUMN.getValue()) .build(); static final PropertyDescriptor UPDATE_KEYS = new PropertyDescriptor.Builder() 
.name("put-db-record-update-keys") .displayName("Update Keys") .description("A comma-separated list of column names that uniquely identifies a row in the database for UPDATE statements. " + "If the Statement Type is UPDATE and this property is not set, the table's Primary Keys are used. " + "In this case, if no Primary Key exists, the conversion to SQL will fail if Unmatched Column Behaviour is set to FAIL. " + "This property is ignored if the Statement Type is INSERT") .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .required(false) .expressionLanguageSupported(true) .build(); static final PropertyDescriptor FIELD_CONTAINING_SQL = new PropertyDescriptor.Builder() .name("put-db-record-field-containing-sql") .displayName("Field Containing SQL") .description("If the Statement Type is 'SQL' (as set in the statement.type attribute), this field indicates which field in the record(s) contains the SQL statement to execute. The value " + "of the field must be a single SQL statement. If the Statement Type is not 'SQL', this field is ignored.") .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .required(false) .expressionLanguageSupported(true) .build(); static final PropertyDescriptor QUOTED_IDENTIFIERS = new PropertyDescriptor.Builder() .name("put-db-record-quoted-identifiers") .displayName("Quote Column Identifiers") .description("Enabling this option will cause all column names to be quoted, allowing you to use reserved words as column names in your tables.") .allowableValues("true", "false") .defaultValue("false") .build(); static final PropertyDescriptor QUOTED_TABLE_IDENTIFIER = new PropertyDescriptor.Builder() .name("put-db-record-quoted-table-identifiers") .displayName("Quote Table Identifiers") .description("Enabling this option will cause the table name to be quoted to support the use of special characters in the table name.") .allowableValues("true", "false") .defaultValue("false") .build(); static final PropertyDescriptor QUERY_TIMEOUT = new 
PropertyDescriptor.Builder() .name("put-db-record-query-timeout") .displayName("Max Wait Time") .description("The maximum amount of time allowed for a running SQL statement " + ", zero means there is no limit. Max time less than 1 second will be equal to zero.") .defaultValue("0 seconds") .required(true) .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .expressionLanguageSupported(true) .build(); protected static List<PropertyDescriptor> propDescriptors; private final Map<SchemaKey, TableSchema> schemaCache = new LinkedHashMap<SchemaKey, TableSchema>(100) { private static final long serialVersionUID = 1L; @Override protected boolean removeEldestEntry(Map.Entry<SchemaKey, TableSchema> eldest) { return size() >= 100; } }; static { final Set<Relationship> r = new HashSet<>(); r.add(REL_SUCCESS); r.add(REL_FAILURE); r.add(REL_RETRY); relationships = Collections.unmodifiableSet(r); final List<PropertyDescriptor> pds = new ArrayList<>(); pds.add(RECORD_READER_FACTORY); pds.add(STATEMENT_TYPE); pds.add(DBCP_SERVICE); pds.add(CATALOG_NAME); pds.add(SCHEMA_NAME); pds.add(TABLE_NAME); pds.add(TRANSLATE_FIELD_NAMES); pds.add(UNMATCHED_FIELD_BEHAVIOR); pds.add(UNMATCHED_COLUMN_BEHAVIOR); pds.add(UPDATE_KEYS); pds.add(FIELD_CONTAINING_SQL); pds.add(QUOTED_IDENTIFIERS); pds.add(QUOTED_TABLE_IDENTIFIER); pds.add(QUERY_TIMEOUT); pds.add(RollbackOnFailure.ROLLBACK_ON_FAILURE); propDescriptors = Collections.unmodifiableList(pds); } private Put<FunctionContext, Connection> process; private ExceptionHandler<FunctionContext> exceptionHandler; @Override public Set<Relationship> getRelationships() { return relationships; } @Override protected List<PropertyDescriptor> getSupportedPropertyDescriptors() { return propDescriptors; } @Override protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) { return new PropertyDescriptor.Builder() .name(propertyDescriptorName) .required(false) 
.addValidator(StandardValidators.createAttributeExpressionLanguageValidator(AttributeExpression.ResultType.STRING, true)) .addValidator(StandardValidators.ATTRIBUTE_KEY_PROPERTY_NAME_VALIDATOR) .expressionLanguageSupported(true) .dynamic(true) .build(); } private final PartialFunctions.InitConnection<FunctionContext, Connection> initConnection = (c, s, fc) -> { final Connection connection = c.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class).getConnection(); try { fc.originalAutoCommit = connection.getAutoCommit(); connection.setAutoCommit(false); String jdbcUrl = "DBCPService"; try { DatabaseMetaData databaseMetaData = connection.getMetaData(); if (databaseMetaData != null) { jdbcUrl = databaseMetaData.getURL(); } } catch (SQLException se) { // Ignore and use default JDBC URL. This shouldn't happen unless the driver doesn't implement getMetaData() properly } finally { fc.jdbcUrl = jdbcUrl; } } catch (SQLException e) { throw new ProcessException("Failed to disable auto commit due to " + e, e); } return connection; }; private final Put.PutFlowFile<FunctionContext, Connection> putFlowFile = (context, session, functionContext, conn, flowFile, result) -> { exceptionHandler.execute(functionContext, flowFile, inputFlowFile -> { // Get the statement type from the attribute if necessary final String statementTypeProperty = context.getProperty(STATEMENT_TYPE).getValue(); String statementType = statementTypeProperty; if (USE_ATTR_TYPE.equals(statementTypeProperty)) { statementType = inputFlowFile.getAttribute(STATEMENT_TYPE_ATTRIBUTE); } if (StringUtils.isEmpty(statementType)) { final String msg = format("Statement Type is not specified, FlowFile %s", inputFlowFile); throw new IllegalArgumentException(msg); } try (final InputStream in = session.read(inputFlowFile)) { final RecordReaderFactory recordParserFactory = context.getProperty(RECORD_READER_FACTORY) .asControllerService(RecordReaderFactory.class); final RecordReader recordParser = 
recordParserFactory.createRecordReader(inputFlowFile, in, getLogger()); if (SQL_TYPE.equalsIgnoreCase(statementType)) { executeSQL(context, session, inputFlowFile, functionContext, result, conn, recordParser); } else { final DMLSettings settings = new DMLSettings(context); executeDML(context, session, inputFlowFile, functionContext, result, conn, recordParser, statementType, settings); } } }, (fc, inputFlowFile, r, e) -> { getLogger().warn("Failed to process {} due to {}", new Object[]{inputFlowFile, e}, e); if (e instanceof BatchUpdateException) { try { // Although process session will move forward in order to route the failed FlowFile, // database transaction should be rolled back to avoid partial batch update. conn.rollback(); } catch (SQLException re) { getLogger().error("Failed to rollback database due to {}, transaction may be incomplete.", new Object[]{re}, re); } } // Embed Exception detail to FlowFile attribute then delegate error handling to default and rollbackOnFailure. final FlowFile flowFileWithAttributes = session.putAttribute(inputFlowFile, PUT_DATABASE_RECORD_ERROR, e.getMessage()); final ExceptionHandler.OnError<FunctionContext, FlowFile> defaultOnError = ExceptionHandler.createOnError(context, session, result, REL_FAILURE, REL_RETRY); final ExceptionHandler.OnError<FunctionContext, FlowFile> rollbackOnFailure = RollbackOnFailure.createOnError(defaultOnError); rollbackOnFailure.apply(fc, flowFileWithAttributes, r, e); }); }; @OnScheduled public void onScheduled(final ProcessContext context) { synchronized (this) { schemaCache.clear(); } process = new Put<>(); process.setLogger(getLogger()); process.initConnection(initConnection); process.putFlowFile(putFlowFile); process.adjustRoute(RollbackOnFailure.createAdjustRoute(REL_FAILURE, REL_RETRY)); process.onCompleted((c, s, fc, conn) -> { try { conn.commit(); } catch (SQLException e) { // Throw ProcessException to rollback process session. 
throw new ProcessException("Failed to commit database connection due to " + e, e); } }); process.onFailed((c, s, fc, conn, e) -> { try { conn.rollback(); } catch (SQLException re) { // Just log the fact that rollback failed. // ProcessSession will be rollback by the thrown Exception so don't have to do anything here. getLogger().warn("Failed to rollback database connection due to %s", new Object[]{re}, re); } }); process.cleanup((c, s, fc, conn) -> { // make sure that we try to set the auto commit back to whatever it was. if (fc.originalAutoCommit) { try { conn.setAutoCommit(true); } catch (final SQLException se) { getLogger().warn("Failed to reset autocommit due to {}", new Object[]{se}); } } }); exceptionHandler = new ExceptionHandler<>(); exceptionHandler.mapException(s -> { try { if (s == null) { return ErrorTypes.PersistentFailure; } throw s; } catch (IllegalArgumentException |MalformedRecordException |SQLNonTransientException e) { return ErrorTypes.InvalidInput; } catch (IOException |SQLException e) { return ErrorTypes.TemporalFailure; } catch (Exception e) { return ErrorTypes.UnknownFailure; } }); exceptionHandler.adjustError(RollbackOnFailure.createAdjustError(getLogger())); } private static class FunctionContext extends RollbackOnFailure { private final int queryTimeout; private boolean originalAutoCommit = false; private String jdbcUrl; public FunctionContext(boolean rollbackOnFailure, int queryTimeout) { super(rollbackOnFailure, true); this.queryTimeout = queryTimeout; } } static class DMLSettings { private final boolean translateFieldNames; private final boolean ignoreUnmappedFields; // Is the unmatched column behaviour fail or warning? private final boolean failUnmappedColumns; private final boolean warningUnmappedColumns; // Escape column names? private final boolean escapeColumnNames; // Quote table name? 
private final boolean quoteTableName;

        private DMLSettings(ProcessContext context) {
            translateFieldNames = context.getProperty(TRANSLATE_FIELD_NAMES).asBoolean();
            ignoreUnmappedFields = IGNORE_UNMATCHED_FIELD.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_FIELD_BEHAVIOR).getValue());

            failUnmappedColumns = FAIL_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
            warningUnmappedColumns = WARNING_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());

            escapeColumnNames = context.getProperty(QUOTED_IDENTIFIERS).asBoolean();
            quoteTableName = context.getProperty(QUOTED_TABLE_IDENTIFIER).asBoolean();
        }
    }

    /**
     * Statement Type 'SQL': each record carries a complete SQL statement in the configured field;
     * every statement is executed as-is on the shared (non-auto-commit) connection.
     * Throws IllegalArgumentException when the SQL field is unset or absent from the schema, and
     * MalformedRecordException when a record has no (or an empty) value for that field.
     */
    private void executeSQL(ProcessContext context, ProcessSession session, FlowFile flowFile,
                            FunctionContext functionContext, RoutingResult result, Connection con, RecordReader recordParser)
            throws IllegalArgumentException, MalformedRecordException, IOException, SQLException {

        final RecordSchema recordSchema = recordParser.getSchema();

        // Find which field has the SQL statement in it
        final String sqlField = context.getProperty(FIELD_CONTAINING_SQL).evaluateAttributeExpressions(flowFile).getValue();
        if (StringUtils.isEmpty(sqlField)) {
            throw new IllegalArgumentException(format("SQL specified as Statement Type but no Field Containing SQL was found, FlowFile %s", flowFile));
        }

        boolean schemaHasSqlField = recordSchema.getFields().stream().anyMatch((field) -> sqlField.equals(field.getFieldName()));
        if (!schemaHasSqlField) {
            throw new IllegalArgumentException(format("Record schema does not contain Field Containing SQL: %s, FlowFile %s", sqlField, flowFile));
        }

        try (Statement s = con.createStatement()) {
            try {
                s.setQueryTimeout(functionContext.queryTimeout); // timeout in seconds
            } catch (SQLException se) {
                // If the driver doesn't support query timeout, then assume it is "infinite". Allow a timeout of zero only.
                if (functionContext.queryTimeout > 0) {
                    throw se;
                }
            }

            Record currentRecord;
            while ((currentRecord = recordParser.nextRecord()) != null) {
                Object sql = currentRecord.getValue(sqlField);
                if (sql == null || StringUtils.isEmpty((String) sql)) {
                    throw new MalformedRecordException(format("Record had no (or null) value for Field Containing SQL: %s, FlowFile %s", sqlField, flowFile));
                }

                // Execute the statement as-is
                s.execute((String) sql);
            }
            result.routeTo(flowFile, REL_SUCCESS);
            session.getProvenanceReporter().send(flowFile, functionContext.jdbcUrl);
        }
    }

    /**
     * Generates an INSERT/UPDATE/DELETE statement for the target table from the record schema,
     * then binds every record's values as one JDBC batch and executes it. Table metadata is
     * looked up once and cached (schemaCache). Throws IllegalArgumentException for a missing
     * table name or an unrecognised statement type.
     */
    private void executeDML(ProcessContext context, ProcessSession session, FlowFile flowFile,
                            FunctionContext functionContext, RoutingResult result, Connection con,
                            RecordReader recordParser, String statementType, DMLSettings settings)
            throws IllegalArgumentException, MalformedRecordException, IOException, SQLException {

        final RecordSchema recordSchema = recordParser.getSchema();
        final ComponentLog log = getLogger();

        final String catalog = context.getProperty(CATALOG_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String updateKeys = context.getProperty(UPDATE_KEYS).evaluateAttributeExpressions(flowFile).getValue();
        final SchemaKey schemaKey = new PutDatabaseRecord.SchemaKey(catalog, schemaName, tableName);

        // Ensure the table name has been set, the generated SQL statements (and TableSchema cache) will need it
        if (StringUtils.isEmpty(tableName)) {
            throw new IllegalArgumentException(format("Cannot process %s because Table Name is null or empty", flowFile));
        }

        // Always get the primary keys if Update Keys is empty. Otherwise if we have an Insert statement first, the table will be
        // cached but the primary keys will not be retrieved, causing future UPDATE statements to not have primary keys available
        final boolean includePrimaryKeys = updateKeys == null;

        // get the database schema from the cache, if one exists. We do this in a synchronized block, rather than
        // using a ConcurrentMap because the Map that we are using is a LinkedHashMap with a capacity such that if
        // the Map grows beyond this capacity, old elements are evicted. We do this in order to avoid filling the
        // Java Heap if there are a lot of different SQL statements being generated that reference different tables.
        TableSchema tableSchema;
        synchronized (this) {
            tableSchema = schemaCache.get(schemaKey);
            if (tableSchema == null) {
                // No schema exists for this table yet. Query the database to determine the schema and put it into the cache.
                tableSchema = TableSchema.from(con, catalog, schemaName, tableName, settings.translateFieldNames, includePrimaryKeys);
                schemaCache.put(schemaKey, tableSchema);
            }
        }
        if (tableSchema == null) {
            throw new IllegalArgumentException("No table schema specified!");
        }

        // build the fully qualified table name
        final StringBuilder tableNameBuilder = new StringBuilder();
        if (catalog != null) {
            tableNameBuilder.append(catalog).append(".");
        }
        if (schemaName != null) {
            tableNameBuilder.append(schemaName).append(".");
        }
        tableNameBuilder.append(tableName);
        final String fqTableName = tableNameBuilder.toString();

        if (recordSchema == null) {
            throw new IllegalArgumentException("No record schema specified!");
        }

        final SqlAndIncludedColumns sqlHolder;
        if (INSERT_TYPE.equalsIgnoreCase(statementType)) {
            sqlHolder = generateInsert(recordSchema, fqTableName, tableSchema, settings);
        } else if (UPDATE_TYPE.equalsIgnoreCase(statementType)) {
            sqlHolder = generateUpdate(recordSchema, fqTableName, updateKeys, tableSchema, settings);
        } else if (DELETE_TYPE.equalsIgnoreCase(statementType)) {
            sqlHolder = generateDelete(recordSchema, fqTableName, tableSchema, settings);
        } else {
            throw new IllegalArgumentException(format("Statement Type %s is not valid, FlowFile %s", statementType, flowFile));
        }

        try (PreparedStatement ps = con.prepareStatement(sqlHolder.getSql())) {
            final int queryTimeout = functionContext.queryTimeout;
            try {
                ps.setQueryTimeout(queryTimeout); // timeout in seconds
            } catch (SQLException se) {
                // If the driver doesn't support query timeout, then assume it is "infinite". Allow a timeout of zero only.
                if (queryTimeout > 0) {
                    throw se;
                }
            }

            Record currentRecord;
            List<Integer> fieldIndexes = sqlHolder.getFieldIndexes();

            while ((currentRecord = recordParser.nextRecord()) != null) {
                Object[] values = currentRecord.getValues();
                if (values != null) {
                    if (fieldIndexes != null) {
                        for (int i = 0; i < fieldIndexes.size(); i++) {
                            // If DELETE type, insert the object twice because of the null check (see generateDelete for details)
                            if (DELETE_TYPE.equalsIgnoreCase(statementType)) {
                                ps.setObject(i * 2 + 1, values[fieldIndexes.get(i)]);
                                ps.setObject(i * 2 + 2, values[fieldIndexes.get(i)]);
                            } else {
                                ps.setObject(i + 1, values[fieldIndexes.get(i)]);
                            }
                        }
                    } else {
                        // If there's no index map, assume all values are included and set them in order
                        for (int i = 0; i < values.length; i++) {
                            // If DELETE type, insert the object twice because of the null check (see generateDelete for details)
                            if (DELETE_TYPE.equalsIgnoreCase(statementType)) {
                                ps.setObject(i * 2 + 1, values[i]);
                                ps.setObject(i * 2 + 2, values[i]);
                            } else {
                                ps.setObject(i + 1, values[i]);
                            }
                        }
                    }
                    ps.addBatch();
                }
            }

            log.debug("Executing query {}", new Object[]{sqlHolder});
            ps.executeBatch();
            result.routeTo(flowFile, REL_SUCCESS);
            session.getProvenanceReporter().send(flowFile, functionContext.jdbcUrl);
        }
    }

    @Override
    public void onTrigger(ProcessContext context, ProcessSessionFactory sessionFactory) throws ProcessException {
        final Boolean rollbackOnFailure =
context.getProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE).asBoolean();
        final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.SECONDS).intValue();
        final FunctionContext functionContext = new FunctionContext(rollbackOnFailure, queryTimeout);
        RollbackOnFailure.onTrigger(context, sessionFactory, functionContext, getLogger(), session -> process.onTrigger(context, session, functionContext));
    }

    /**
     * Returns the record's field names normalized via normalizeColumnName, or an empty set when
     * the schema is null.
     */
    private Set<String> getNormalizedColumnNames(final RecordSchema schema, final boolean translateFieldNames) {
        final Set<String> normalizedFieldNames = new HashSet<>();
        if (schema != null) {
            schema.getFieldNames().forEach((fieldName) -> normalizedFieldNames.add(normalizeColumnName(fieldName, translateFieldNames)));
        }
        return normalizedFieldNames;
    }

    /**
     * Builds an INSERT statement for the record schema against the given table. Only record
     * fields that map to a table column are listed (their record indexes are returned in the
     * holder); unmapped fields either fail or are skipped per the settings.
     *
     * @throws SQLDataException when a field cannot be mapped (and unmatched fields are not
     *         ignored) or when no field maps to any column
     */
    SqlAndIncludedColumns generateInsert(final RecordSchema recordSchema, final String tableName,
                                         final TableSchema tableSchema, final DMLSettings settings)
            throws IllegalArgumentException, SQLException {

        // Verify that every required (non-nullable, no-default) column is present in the record.
        final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema, settings.translateFieldNames);
        for (final String requiredColName : tableSchema.getRequiredColumnNames()) {
            final String normalizedColName = normalizeColumnName(requiredColName, settings.translateFieldNames);
            if (!normalizedFieldNames.contains(normalizedColName)) {
                String missingColMessage = "Record does not have a value for the Required column '" + requiredColName + "'";
                if (settings.failUnmappedColumns) {
                    getLogger().error(missingColMessage);
                    throw new IllegalArgumentException(missingColMessage);
                } else if (settings.warningUnmappedColumns) {
                    getLogger().warn(missingColMessage);
                }
            }
        }

        final StringBuilder sqlBuilder = new StringBuilder();
        sqlBuilder.append("INSERT INTO ");
        if (settings.quoteTableName) {
            sqlBuilder.append(tableSchema.getQuotedIdentifierString())
                    .append(tableName)
                    .append(tableSchema.getQuotedIdentifierString());
        } else {
            sqlBuilder.append(tableName);
        }
        sqlBuilder.append(" (");

        // iterate over all of the fields in the record, building the SQL statement by adding the column names
        List<String> fieldNames = recordSchema.getFieldNames();
        final List<Integer> includedColumns = new ArrayList<>();
        if (fieldNames != null) {
            int fieldCount = fieldNames.size();
            AtomicInteger fieldsFound = new AtomicInteger(0);

            for (int i = 0; i < fieldCount; i++) {
                RecordField field = recordSchema.getField(i);
                String fieldName = field.getFieldName();

                final ColumnDescription desc = tableSchema.getColumns().get(normalizeColumnName(fieldName, settings.translateFieldNames));
                if (desc == null && !settings.ignoreUnmappedFields) {
                    throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database");
                }

                if (desc != null) {
                    if (fieldsFound.getAndIncrement() > 0) {
                        sqlBuilder.append(", ");
                    }
                    if (settings.escapeColumnNames) {
                        sqlBuilder.append(tableSchema.getQuotedIdentifierString())
                                .append(desc.getColumnName())
                                .append(tableSchema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(desc.getColumnName());
                    }
                    includedColumns.add(i);
                }
            }

            // complete the SQL statements by adding ?'s for all of the values to be escaped.
            // FIX: the number of placeholders must equal the number of columns actually listed
            // (includedColumns.size()), not the total field count — otherwise skipped/unmapped
            // fields would leave extra '?' and produce invalid SQL.
            sqlBuilder.append(") VALUES (");
            sqlBuilder.append(StringUtils.repeat("?", ",", includedColumns.size()));
            sqlBuilder.append(")");

            if (fieldsFound.get() == 0) {
                throw new SQLDataException("None of the fields in the record map to the columns defined by the " + tableName + " table");
            }
        }
        return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns);
    }

    /**
     * Builds an UPDATE statement. The SET clause contains every mapped non-key field; the WHERE
     * clause is built from the Update Keys (or the table's primary keys when none are given).
     *
     * @throws SQLIntegrityConstraintViolationException when neither Update Keys nor primary keys exist
     */
    SqlAndIncludedColumns generateUpdate(final RecordSchema recordSchema, final String tableName, final String updateKeys,
                                         final TableSchema tableSchema, final DMLSettings settings)
            throws IllegalArgumentException, MalformedRecordException, SQLException {

        final Set<String> updateKeyNames;
        if (updateKeys == null) {
            updateKeyNames = tableSchema.getPrimaryKeyColumnNames();
        } else {
            updateKeyNames = new HashSet<>();
            for (final String updateKey : updateKeys.split(",")) {
                updateKeyNames.add(updateKey.trim());
            }
        }

        if (updateKeyNames.isEmpty()) {
            throw new SQLIntegrityConstraintViolationException("Table '" + tableName + "' does not have a Primary Key and no Update Keys were specified");
        }

        final StringBuilder sqlBuilder = new StringBuilder();
        sqlBuilder.append("UPDATE ");
        if (settings.quoteTableName) {
            sqlBuilder.append(tableSchema.getQuotedIdentifierString())
                    .append(tableName)
                    .append(tableSchema.getQuotedIdentifierString());
        } else {
            sqlBuilder.append(tableName);
        }

        // Create a Set of all normalized Update Key names, and ensure that there is a field in the record
        // for each of the Update Key fields.
        final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema, settings.translateFieldNames);
        final Set<String> normalizedUpdateNames = new HashSet<>();
        for (final String uk : updateKeyNames) {
            final String normalizedUK = normalizeColumnName(uk, settings.translateFieldNames);
            normalizedUpdateNames.add(normalizedUK);

            if (!normalizedFieldNames.contains(normalizedUK)) {
                String missingColMessage = "Record does not have a value for the " + (updateKeys == null ?
"Primary" : "Update") + "Key column '" + uk + "'"; if (settings.failUnmappedColumns) { getLogger().error(missingColMessage); throw new MalformedRecordException(missingColMessage); } else if (settings.warningUnmappedColumns) { getLogger().warn(missingColMessage); } } } // iterate over all of the fields in the record, building the SQL statement by adding the column names List<String> fieldNames = recordSchema.getFieldNames(); final List<Integer> includedColumns = new ArrayList<>(); if (fieldNames != null) { sqlBuilder.append(" SET "); int fieldCount = fieldNames.size(); AtomicInteger fieldsFound = new AtomicInteger(0); for (int i = 0; i < fieldCount; i++) { RecordField field = recordSchema.getField(i); String fieldName = field.getFieldName(); final String normalizedColName = normalizeColumnName(fieldName, settings.translateFieldNames); final ColumnDescription desc = tableSchema.getColumns().get(normalizeColumnName(fieldName, settings.translateFieldNames)); if (desc == null) { if (!settings.ignoreUnmappedFields) { throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database"); } else { continue; } } // Check if this column is an Update Key. If so, skip it for now. 
We will come // back to it after we finish the SET clause if (!normalizedUpdateNames.contains(normalizedColName)) { if (fieldsFound.getAndIncrement() > 0) { sqlBuilder.append(", "); } if (settings.escapeColumnNames) { sqlBuilder.append(tableSchema.getQuotedIdentifierString()) .append(desc.getColumnName()) .append(tableSchema.getQuotedIdentifierString()); } else { sqlBuilder.append(desc.getColumnName()); } sqlBuilder.append(" = ?"); includedColumns.add(i); } } // Set the WHERE clause based on the Update Key values sqlBuilder.append(" WHERE "); AtomicInteger whereFieldCount = new AtomicInteger(0); for (int i = 0; i < fieldCount; i++) { RecordField field = recordSchema.getField(i); String fieldName = field.getFieldName(); final String normalizedColName = normalizeColumnName(fieldName, settings.translateFieldNames); final ColumnDescription desc = tableSchema.getColumns().get(normalizeColumnName(fieldName, settings.translateFieldNames)); if (desc != null) { // Check if this column is a Update Key. 
If so, add it to the WHERE clause if (normalizedUpdateNames.contains(normalizedColName)) { if (whereFieldCount.getAndIncrement() > 0) { sqlBuilder.append(" AND "); } if (settings.escapeColumnNames) { sqlBuilder.append(tableSchema.getQuotedIdentifierString()) .append(normalizedColName) .append(tableSchema.getQuotedIdentifierString()); } else { sqlBuilder.append(normalizedColName); } sqlBuilder.append(" = ?"); includedColumns.add(i); } } } } return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns); } SqlAndIncludedColumns generateDelete(final RecordSchema recordSchema, final String tableName, final TableSchema tableSchema, final DMLSettings settings) throws IllegalArgumentException, MalformedRecordException, SQLDataException { final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema, settings.translateFieldNames); for (final String requiredColName : tableSchema.getRequiredColumnNames()) { final String normalizedColName = normalizeColumnName(requiredColName, settings.translateFieldNames); if (!normalizedFieldNames.contains(normalizedColName)) { String missingColMessage = "Record does not have a value for the Required column '" + requiredColName + "'"; if (settings.failUnmappedColumns) { getLogger().error(missingColMessage); throw new MalformedRecordException(missingColMessage); } else if (settings.warningUnmappedColumns) { getLogger().warn(missingColMessage); } } } final StringBuilder sqlBuilder = new StringBuilder(); sqlBuilder.append("DELETE FROM "); if (settings.quoteTableName) { sqlBuilder.append(tableSchema.getQuotedIdentifierString()) .append(tableName) .append(tableSchema.getQuotedIdentifierString()); } else { sqlBuilder.append(tableName); } // iterate over all of the fields in the record, building the SQL statement by adding the column names List<String> fieldNames = recordSchema.getFieldNames(); final List<Integer> includedColumns = new ArrayList<>(); if (fieldNames != null) { sqlBuilder.append(" WHERE "); int fieldCount 
= fieldNames.size();
            AtomicInteger fieldsFound = new AtomicInteger(0);

            for (int i = 0; i < fieldCount; i++) {
                RecordField field = recordSchema.getField(i);
                String fieldName = field.getFieldName();

                final ColumnDescription desc = tableSchema.getColumns().get(normalizeColumnName(fieldName, settings.translateFieldNames));
                if (desc == null && !settings.ignoreUnmappedFields) {
                    throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database");
                }

                if (desc != null) {
                    if (fieldsFound.getAndIncrement() > 0) {
                        sqlBuilder.append(" AND ");
                    }

                    String columnName;
                    if (settings.escapeColumnNames) {
                        columnName = tableSchema.getQuotedIdentifierString() + desc.getColumnName() + tableSchema.getQuotedIdentifierString();
                    } else {
                        columnName = desc.getColumnName();
                    }

                    // Need to build a null-safe construct for the WHERE clause, since we are using PreparedStatement and won't know if the values are null. If they are null,
                    // then the filter should be "column IS null" vs "column = null". Since we don't know whether the value is null, we can use the following construct (from NIFI-3742):
                    // (column = ? OR (column is null AND ? is null))
                    sqlBuilder.append("(");
                    sqlBuilder.append(columnName);
                    sqlBuilder.append(" = ? OR (");
                    sqlBuilder.append(columnName);
                    sqlBuilder.append(" is null AND ? is null))");
                    includedColumns.add(i);
                }
            }

            if (fieldsFound.get() == 0) {
                throw new SQLDataException("None of the fields in the record map to the columns defined by the " + tableName + " table");
            }
        }
        return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns);
    }

    /**
     * Normalizes a column/field name for matching: when translation is enabled the name is
     * upper-cased and underscores are removed; otherwise it is returned unchanged.
     * NOTE(review): toUpperCase() uses the default locale — under e.g. a Turkish locale 'i'
     * does not map to 'I'; confirm whether Locale.ROOT was intended.
     */
    private static String normalizeColumnName(final String colName, final boolean translateColumnNames) {
        return colName == null ? null : (translateColumnNames ? colName.toUpperCase().replace("_", "") : colName);
    }

    /**
     * Cached description of a database table: its columns (keyed by normalized name), the
     * required (non-nullable, no-default, non-auto-increment) columns, the primary-key column
     * names, and the driver's identifier quote string.
     */
    static class TableSchema {
        private List<String> requiredColumnNames;
        private Set<String> primaryKeyColumnNames;
        private Map<String, ColumnDescription> columns;
        private String quotedIdentifierString;

        private TableSchema(final List<ColumnDescription> columnDescriptions, final boolean translateColumnNames,
                            final Set<String> primaryKeyColumnNames, final String quotedIdentifierString) {
            this.columns = new HashMap<>();
            this.primaryKeyColumnNames = primaryKeyColumnNames;
            this.quotedIdentifierString = quotedIdentifierString;
            this.requiredColumnNames = new ArrayList<>();

            for (final ColumnDescription desc : columnDescriptions) {
                columns.put(normalizeColumnName(desc.columnName, translateColumnNames), desc);
                if (desc.isRequired()) {
                    requiredColumnNames.add(desc.columnName);
                }
            }
        }

        public Map<String, ColumnDescription> getColumns() {
            return columns;
        }

        public List<String> getRequiredColumnNames() {
            return requiredColumnNames;
        }

        public Set<String> getPrimaryKeyColumnNames() {
            return primaryKeyColumnNames;
        }

        public String getQuotedIdentifierString() {
            return quotedIdentifierString;
        }

        /**
         * Queries DatabaseMetaData for the table's columns (and, optionally, primary keys)
         * and builds a TableSchema from the result.
         */
        public static TableSchema from(final Connection conn, final String catalog, final String schema, final String tableName,
                                       final boolean translateColumnNames, final boolean includePrimaryKeys) throws SQLException {
            final DatabaseMetaData dmd = conn.getMetaData();

            try (final ResultSet colrs = dmd.getColumns(catalog, schema, tableName, "%")) {
                final List<ColumnDescription> cols = new ArrayList<>();
                while (colrs.next()) {
                    final ColumnDescription col = ColumnDescription.from(colrs);
                    cols.add(col);
                }

                final Set<String> primaryKeyColumns = new HashSet<>();
                if (includePrimaryKeys) {
                    // NOTE(review): the schema argument is passed as null here (unlike getColumns
                    // above) — confirm whether the schema pattern should be forwarded.
                    try (final ResultSet pkrs = dmd.getPrimaryKeys(catalog, null, tableName)) {
                        while (pkrs.next()) {
                            final String colName = pkrs.getString("COLUMN_NAME");
                            primaryKeyColumns.add(normalizeColumnName(colName, translateColumnNames));
                        }
                    }
                }

                return new TableSchema(cols, translateColumnNames, primaryKeyColumns, dmd.getIdentifierQuoteString());
            }
        }
    }

    /**
     * One column from DatabaseMetaData.getColumns: name, java.sql.Types code, size, and whether
     * a value is required (not nullable, no default, not auto-increment).
     */
    protected static class ColumnDescription {
        private final String columnName;
        private final int dataType;
        private final boolean required;
        private final Integer columnSize;

        public ColumnDescription(final String columnName, final int dataType, final boolean required, final Integer columnSize) {
            this.columnName = columnName;
            this.dataType = dataType;
            this.required = required;
            this.columnSize = columnSize;
        }

        public int getDataType() {
            return dataType;
        }

        public Integer getColumnSize() {
            return columnSize;
        }

        public String getColumnName() {
            return columnName;
        }

        public boolean isRequired() {
            return required;
        }

        public static ColumnDescription from(final ResultSet resultSet) throws SQLException {
            // Collect the available metadata column labels so optional columns (e.g.
            // IS_AUTOINCREMENT, absent in some drivers) can be probed safely.
            final ResultSetMetaData md = resultSet.getMetaData();
            List<String> columns = new ArrayList<>();
            for (int i = 1; i < md.getColumnCount() + 1; i++) {
                columns.add(md.getColumnName(i));
            }

            final String columnName = resultSet.getString("COLUMN_NAME");
            final int dataType = resultSet.getInt("DATA_TYPE");
            final int colSize = resultSet.getInt("COLUMN_SIZE");

            // FIX: null-safe check. Per JDBC, IS_NULLABLE is "YES"/"NO" or empty when unknown,
            // but some drivers return null — treat null like empty (unknown => nullable)
            // instead of throwing a NullPointerException.
            final String nullableValue = resultSet.getString("IS_NULLABLE");
            final boolean isNullable = nullableValue == null || nullableValue.isEmpty() || "YES".equalsIgnoreCase(nullableValue);

            final String defaultValue = resultSet.getString("COLUMN_DEF");

            String autoIncrementValue = "NO";
            if (columns.contains("IS_AUTOINCREMENT")) {
                autoIncrementValue = resultSet.getString("IS_AUTOINCREMENT");
            }
            final boolean isAutoIncrement = "YES".equalsIgnoreCase(autoIncrementValue);

            // A column demands a record value only if it is non-nullable, not auto-generated,
            // and has no default.
            final boolean required = !isNullable && !isAutoIncrement && defaultValue == null;
            return new ColumnDescription(columnName, dataType, required, colSize == 0 ? null : colSize);
        }
    }

    /**
     * Cache key for schemaCache: catalog + schema + table (table name is mandatory).
     */
    static class SchemaKey {
        private final String catalog;
        private final String schemaName;
        private final String tableName;

        public SchemaKey(final String catalog, final String schemaName, final String tableName) {
            this.catalog = catalog;
            this.schemaName = schemaName;
            this.tableName = tableName;
        }

        @Override
        public int hashCode() {
            int result = catalog != null ? catalog.hashCode() : 0;
            result = 31 * result + (schemaName != null ? schemaName.hashCode() : 0);
            result = 31 * result + tableName.hashCode();
            return result;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }

            SchemaKey schemaKey = (SchemaKey) o;

            if (catalog != null ? !catalog.equals(schemaKey.catalog) : schemaKey.catalog != null) {
                return false;
            }
            if (schemaName != null ? !schemaName.equals(schemaKey.schemaName) : schemaKey.schemaName != null) {
                return false;
            }
            return tableName.equals(schemaKey.tableName);
        }
    }

    /**
     * A holder class for a SQL prepared statement and a BitSet indicating which columns are being updated (to determine which values from the record to set on the statement)
     * A value of null for getIncludedColumns indicates that all columns/fields should be included.
     */
    static class SqlAndIncludedColumns {
        String sql;
        List<Integer> fieldIndexes;

        /**
         * Constructor
         *
         * @param sql          The prepared SQL statement (including parameters notated by ? )
         * @param fieldIndexes A List of record indexes. The index of the list is the location of the record field in the SQL prepared statement
         */
        public SqlAndIncludedColumns(String sql, List<Integer> fieldIndexes) {
            this.sql = sql;
            this.fieldIndexes = fieldIndexes;
        }

        public String getSql() {
            return sql;
        }

        public List<Integer> getFieldIndexes() {
            return fieldIndexes;
        }
    }
}
/*
 * Copyright (c) 2005-2010 Grameen Foundation USA
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 * See also http://www.apache.org/licenses/LICENSE-2.0.html for an
 * explanation of the license and how it is applied.
 */

package org.mifos.framework.components.tabletag;

import static junitx.framework.StringAssert.assertContains;
import static junitx.framework.StringAssert.assertNotContains;
import static org.mifos.framework.TestUtils.assertWellFormedFragment;

import java.util.Locale;

import javax.servlet.jsp.JspException;

import junit.framework.Assert;
import junit.framework.TestCase;

import org.mifos.accounts.loan.util.helpers.LoanConstants;
import org.mifos.accounts.loan.util.helpers.RequestConstants;
import org.mifos.config.Localization;
import org.mifos.customers.business.CustomerSearch;
import org.mifos.customers.util.helpers.CustomerSearchConstants;
import org.mifos.framework.exceptions.TableTagException;
import org.mifos.framework.exceptions.TableTagTypeParserException;
import org.mifos.framework.util.helpers.SearchObject;
import org.testng.annotations.Test;

/**
 * Unit tests for the table tag's HTML rendering (no-results output, end-table markup,
 * type-file parsing and display-text generation).
 */
@Test(groups={"unit", "fastTestsSuite"}, dependsOnGroups={"productMixTestSuite"})
public class TableTagTest extends TestCase {

    // "All Branches" is shown (and office names suppressed) when the all-branches office id is used.
    public void testNoResults() throws Exception {
        String html = new TableTag("single").noResults("default-office", TableTag.ALL_BRANCHES, "Rock&Roll")
                .getOutput();
        assertContains("No results found", html);
        // The search term must be HTML-escaped in the output.
        assertContains("Rock&amp;Roll", html);
        assertContains("All Branches", html);
        assertNotContains("office-one", html);
        assertNotContains("default-office", html);
        assertWellFormedFragment(html);
    }

    // With a specific office id, the office name is shown instead of "All Branches".
    public void testNoResultsMultiple() throws Exception {
        SearchObject searchObject = new SearchObject();
        searchObject.addToSearchNodeMap("dummy-search-term-key", "Rock");
        searchObject.addToSearchNodeMap(CustomerSearchConstants.CUSTOMER_SEARCH_OFFICE_ID, "office-one");
        String html = new TableTag("multiple").noResults("the-office-name", "office-one", "Rock").getOutput();
        assertContains("No results found", html);
        assertContains("Rock", html);
        assertNotContains("All Branches", html);
        assertContains("the-office-name", html);
        assertNotContains("office-one", html);
        assertWellFormedFragment(html);
    }

    // An empty office id also falls through to the office-name rendering, not "All Branches".
    public void testNoResultsNotAllBranches() throws Exception {
        SearchObject searchObject = new SearchObject();
        searchObject.addToSearchNodeMap("dummy-search-term-key", "Rock");
        searchObject.addToSearchNodeMap(CustomerSearchConstants.CUSTOMER_SEARCH_OFFICE_ID, "");
        String html = new TableTag("multiple").noResults("the-office-name", "", "Rock").getOutput();
        assertContains("No results found", html);
        assertContains("Rock", html);
        assertNotContains("All Branches", html);
        assertContains("the-office-name", html);
        assertWellFormedFragment(html);
    }

    // The closing markup differs in spacer-image size depending on the boolean flag.
    public void testCreateEndTable() {
        StringBuilder stringBuilder = new StringBuilder();
        new TableTag("single").createEndTable(stringBuilder, true);
        assertContains("<img src=\"pages/framework/images/trans.gif \" width=\"10\" height=\"5\"></td></tr>", stringBuilder.toString());
        new TableTag("single").createEndTable(stringBuilder, false);
        assertContains("<img src=\"pages/framework/images/trans.gif \" width=\"5\" height=\"3\"></td></tr>", stringBuilder.toString());
    }

    // getSingleFile must fail (JspException) when no tag name has been set.
    public void testGetSingleFileFailure() throws Exception {
        try {
            new TableTag("single").getSingleFile();
            fail();
        } catch (JspException e) {
            assertTrue(true);
        }
    }

    public void testGetSingleFile() throws Exception {
        Locale locale =
Localization.getInstance().getMainLocale(); TableTag tableTag = new TableTag("single"); tableTag.setName("viewUsers"); Assert.assertEquals("org/mifos/framework/util/resources/tabletag/viewUsers.xml", tableTag.getSingleFile()); } public void testParser() throws Exception { Files files = TypeParser.getInstance().parser("org/mifos/framework/util/resources/tabletag/type.xml"); Assert.assertNotNull(files); FileName[] file = files.getFileName(); Assert.assertNotNull(file); Assert.assertEquals("1", file[0].getName()); Assert.assertEquals("org/mifos/framework/util/resources/tabletag/CustomerClient.xml", file[0].getPath()); } public void testGetDisplayText() throws Exception { Assert.assertEquals("<span class=\"fontnormalbold\">a</span>,<span class=\"fontnormalbold\">b</span>", Text .getDisplayText(new String[] { "a", "b" }, "true")); Assert.assertEquals("", Text.getDisplayText(new String[] { "", "" }, "true")); Assert.assertEquals("<span class=\"fontnormal\">a</span>,<span class=\"fontnormal\">b</span>", Text.getDisplayText( new String[] { "a", "b" }, "false")); } public void testGetImage() throws Exception { Locale locale = Localization.getInstance().getMainLocale(); CustomerSearch customerSearch = new CustomerSearch(); Assert.assertEquals( "<span class=\"fontnormal\">&nbsp;<img src=pages/framework/images/status_yellow.gif width=\"8\" height=\"9\"></span><span class=\"fontnormal\">&nbsp;PartialApplication</span>", Text.getImage(customerSearch, "1", locale)); customerSearch.setCustomerType(Short.valueOf("4")); Assert.assertEquals( "<span class=\"fontnormal\">&nbsp;<img src=pages/framework/images/status_yellow.gif width=\"8\" height=\"9\"></span><span class=\"fontnormal\">&nbsp;Pending Approval</span>", Text.getImage(customerSearch, "2", locale)); customerSearch.setCustomerType(Short.valueOf("6")); Assert.assertEquals( "<span class=\"fontnormal\">&nbsp;<img src=pages/framework/images/status_yellow.gif width=\"8\" height=\"9\"></span><span class=\"fontnormal\">&nbsp;Partial 
Application</span>", Text.getImage(customerSearch, "13", locale)); } public void testTableTagParser() throws Exception { Table table = TableTagParser.getInstance().parser("org/mifos/framework/util/resources/tabletag/viewUsers.xml"); Path path[] = table.getPath(); for (Path element : path) { Assert.assertEquals("PersonAction.do", element.getAction()); Assert.assertEquals("search_success", element.getForwardkey()); Assert.assertEquals("viewUsers", element.getKey()); } for (Row row : table.getRow()) { Assert.assertEquals("false", row.getTdrequired()); int i = 0; for (Column column : row.getColumn()) { if (i++ == 1) { Assert.assertEquals("PersonAction.do", column.getAction()); Assert.assertEquals("true", column.getBoldlabel()); Assert.assertEquals(null, column.getCheckLinkOptionalRequired()); Assert.assertEquals("false", column.getImage()); Assert.assertEquals("false", column.getIsLinkOptional()); Assert.assertEquals("/", column.getLabel()); Assert.assertEquals("string", column.getLabeltype()); DisplayName displayName = column.getDisplayname(); Assert.assertEquals("true", displayName.getBold()); for (Fragment fragment : displayName.getFragment()) { Assert.assertEquals("true", fragment.getBold()); Assert.assertEquals("personnelName", fragment.getFragmentName()); Assert.assertEquals("method", fragment.getFragmentType()); Assert.assertEquals("false", fragment.getItalic()); } Parameters parameters = column.getParameters(); int j = 0; for (Param param : parameters.getParam()) { if (j++ == 1) { Assert.assertEquals("method", param.getParameterName()); Assert.assertEquals("get", param.getParameterValue()); Assert.assertEquals("string", param.getParameterValueType()); } } } } } PageRequirements pageRequirements = table.getPageRequirements(); Assert.assertEquals("false", pageRequirements.getBlanklinerequired()); Assert.assertEquals("true", pageRequirements.getBluelineRequired()); Assert.assertEquals("false", pageRequirements.getBottombluelineRequired()); 
Assert.assertEquals("false", pageRequirements.getFlowRequired()); Assert.assertEquals("false", pageRequirements.getHeadingRequired()); Assert.assertEquals("true", pageRequirements.getNumbersRequired()); Assert.assertEquals("true", pageRequirements.getTopbluelineRequired()); Assert.assertEquals("false", pageRequirements.getValignnumbers()); } public void testHelperCache() throws Exception { TableTag tableTag = new TableTag("single"); tableTag.setName("viewUsers"); Assert.assertNotNull(tableTag.helperCache("org/mifos/framework/util/resources/tabletag/viewUsers.xml", "viewUsers")); } public void testPageScroll() { Locale locale = Localization.getInstance().getMainLocale(); Assert.assertEquals("<a href='hRef?method=load&currentFlowKey=1234&current=1'>text</a>", PageScroll.getAnchor("hRef", "text", "load", "1234", 1, null)); Assert.assertEquals( "<tr><td width=\"20%\" class=\"fontnormalboldgray\">Previous</td><td width=\"40%\" align=\"center\" class=\"fontnormalbold\">Results 1-10 of 100 </td><td width=\"20%\" class=\"fontnormalbold\"><a href='loaad?method=searchNext&currentFlowKey=1234&current=2'>Next</a></td></tr>", PageScroll.getPages(1, 10, 100, "loaad", "1234", locale, null)); Assert.assertEquals( "<tr><td width=\"20%\" class=\"fontnormalbold\"><a href='loaad?method=searchPrev&currentFlowKey=1234&current=4'>Previous</a></td><td width=\"40%\" align=\"center\" class=\"fontnormalbold\">Results 41-50 of 100 </td><td width=\"20%\" class=\"fontnormalbold\"><a href='loaad?method=searchNext&currentFlowKey=1234&current=6'>Next</a></td></tr>", PageScroll.getPages(5, 10, 100, "loaad", "1234", locale, null)); Assert.assertEquals( "<tr><td width=\"20%\" class=\"fontnormalboldgray\">Previous</td><td width=\"40%\" align=\"center\" class=\"fontnormalbold\">Results 1-3 of 3 </td><td width=\"20%\" align=\"right\" class=\"fontnormalboldgray\">Next</td></tr>", PageScroll.getPages(1, 10, 3, "loaad", "1234", locale, null)); } public void testPageScrollgetAnchor() { Locale locale = 
Localization.getInstance().getMainLocale(); Assert.assertEquals("<a href='hRef?method=load&currentFlowKey=1234&current=1'>text</a>", PageScroll.getAnchor("hRef", "text", "load", "1234", 1, null)); Assert.assertEquals("<a href='hRef?method=load&currentFlowKey=1234&current=1&" + RequestConstants.PERSPECTIVE + "=" + LoanConstants.PERSPECTIVE_VALUE_REDO_LOAN + "'>text</a>", PageScroll.getAnchor("hRef", "text", "load", "1234", 1, LoanConstants.PERSPECTIVE_VALUE_REDO_LOAN)); } public void testLink() { Assert.assertEquals("", Link.createLink(new String[] { "" }, null, null, null, null, null, null)); Assert.assertEquals( "<span class=\"fontnormalbold\"><a href= \"load?X&currentFlowKey=1234&randomNUm=9999\">a</a></span>,<span class=\"fontnormalbold\"><a href= \"load?Y&currentFlowKey=1234&randomNUm=9999\">b</a></span>", Link.createLink(new String[] { "a", "b" }, new String[] { "X", "Y" }, "true", "load", "fontnormalbold", "1234", "9999")); Assert.assertEquals( "<span class=\"headingblue\"><a href= \"load?X\"&currentFlowKey=1234&randomNUm=9999class=\"headingblue\">a</a></span>,<span class=\"headingblue\"><a href= \"load?Y\"&currentFlowKey=1234&randomNUm=9999class=\"headingblue\">b</a></span>", Link.createLink(new String[] { "a", "b" }, new String[] { "X", "Y" }, "true", "load", "headingblue", "1234", "9999")); Assert.assertEquals( "<span><a href= \"load?X&currentFlowKey=1234&randomNUm=9999\">a</a></span>,<span><a href= \"load?Y&currentFlowKey=1234&randomNUm=9999\">b</a></span>", Link.createLink(new String[] { "a", "b" }, new String[] { "X", "Y" }, "true", "load", null, "1234", "9999")); } public void testTableTagTypeParserException() throws Exception { try { TypeParser.getInstance().parser("org/mifos/framework/components/tabletag/type.xml"); Assert.fail(); } catch (TableTagTypeParserException tttpe) { Assert.assertEquals("exception.framework.SystemException.TypeParseException", tttpe.getKey()); } } public void testTableTagException() throws Exception { try { Locale locale = 
Localization.getInstance().getMainLocale(); Text.getImage(this, "name", locale); Assert.fail(); } catch (TableTagException tte) { Assert.assertEquals("exception.framework.TableTagException", tte.getKey()); } } public void testTabletag() { TableTag tableTag = new TableTag(); tableTag.setClassName("myclass"); Assert.assertEquals("myclass", tableTag.getClassName()); tableTag.setType("mytype"); Assert.assertEquals("mytype", tableTag.getType()); tableTag.setName("myname"); Assert.assertEquals("myname", tableTag.getName()); tableTag.setBorder("myborder"); Assert.assertEquals("myborder", tableTag.getBorder()); tableTag.setCellpadding("mycellpading"); Assert.assertEquals("mycellpading", tableTag.getCellpadding()); tableTag.setCellspacing("mycellspacing"); Assert.assertEquals("mycellspacing", tableTag.getCellspacing()); tableTag.setWidth("mywidth"); Assert.assertEquals("mywidth", tableTag.getWidth()); tableTag.setKey("mykey"); Assert.assertEquals("mykey", tableTag.getKey()); tableTag.release(); Assert.assertEquals(1, tableTag.current); Assert.assertEquals(0, tableTag.size); } public void testSearchObject() throws Exception { SearchObject searchObject = new SearchObject(); searchObject.addSearchTermAndOffice("newSearchTerm", "1"); Assert.assertEquals("newSearchTerm", searchObject.getFromSearchNodeMap("dummy-search-term-key")); searchObject.setSearchNodeMap(null); Assert.assertNull(searchObject.getSearchNodeMap()); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.io.util;

import java.io.DataOutput;
import java.io.IOException;
import java.io.UTFDataFormatException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;

import org.apache.cassandra.utils.memory.MemoryUtil;

import com.google.common.base.Function;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for DataOutput implementations that does not have an optimized implementations of Plus methods
 * and does no buffering.
 * <p>
 * Unlike BufferedDataOutputStreamPlus this is capable of operating as an unbuffered output stream.
 * Currently necessary because SequentialWriter implements its own buffering along with mark/reset/truncate.
 * </p>
 */
public abstract class UnbufferedDataOutputStreamPlus extends DataOutputStreamPlus
{
    private static final Logger LOGGER = LoggerFactory.getLogger(UnbufferedDataOutputStreamPlus.class);

    // two zero bytes: the modified-UTF-8 length prefix for an empty string (see writeUTF)
    private static final byte[] zeroBytes = new byte[2];

    protected UnbufferedDataOutputStreamPlus()
    {
        super();
    }

    protected UnbufferedDataOutputStreamPlus(WritableByteChannel channel)
    {
        super(channel);
    }

    /*
        !! DataOutput methods below are copied from the implementation in Apache Harmony RandomAccessFile.
     */

    /**
     * Writes the entire contents of the byte array <code>buffer</code> to
     * this RandomAccessFile starting at the current file pointer.
     *
     * @param buffer the buffer to be written.
     * @throws IOException If an error occurs trying to write to this RandomAccessFile.
     */
    public void write(byte[] buffer) throws IOException
    {
        write(buffer, 0, buffer.length);
    }

    /**
     * Writes <code>count</code> bytes from the byte array <code>buffer</code>
     * starting at <code>offset</code> to this RandomAccessFile starting at
     * the current file pointer.
     *
     * @param buffer the bytes to be written
     * @param offset offset in buffer to get bytes
     * @param count  number of bytes in buffer to write
     * @throws IOException               If an error occurs attempting to write to this
     *                                   RandomAccessFile.
     * @throws IndexOutOfBoundsException If offset or count are outside of bounds.
     */
    public abstract void write(byte[] buffer, int offset, int count) throws IOException;

    /**
     * Writes the specified byte <code>oneByte</code> to this RandomAccessFile
     * starting at the current file pointer. Only the low order byte of
     * <code>oneByte</code> is written.
     *
     * @param oneByte the byte to be written
     * @throws IOException If an error occurs attempting to write to this
     *                     RandomAccessFile.
     */
    public abstract void write(int oneByte) throws IOException;

    /**
     * Writes a boolean to this output stream.
     *
     * @param val the boolean value to write to the OutputStream
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public final void writeBoolean(boolean val) throws IOException
    {
        write(val ? 1 : 0);
    }

    /**
     * Writes a 8-bit byte to this output stream.
     *
     * @param val the byte value to write to the OutputStream
     * @throws java.io.IOException If an error occurs attempting to write to this
     *                             DataOutputStream.
     */
    public final void writeByte(int val) throws IOException
    {
        write(val & 0xFF);
    }

    /**
     * Writes the low order 8-bit bytes from a String to this output stream.
     *
     * @param str the String containing the bytes to write to the OutputStream
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public final void writeBytes(String str) throws IOException
    {
        byte bytes[] = new byte[str.length()];
        for (int index = 0; index < str.length(); index++)
        {
            // high byte of each char is discarded by design (DataOutput.writeBytes contract)
            bytes[index] = (byte) (str.charAt(index) & 0xFF);
        }
        write(bytes);
    }

    /**
     * Writes the specified 16-bit character to the OutputStream. Only the lower
     * 2 bytes are written with the higher of the 2 bytes written first. This
     * represents the Unicode value of val.
     *
     * @param val the character to be written
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public final void writeChar(int val) throws IOException
    {
        write((val >>> 8) & 0xFF);
        write((val >>> 0) & 0xFF);
    }

    /**
     * Writes the specified 16-bit characters contained in str to the
     * OutputStream. Only the lower 2 bytes of each character are written with
     * the higher of the 2 bytes written first. This represents the Unicode
     * value of each character in str.
     *
     * @param str the String whose characters are to be written.
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public final void writeChars(String str) throws IOException
    {
        byte newBytes[] = new byte[str.length() * 2];
        for (int index = 0; index < str.length(); index++)
        {
            int newIndex = index == 0 ? index : index * 2;
            newBytes[newIndex] = (byte) ((str.charAt(index) >> 8) & 0xFF);
            newBytes[newIndex + 1] = (byte) (str.charAt(index) & 0xFF);
        }
        write(newBytes);
    }

    /**
     * Writes a 64-bit double to this output stream. The resulting output is the
     * 8 bytes resulting from calling Double.doubleToLongBits().
     *
     * @param val the double to be written.
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public final void writeDouble(double val) throws IOException
    {
        writeLong(Double.doubleToLongBits(val));
    }

    /**
     * Writes a 32-bit float to this output stream. The resulting output is the
     * 4 bytes resulting from calling Float.floatToIntBits().
     *
     * @param val the float to be written.
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public final void writeFloat(float val) throws IOException
    {
        writeInt(Float.floatToIntBits(val));
    }

    /**
     * Writes a 32-bit int to this output stream. The resulting output is the 4
     * bytes, highest order first, of val.
     *
     * @param val the int to be written.
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public void writeInt(int val) throws IOException
    {
        write((val >>> 24) & 0xFF);
        write((val >>> 16) & 0xFF);
        write((val >>> 8) & 0xFF);
        write((val >>> 0) & 0xFF);
    }

    /**
     * Writes a 64-bit long to this output stream. The resulting output is the 8
     * bytes, highest order first, of val.
     *
     * @param val the long to be written.
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public void writeLong(long val) throws IOException
    {
        write((int) (val >>> 56) & 0xFF);
        write((int) (val >>> 48) & 0xFF);
        write((int) (val >>> 40) & 0xFF);
        write((int) (val >>> 32) & 0xFF);
        write((int) (val >>> 24) & 0xFF);
        write((int) (val >>> 16) & 0xFF);
        write((int) (val >>> 8) & 0xFF);
        write((int) (val >>> 0) & 0xFF);
    }

    /**
     * Writes the specified 16-bit short to the OutputStream. Only the lower 2
     * bytes are written with the higher of the 2 bytes written first.
     *
     * @param val the short to be written
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public void writeShort(int val) throws IOException
    {
        writeChar(val);
    }

    /**
     * Writes the specified String out in UTF format to the provided DataOutput.
     * The encoding is "modified UTF-8": a 2-byte big-endian length prefix
     * followed by at most 65535 encoded bytes.
     *
     * @param str the String to be written in UTF format.
     * @param out the DataOutput to write the UTF encoded string to
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public static void writeUTF(String str, DataOutput out) throws IOException
    {
        int length = str.length();
        if (length == 0)
        {
            // empty string: just the 0x0000 length prefix
            out.write(zeroBytes);
            return;
        }

        // First pass: compute the encoded byte length (1, 2 or 3 bytes per char);
        // maxSize tracks the widest encoding seen so the chunking below is conservative.
        int utfCount = 0;
        int maxSize = 2;
        for (int i = 0 ; i < length ; i++)
        {
            int ch = str.charAt(i);
            if ((ch > 0) & (ch <= 127))
                utfCount += 1;
            else if (ch <= 2047)
                utfCount += 2;
            else
                utfCount += maxSize = 3;
        }

        // modified UTF-8 caps the payload at 65535 bytes (2-byte length prefix)
        if (utfCount > 65535)
        {
            LOGGER.warn("Write UTF string with length {} > 65535 bytes", utfCount);
            throw new UTFDataFormatException(); //$NON-NLS-1$
        }

        // may return a buffer smaller than requested; both branches below chunk accordingly
        byte[] utfBytes = retrieveTemporaryBuffer(utfCount + 2);

        int bufferLength = utfBytes.length;
        if (utfCount == length)
        {
            // Fast path: every char is single-byte ASCII, so chars map 1:1 to bytes.
            utfBytes[0] = (byte) (utfCount >> 8);
            utfBytes[1] = (byte) utfCount;
            int firstIndex = 2;
            for (int offset = 0 ; offset < length ; offset += bufferLength)
            {
                int runLength = Math.min(bufferLength - firstIndex, length - offset) + firstIndex;
                // shift offset back so charAt(offset + i) lines up once i starts at firstIndex
                offset -= firstIndex;
                for (int i = firstIndex ; i < runLength; i++)
                    utfBytes[i] = (byte) str.charAt(offset + i);
                out.write(utfBytes, 0, runLength);
                firstIndex = 0;   // only the first chunk carries the 2-byte length prefix
            }
        }
        else
        {
            // General path: multi-byte chars; encode in runs that fit the buffer,
            // flushing whenever the next run would not be guaranteed to fit.
            int utfIndex = 2;
            int offset = 0;
            utfBytes[0] = (byte) (utfCount >> 8);
            utfBytes[1] = (byte) utfCount;

            while (length > 0)
            {
                // worst-case chars that still fit in the remaining buffer space
                int charRunLength = (utfBytes.length - utfIndex) / maxSize;
                if (charRunLength < 128 && charRunLength < length)
                {
                    out.write(utfBytes, 0, utfIndex);
                    utfIndex = 0;
                }
                if (charRunLength > length)
                    charRunLength = length;

                for (int i = 0 ; i < charRunLength ; i++)
                {
                    char ch = str.charAt(offset + i);
                    if ((ch > 0) && (ch <= 127))
                    {
                        utfBytes[utfIndex++] = (byte) ch;
                    }
                    else if (ch <= 2047)
                    {
                        utfBytes[utfIndex++] = (byte) (0xc0 | (0x1f & (ch >> 6)));
                        utfBytes[utfIndex++] = (byte) (0x80 | (0x3f & ch));
                    }
                    else
                    {
                        utfBytes[utfIndex++] = (byte) (0xe0 | (0x0f & (ch >> 12)));
                        utfBytes[utfIndex++] = (byte) (0x80 | (0x3f & (ch >> 6)));
                        utfBytes[utfIndex++] = (byte) (0x80 | (0x3f & ch));
                    }
                }

                offset += charRunLength;
                length -= charRunLength;
            }
            out.write(utfBytes, 0, utfIndex);
        }
    }

    /**
     * Writes the specified String out in UTF format.
     *
     * @param str the String to be written in UTF format.
     * @throws IOException If an error occurs attempting to write to this
     *                     DataOutputStream.
     */
    public final void writeUTF(String str) throws IOException
    {
        writeUTF(str, this);
    }

    // ByteBuffer to use for defensive copies
    private final ByteBuffer hollowBufferD = MemoryUtil.getHollowDirectByteBuffer();

    @Override
    public void write(ByteBuffer buf) throws IOException
    {
        if (buf.hasArray())
        {
            write(buf.array(), buf.arrayOffset() + buf.position(), buf.remaining());
        }
        else
        {
            assert buf.isDirect();
            // duplicate into the hollow buffer so the caller's position/limit are untouched
            MemoryUtil.duplicateDirectByteBuffer(buf, hollowBufferD);

            while (hollowBufferD.hasRemaining())
                channel.write(hollowBufferD);
        }
    }

    public void write(Memory memory, long offset, long length) throws IOException
    {
        for (ByteBuffer buffer : memory.asByteBuffers(offset, length))
            write(buffer);
    }

    @Override
    public <R> R applyToChannel(Function<WritableByteChannel, R> f) throws IOException
    {
        return f.apply(channel);
    }
}
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.jvm.java;

import com.facebook.buck.jvm.core.SuggestBuildRules;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.BinaryBuildRule;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.CommandTool;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.facebook.buck.step.fs.SymlinkFileStep;
import com.facebook.buck.step.fs.WriteFileStep;
import com.facebook.buck.zip.ZipStep;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Sets;
import com.google.common.io.ByteSource;
import com.google.common.io.Resources;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.Set;

/**
 * Build a fat JAR that packages an inner JAR along with any required native libraries.
 */
public class JarFattener extends AbstractBuildRule implements BinaryBuildRule {

  // Resource name under which the wrapped JAR is stored inside the fat JAR.
  private static final String FAT_JAR_INNER_JAR = "inner.jar";

  // Resource directory under which native libraries are stored inside the fat JAR.
  private static final String FAT_JAR_NATIVE_LIBRARY_RESOURCE_ROOT = "nativelibs";

  // Bootstrap sources compiled into the fat JAR so it can unpack itself at runtime.
  public static final ImmutableList<String> FAT_JAR_SRC_RESOURCES =
      ImmutableList.of(
          "com/facebook/buck/jvm/java/FatJar.java",
          "com/facebook/buck/util/exportedfiles/Nullable.java",
          "com/facebook/buck/util/exportedfiles/Preconditions.java"
      );
  public static final String FAT_JAR_MAIN_SRC_RESOURCE =
      "com/facebook/buck/jvm/java/FatJarMain.java";

  private final JavacOptions javacOptions;
  @AddToRuleKey
  private final SourcePath innerJar;
  @AddToRuleKey
  private final ImmutableMap<String, SourcePath> nativeLibraries;
  // Final fat-JAR output path, derived from the build target's scratch dir.
  private final Path output;

  public JarFattener(
      BuildRuleParams params,
      SourcePathResolver resolver,
      JavacOptions javacOptions,
      SourcePath innerJar,
      ImmutableMap<String, SourcePath> nativeLibraries) {
    super(params, resolver);
    this.javacOptions = javacOptions;
    this.innerJar = innerJar;
    this.nativeLibraries = nativeLibraries;
    this.output =
        BuildTargets.getScratchPath(getBuildTarget(), "%s")
            .resolve(getBuildTarget().getShortName() + ".jar");
  }

  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context,
      BuildableContext buildableContext) {

    ImmutableList.Builder<Step> steps = ImmutableList.builder();

    Path outputDir = getOutputDirectory();
    Path fatJarDir = outputDir.resolve("fat-jar-directory");
    steps.add(new MakeCleanDirectoryStep(getProjectFilesystem(), outputDir));

    // Map of the system-specific shared library name to its resource name as a string.
    ImmutableMap.Builder<String, String> sonameToResourceMapBuilder = ImmutableMap.builder();
    for (Map.Entry<String, SourcePath> entry : nativeLibraries.entrySet()) {
      String resource = FAT_JAR_NATIVE_LIBRARY_RESOURCE_ROOT + "/" + entry.getKey();
      sonameToResourceMapBuilder.put(entry.getKey(), resource);
      steps.add(new MkdirStep(getProjectFilesystem(), fatJarDir.resolve(resource).getParent()));
      // Symlink (not copy) each native library into its resource location.
      steps.add(
          new SymlinkFileStep(
              getProjectFilesystem(),
              getResolver().getAbsolutePath(entry.getValue()),
              fatJarDir.resolve(resource),
              /* useAbsolutePaths */ true));
    }
    ImmutableMap<String, String> sonameToResourceMap = sonameToResourceMapBuilder.build();

    // Grab the source path representing the fat jar info resource.
    Path fatJarInfo = fatJarDir.resolve(FatJar.FAT_JAR_INFO_RESOURCE);
    steps.add(writeFatJarInfo(fatJarInfo, sonameToResourceMap));

    // Build up the resource and src collections.
    Set<Path> javaSourceFilePaths = Sets.newHashSet();
    for (String srcResource : FAT_JAR_SRC_RESOURCES) {
      Path fatJarSource = outputDir.resolve(Paths.get(srcResource).getFileName());
      javaSourceFilePaths.add(fatJarSource);
      steps.add(writeFromResource(fatJarSource, srcResource));
    }

    // The main-class bootstrap source is handled separately from the helper sources.
    Path fatJarMainSource = outputDir.resolve(Paths.get(FAT_JAR_MAIN_SRC_RESOURCE).getFileName());
    javaSourceFilePaths.add(fatJarMainSource);
    steps.add(writeFromResource(fatJarMainSource, FAT_JAR_MAIN_SRC_RESOURCE));

    // Symlink the inner JAR into its place in the fat JAR.
    steps.add(
        new MkdirStep(
            getProjectFilesystem(),
            fatJarDir.resolve(FAT_JAR_INNER_JAR).getParent()));
    steps.add(
        new SymlinkFileStep(
            getProjectFilesystem(),
            getResolver().getAbsolutePath(innerJar),
            fatJarDir.resolve(FAT_JAR_INNER_JAR),
            /* useAbsolutePaths */ true));

    // Build the final fat JAR from the structure we've laid out above.  We first package the
    // fat jar resources (e.g. native libs) using the "stored" compression level, to avoid
    // expensive compression on builds and decompression on startup.
    Path zipped = outputDir.resolve("contents.zip");

    Step zipStep = new ZipStep(
        getProjectFilesystem(),
        zipped,
        ImmutableSet.<Path>of(),
        /* junkPaths */ false,
        /* compressionLevel */ 0,
        fatJarDir);

    // Compile the bootstrap sources directly into the fat-jar directory.
    CompileToJarStepFactory compileStepFactory =
        new JavacToJarStepFactory(javacOptions, JavacOptionsAmender.IDENTITY);
    compileStepFactory.createCompileStep(
        context,
        ImmutableSortedSet.copyOf(javaSourceFilePaths),
        getBuildTarget(),
        getResolver(),
        getProjectFilesystem(),
        /* classpathEntries */ ImmutableSortedSet.<Path>of(),
        fatJarDir,
        /* workingDir */ Optional.<Path>absent(),
        /* pathToSrcsList */ Optional.<Path>absent(),
        /* suggestBuildRule */ Optional.<SuggestBuildRules>absent(),
        steps,
        buildableContext);

    steps.add(zipStep);
    steps.add(
        new JarDirectoryStep(
            getProjectFilesystem(),
            getOutputPath(),
            ImmutableSortedSet.of(zipped),
            FatJarMain.class.getName(),
            /* manifestFile */ null));

    return steps.build();
  }

  /**
   * @return a {@link Step} that generates the fat jar info resource.
   */
  private Step writeFatJarInfo(
      Path destination,
      final ImmutableMap<String, String> nativeLibraries) {

    // Serialize the FatJar descriptor lazily, at step-execution time.
    ByteSource source = new ByteSource() {
      @Override
      public InputStream openStream() throws IOException {
        FatJar fatJar = new FatJar(FAT_JAR_INNER_JAR, nativeLibraries);
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try {
          fatJar.store(bytes);
        } catch (Exception e) {
          throw Throwables.propagate(e);
        }
        return new ByteArrayInputStream(bytes.toByteArray());
      }
    };

    return new WriteFileStep(getProjectFilesystem(), source, destination, /* executable */ false);
  }

  /**
   * @return a {@link Step} that writes the final from the resource named {@code name}.
   */
  private Step writeFromResource(Path destination, final String name) {
    return new WriteFileStep(
        getProjectFilesystem(),
        Resources.asByteSource(Resources.getResource(name)),
        destination,
        /* executable */ false);
  }

  private Path getOutputDirectory() {
    return output.getParent();
  }

  private Path getOutputPath() {
    return output;
  }

  @Override
  public Path getPathToOutput() {
    return getOutputPath();
  }

  @Override
  public Tool getExecutableCommand() {
    // The fat JAR is self-contained: run it with a plain `java -jar`.
    return new CommandTool.Builder()
        .addArg("java")
        .addArg("-jar")
        .addArg(new BuildTargetSourcePath(getBuildTarget()))
        .build();
  }
}
/* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.example.watchface; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.ColorMatrix; import android.graphics.ColorMatrixColorFilter; import android.graphics.Paint; import android.graphics.Rect; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.support.wearable.watchface.CanvasWatchFaceService; import android.support.wearable.watchface.WatchFaceStyle; import android.text.format.Time; import android.view.SurfaceHolder; import java.util.TimeZone; import java.util.concurrent.TimeUnit; /** * Analog watch face with a ticking second hand. In ambient mode, the second hand isn't shown. On * devices with low-bit ambient mode, the hands are drawn without anti-aliasing in ambient mode. */ public class MyWatchFaceService extends CanvasWatchFaceService { /** * Update rate in milliseconds for interactive mode. We update once a second to advance the * second hand. 
*/
    private static final long INTERACTIVE_UPDATE_RATE_MS = TimeUnit.SECONDS.toMillis(1);

    /** Called by the system to create the drawing engine for this watch face. */
    @Override
    public Engine onCreateEngine() {
        return new Engine();
    }

    /**
     * The actual watch-face implementation: owns the paints, bitmaps and time state,
     * reacts to ambient/visibility changes, and drives a once-per-second redraw
     * timer while in interactive mode.
     */
    private class Engine extends CanvasWatchFaceService.Engine {

        /* Handler to update the time once a second in interactive mode. */
        private final Handler mUpdateTimeHandler = new Handler() {
            @Override
            public void handleMessage(Message message) {
                if (R.id.message_update == message.what) {
                    invalidate();
                    if (shouldTimerBeRunning()) {
                        long timeMs = System.currentTimeMillis();
                        // Schedule the next tick aligned to the next second boundary.
                        long delayMs = INTERACTIVE_UPDATE_RATE_MS
                                - (timeMs % INTERACTIVE_UPDATE_RATE_MS);
                        mUpdateTimeHandler.sendEmptyMessageDelayed(R.id.message_update, delayMs);
                    }
                }
            }
        };

        // Resets mTime to the new zone whenever the device's time zone changes.
        private final BroadcastReceiver mTimeZoneReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                mTime.clear(intent.getStringExtra("time-zone"));
                mTime.setToNow();
            }
        };
        private boolean mRegisteredTimeZoneReceiver = false;

        // Feel free to change these values and see what happens to the watch face.
        private static final float HAND_END_CAP_RADIUS = 4f;
        private static final float STROKE_WIDTH = 4f;
        private static final int SHADOW_RADIUS = 6;

        private Time mTime;

        private Paint mBackgroundPaint;
        private Paint mHandPaint;
        private boolean mAmbient;

        private Bitmap mBackgroundBitmap;
        private Bitmap mGrayBackgroundBitmap;

        // Hand lengths in pixels; recomputed from the screen size in onSurfaceChanged().
        private float mHourHandLength;
        private float mMinuteHandLength;
        private float mSecondHandLength;

        /**
         * Whether the display supports fewer bits for each color in ambient mode.
         * When true, we disable anti-aliasing in ambient mode.
         */
        private boolean mLowBitAmbient;

        /**
         * Whether the display supports burn in protection in ambient mode.
         * When true, remove the background in ambient mode.
         */
        private boolean mBurnInProtection;

        private int mWidth;
        private int mHeight;
        private float mCenterX;
        private float mCenterY;
        private float mScale = 1;

        // Last reported peek-card bounds; blacked out in ambient mode in onDraw().
        private Rect mCardBounds = new Rect();

        @Override
        public void onCreate(SurfaceHolder holder) {
            super.onCreate(holder);

            setWatchFaceStyle(new WatchFaceStyle.Builder(MyWatchFaceService.this)
                    .setCardPeekMode(WatchFaceStyle.PEEK_MODE_SHORT)
                    .setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
                    .setShowSystemUiTime(false)
                    .build());

            mBackgroundPaint = new Paint();
            mBackgroundPaint.setColor(Color.BLACK);
            final int backgroundResId = R.drawable.custom_background;
            mBackgroundBitmap = BitmapFactory.decodeResource(getResources(), backgroundResId);

            mHandPaint = new Paint();
            mHandPaint.setColor(Color.WHITE);
            mHandPaint.setStrokeWidth(STROKE_WIDTH);
            mHandPaint.setAntiAlias(true);
            mHandPaint.setStrokeCap(Paint.Cap.ROUND);
            mHandPaint.setShadowLayer(SHADOW_RADIUS, 0, 0, Color.BLACK);
            // STROKE style: hands are drawn as outlined round-rects/lines, not filled shapes.
            mHandPaint.setStyle(Paint.Style.STROKE);

            mTime = new Time();
        }

        @Override
        public void onDestroy() {
            // Drop any pending tick so no callback fires after the engine is destroyed.
            mUpdateTimeHandler.removeMessages(R.id.message_update);
            super.onDestroy();
        }

        @Override
        public void onPropertiesChanged(Bundle properties) {
            super.onPropertiesChanged(properties);
            // PROPERTY_* constants are inherited from the WatchFaceService hierarchy.
            mLowBitAmbient = properties.getBoolean(PROPERTY_LOW_BIT_AMBIENT, false);
            mBurnInProtection = properties.getBoolean(PROPERTY_BURN_IN_PROTECTION, false);
        }

        @Override
        public void onTimeTick() {
            // Called once a minute in ambient mode; just trigger a redraw.
            super.onTimeTick();
            invalidate();
        }

        @Override
        public void onAmbientModeChanged(boolean inAmbientMode) {
            super.onAmbientModeChanged(inAmbientMode);
            if (mAmbient != inAmbientMode) {
                mAmbient = inAmbientMode;
                if (mLowBitAmbient || mBurnInProtection) {
                    // Low-bit displays cannot render anti-aliased edges well.
                    mHandPaint.setAntiAlias(!inAmbientMode);
                }
                invalidate();
            }

            /*
             * Whether the timer should be running depends on whether we're visible (as well as
             * whether we're in ambient mode), so we may need to start or stop the timer.
             */
            updateTimer();
        }

        @Override
        public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            super.onSurfaceChanged(holder, format, width, height);
            mWidth = width;
            mHeight = height;
            /*
             * Find the coordinates of the center point on the screen.
             * Ignore the window insets so that, on round watches
             * with a "chin", the watch face is centered on the entire screen,
             * not just the usable portion.
             */
            mCenterX = mWidth / 2f;
            mCenterY = mHeight / 2f;
            mScale = ((float) width) / (float) mBackgroundBitmap.getWidth();
            /*
             * Calculate the lengths of the watch hands and store them in member variables.
             */
            mHourHandLength = mCenterX * 0.5f;
            mMinuteHandLength = mCenterX * 0.7f;
            mSecondHandLength = mCenterX * 0.9f;
            mBackgroundBitmap = Bitmap.createScaledBitmap(mBackgroundBitmap,
                    (int) (mBackgroundBitmap.getWidth() * mScale),
                    (int) (mBackgroundBitmap.getHeight() * mScale), true);
            /*
             * NOTE(review): the gray bitmap is skipped only when BOTH burn-in protection and
             * low-bit are set, but onDraw() only reads it when NEITHER is set — so this is
             * safe (no NPE) yet initializes the bitmap more often than strictly needed.
             */
            if (!mBurnInProtection || !mLowBitAmbient) {
                initGrayBackgroundBitmap();
            }
        }

        /** Builds a desaturated copy of the background for (non-low-bit) ambient mode. */
        private void initGrayBackgroundBitmap() {
            mGrayBackgroundBitmap = Bitmap.createBitmap(mBackgroundBitmap.getWidth(),
                    mBackgroundBitmap.getHeight(), Bitmap.Config.ARGB_8888);
            Canvas canvas = new Canvas(mGrayBackgroundBitmap);
            Paint grayPaint = new Paint();
            ColorMatrix colorMatrix = new ColorMatrix();
            colorMatrix.setSaturation(0); // 0 saturation = grayscale
            ColorMatrixColorFilter filter = new ColorMatrixColorFilter(colorMatrix);
            grayPaint.setColorFilter(filter);
            canvas.drawBitmap(mBackgroundBitmap, 0, 0, grayPaint);
        }

        @Override
        public void onDraw(Canvas canvas, Rect bounds) {
            mTime.setToNow();

            // Background: solid black for low-bit/burn-in ambient, gray copy for plain
            // ambient, full-color bitmap for interactive mode.
            if (mAmbient && (mLowBitAmbient || mBurnInProtection)) {
                canvas.drawColor(Color.BLACK);
            } else if (mAmbient) {
                canvas.drawBitmap(mGrayBackgroundBitmap, 0, 0, mBackgroundPaint);
            } else {
                canvas.drawBitmap(mBackgroundBitmap, 0, 0, mBackgroundPaint);
            }

            /*
             * These calculations reflect the rotation in degrees per unit of
             * time, e.g. 360 / 60 = 6 and 360 / 12 = 30
             */
            final float secondsRotation = mTime.second * 6f;
            final float minutesRotation = mTime.minute * 6f;
            // account for the offset of the hour hand due to minutes of the hour.
            final float hourHandOffset = mTime.minute / 2f;
            final float hoursRotation = (mTime.hour * 30) + hourHandOffset;

            // save the canvas state before we begin to rotate it
            canvas.save();

            canvas.rotate(hoursRotation, mCenterX, mCenterY);
            drawHand(canvas, mHourHandLength);

            // Rotations are cumulative, so each subsequent rotate is relative to the last.
            canvas.rotate(minutesRotation - hoursRotation, mCenterX, mCenterY);
            drawHand(canvas, mMinuteHandLength);

            /*
             * Make sure the "seconds" hand is drawn only when we are in interactive mode.
             * Otherwise we only update the watch face once a minute.
             */
            if (!mAmbient) {
                canvas.rotate(secondsRotation - minutesRotation, mCenterX, mCenterY);
                canvas.drawLine(mCenterX, mCenterY - HAND_END_CAP_RADIUS, mCenterX,
                        mCenterY - mSecondHandLength, mHandPaint);
            }
            canvas.drawCircle(mCenterX, mCenterY, HAND_END_CAP_RADIUS, mHandPaint);
            // restore the canvas' original orientation.
            canvas.restore();

            // In ambient mode, black out the area behind the peek card.
            if (mAmbient) {
                canvas.drawRect(mCardBounds, mBackgroundPaint);
            }
        }

        /**
         * Draws one hand as a rounded rectangle pointing straight up from the center;
         * the canvas is pre-rotated by the caller to aim it.
         */
        private void drawHand(Canvas canvas, float handLength) {
            canvas.drawRoundRect(mCenterX - HAND_END_CAP_RADIUS, mCenterY - handLength,
                    mCenterX + HAND_END_CAP_RADIUS, mCenterY + HAND_END_CAP_RADIUS,
                    HAND_END_CAP_RADIUS, HAND_END_CAP_RADIUS, mHandPaint);
        }

        @Override
        public void onVisibilityChanged(boolean visible) {
            super.onVisibilityChanged(visible);

            if (visible) {
                registerReceiver();
                // Update time zone in case it changed while we weren't visible.
                mTime.clear(TimeZone.getDefault().getID());
                mTime.setToNow();
            } else {
                unregisterReceiver();
            }

            /*
             * Whether the timer should be running depends on whether we're visible
             * (as well as whether we're in ambient mode),
             * so we may need to start or stop the timer.
             */
            updateTimer();
        }

        @Override
        public void onPeekCardPositionUpdate(Rect rect) {
            super.onPeekCardPositionUpdate(rect);
            mCardBounds.set(rect);
        }

        /** Registers the time-zone receiver exactly once; guarded by the boolean flag. */
        private void registerReceiver() {
            if (mRegisteredTimeZoneReceiver) {
                return;
            }
            mRegisteredTimeZoneReceiver = true;
            IntentFilter filter = new IntentFilter(Intent.ACTION_TIMEZONE_CHANGED);
            MyWatchFaceService.this.registerReceiver(mTimeZoneReceiver, filter);
        }

        /** Unregisters the time-zone receiver if it is currently registered. */
        private void unregisterReceiver() {
            if (!mRegisteredTimeZoneReceiver) {
                return;
            }
            mRegisteredTimeZoneReceiver = false;
            MyWatchFaceService.this.unregisterReceiver(mTimeZoneReceiver);
        }

        /** Starts or stops the once-per-second redraw timer to match the current state. */
        private void updateTimer() {
            mUpdateTimeHandler.removeMessages(R.id.message_update);
            if (shouldTimerBeRunning()) {
                mUpdateTimeHandler.sendEmptyMessage(R.id.message_update);
            }
        }

        /**
         * Returns whether the {@link #mUpdateTimeHandler} timer should be running. The timer
         * should only run when we're visible and in interactive mode.
         */
        private boolean shouldTimerBeRunning() {
            return isVisible() && !isInAmbientMode();
        }
    }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util;

import com.intellij.diagnostic.PluginException;
import com.intellij.model.Symbol;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.ResolveResult;
import com.intellij.util.containers.ConcurrentFactoryMap;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBIterable;
import org.jetbrains.annotations.*;

import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Verifies that cached computations are idempotent: re-running them yields results
 * equivalent to the cached ones. Failures are reported once per provider class
 * (except in unit-test mode) via {@link Logger#error}.
 */
public final class IdempotenceChecker {
  private static final Logger LOG = Logger.getInstance(IdempotenceChecker.class);
  // Provider classes already reported, so the same failure isn't logged repeatedly.
  private static final Set<Class<?>> ourReportedValueClasses = Collections.synchronizedSet(new HashSet<>());
  // Depth of nested applyForRandomCheck() re-computations on the current thread.
  private static final ThreadLocal<Integer> ourRandomCheckNesting = ThreadLocal.withInitial(() -> 0);
  @SuppressWarnings("SSBasedInspection")
  private static final ThreadLocal<List<String>> ourLog = new ThreadLocal<>();
  private static final RegistryValue ourRateCheckProperty = Registry.get("platform.random.idempotence.check.rate");

  /**
   * Perform some basic checks whether the two given objects are equivalent and interchangeable,
   * as described in e.g {@link com.intellij.psi.util.CachedValue} contract. This method should be used
   * in places caching results of various computations, which are expected to be idempotent:
   * they can be performed several times, or on multiple threads, and the results should be interchangeable.<p></p>
   *
   * What to do if you get an error from here:
   * <ul>
   * <li>
   *   Start by looking carefully at the computation (which usually can be found by navigating the stack trace)
   *   and find out why it could be non-idempotent. See common culprits below.</li>
   * <li>
   *   Add logging inside the computation by using {@link #logTrace}.
   * </li>
   * <li>
   *   If the computation is complex and depends on other caches, you could try to perform
   *   {@code IdempotenceChecker.checkEquivalence()} for their results as well, localizing the error.</li>
   * <li>
   *   If it's a test, you could try reproducing and debugging it. To increase the probability of failure,
   *   you can temporarily add {@code Registry.get("platform.random.idempotence.check.rate").setValue(1, getTestRootDisposable())}
   *   to perform the idempotence check on every cache access. Note that this can make your test much slower.
   * </li>
   * </ul>
   *
   * Common culprits:
   * <ul>
   * <li>Caching and returning a mutable object (e.g. array or List), which clients then mutate from different threads;
   *     to fix, either clone the return value or use unmodifiable wrappers</li>
   * <li>Depending on a {@link ThreadLocal} or method parameters with different values.</li>
   * <li>For failures from {@link #applyForRandomCheck}: outdated cached value (not all dependencies are specified,
   *     or their modification counters aren't properly incremented)</li>
   * </ul>
   *
   * @param existing the value computed on the first invocation
   * @param fresh the value computed a bit later, expected to be equivalent
   * @param providerClass a class of the function performing the computation, used to prevent reporting the same error multiple times
   * @param recomputeValue optionally, a way to recalculate the value one more time with {@link #isLoggingEnabled()} true,
   *                       and include the log collected via {@link #logTrace} into exception report.
   */
  public static <T> void checkEquivalence(@Nullable T existing,
                                          @Nullable T fresh,
                                          @NotNull Class<?> providerClass,
                                          @Nullable Computable<? extends T> recomputeValue) {
    String msg = checkValueEquivalence(existing, fresh);
    if (msg != null) {
      reportFailure(existing, fresh, providerClass, recomputeValue, msg);
    }
  }

  // Logs the failure; outside unit tests each provider class is reported at most once.
  private static <T> void reportFailure(@Nullable T existing,
                                        @Nullable T fresh,
                                        @NotNull Class<?> providerClass,
                                        @Nullable Computable<? extends T> recomputeValue,
                                        String msg) {
    boolean shouldReport = ApplicationManager.getApplication().isUnitTestMode() ||
                           ourReportedValueClasses.add(providerClass);
    if (shouldReport) {
      if (recomputeValue != null) {
        msg += recomputeWithLogging(existing, fresh, recomputeValue);
      }
      LOG.error(PluginException.createByClass(msg, null, providerClass));
    }
  }

  /**
   * Runs the computation a third time with logging enabled and reports whether the
   * result matches 'existing', 'fresh', or neither, plus the collected trace.
   */
  @NotNull
  private static <T> String recomputeWithLogging(@Nullable T existing,
                                                 @Nullable T fresh,
                                                 @NotNull Computable<? extends T> recomputeValue) {
    ResultWithLog<T> rwl = computeWithLogging(recomputeValue);
    T freshest = rwl.result;
    @NonNls String msg = "\n\nRecomputation gives " + objAndClass(freshest);
    if (checkValueEquivalence(existing, freshest) == null) {
      msg += " which is equivalent to 'existing'";
    }
    else if (checkValueEquivalence(fresh, freshest) == null) {
      msg += " which is equivalent to 'fresh'";
    }
    else {
      msg += " which is different from both values";
    }
    if (!rwl.log.isEmpty() && !(freshest instanceof ResultWithLog)) {
      msg += "\nRecomputation log:\n" + rwl.printLog();
    }
    return msg;
  }

  /**
   * Run the given computation with internal logging enabled to help debug {@link #checkEquivalence} failures.
   * @return Both the computation result and the log
   * @see #logTrace(String)
   */
  @NotNull
  public static <T> ResultWithLog<T> computeWithLogging(Computable<? extends T> recomputeValue) {
    List<String> threadLog = ourLog.get();
    boolean outermost = threadLog == null;
    if (outermost) {
      ourLog.set(threadLog = new ArrayList<>());
    }
    try {
      // Nested calls share the thread log; each call captures only its own suffix.
      int start = threadLog.size();
      T result = recomputeValue.compute();
      return new ResultWithLog<>(result, new ArrayList<>(threadLog.subList(start, threadLog.size())));
    }
    finally {
      if (outermost) {
        ourLog.set(null);
      }
    }
  }

  // Renders a value for error messages, appending the class name unless it is
  // already evident from toString() or the value is a common self-describing type.
  private static @NonNls String objAndClass(Object o) {
    if (o == null) return "null";
    String s = o.toString();
    return s.contains(o.getClass().getSimpleName()) || o instanceof String || o instanceof Number || o instanceof Class
           ? s
           : s + " (class " + o.getClass().getName() + ")";
  }

  /**
   * Core structural equivalence check. Returns null when the values are considered
   * interchangeable, otherwise a human-readable description of the first difference.
   * NOTE: branch order matters — e.g. ordered collections are compared element-wise
   * before the generic Set/Map size checks.
   */
  private static String checkValueEquivalence(@Nullable Object existing, @Nullable Object fresh) {
    if (existing == fresh) return null;
    String eqMsg = checkClassEquivalence(existing, fresh);
    if (eqMsg != null) return eqMsg;

    Object[] eArray = asArray(existing);
    if (eArray != null) {
      return checkArrayEquivalence(eArray, Objects.requireNonNull(asArray(fresh)), existing);
    }
    if (existing instanceof ResultWithLog) {
      return whichIsField("result", existing, fresh,
                          checkValueEquivalence(((ResultWithLog<?>)existing).getResult(), ((ResultWithLog<?>)fresh).getResult()));
    }
    if (existing instanceof CachedValueBase.Data) {
      return checkCachedValueData((CachedValueBase.Data<?>)existing, (CachedValueBase.Data<?>)fresh);
    }
    if (existing instanceof List || isOrderedSet(existing)) {
      return checkCollectionElements((Collection<?>)existing, (Collection<?>)fresh);
    }
    if (isOrderedMap(existing)) {
      return checkCollectionElements(((Map<?,?>)existing).entrySet(), ((Map<?,?>)fresh).entrySet());
    }
    if (existing instanceof Set) {
      // Unordered sets: only sizes are compared.
      return whichIsField("size", existing, fresh,
                          checkCollectionSizes(((Set<?>)existing).size(), ((Set<?>)fresh).size()));
    }
    if (existing instanceof Map) {
      if (existing instanceof ConcurrentMap) {
        return null; // likely to be filled lazily
      }
      return whichIsField("size", existing, fresh,
                          checkCollectionSizes(((Map<?,?>)existing).size(), ((Map<?,?>)fresh).size()));
    }
    if (isExpectedToHaveSaneEquals(existing) && !existing.equals(fresh)) {
      return reportProblem(existing, fresh);
    }
    if (existing instanceof PsiNamedElement) {
      return checkPsiEquivalence((PsiElement)existing, (PsiElement)fresh);
    }
    if (existing instanceof ResolveResult) {
      PsiElement existingPsi = ((ResolveResult)existing).getElement();
      PsiElement freshPsi = ((ResolveResult)fresh).getElement();
      if (existingPsi != freshPsi) {
        String s = checkClassEquivalence(existingPsi, freshPsi);
        if (s == null) s = checkPsiEquivalence(existingPsi, freshPsi);
        return whichIsField("element", existing, fresh, s);
      }
      return null;
    }
    // Anything else is assumed equivalent (no reliable equals available).
    return null;
  }

  private static boolean isOrderedMap(Object o) {
    return o instanceof LinkedHashMap || o instanceof SortedMap;
  }

  private static boolean isOrderedSet(Object o) {
    return o instanceof LinkedHashSet || o instanceof SortedSet;
  }

  // Wraps a nested failure message with context about which field/aspect differed.
  private static String whichIsField(@NotNull @NonNls String field, @NotNull Object existing, @NotNull Object fresh, @Nullable String msg) {
    return msg == null ? null : appendDetail(msg, "which is " + field + " of " + existing + " and " + fresh);
  }

  // Views arrays, Map.Entry, Pair and Trinity uniformly as Object[]; null otherwise.
  private static Object @Nullable [] asArray(Object o) {
    if (o instanceof Object[]) return (Object[])o;
    if (o instanceof Map.Entry) return new Object[]{((Map.Entry<?,?>)o).getKey(), ((Map.Entry<?,?>)o).getValue()};
    if (o instanceof Pair) return new Object[]{((Pair<?,?>)o).first, ((Pair<?,?>)o).second};
    if (o instanceof Trinity) return new Object[]{((Trinity<?,?,?>)o).first, ((Trinity<?,?,?>)o).second, ((Trinity<?,?,?>)o).third};
    return null;
  }

  // Compares CachedValue data holders: dependency counts first, then the payloads.
  private static String checkCachedValueData(@NotNull CachedValueBase.Data<?> existing, @NotNull CachedValueBase.Data<?> fresh) {
    Object[] deps1 = existing.getDependencies();
    Object[] deps2 = fresh.getDependencies();
    Object eValue = existing.get();
    Object fValue = fresh.get();
    if (deps1.length != deps2.length) {
      String msg = reportProblem(deps1.length, deps2.length);
      msg = appendDetail(msg, "which is length of CachedValue dependencies: " + Arrays.toString(deps1) + " and " + Arrays.toString(deps2));
      msg = appendDetail(msg, "where values are " + objAndClass(eValue) + " and " + objAndClass(fValue));
      return msg;
    }
    return checkValueEquivalence(eValue, fValue);
  }

  private static boolean isExpectedToHaveSaneEquals(@NotNull Object existing) {
    return existing instanceof Comparable || existing instanceof Symbol;
  }

  // Null mismatch or class mismatch is a failure, unless the classes are known to
  // still produce equivalent values (see below).
  @Contract("null,_->!null;_,null->!null")
  private static String checkClassEquivalence(@Nullable Object existing, @Nullable Object fresh) {
    if (existing == null || fresh == null) {
      return reportProblem(existing, fresh);
    }
    Class<?> c1 = existing.getClass();
    Class<?> c2 = fresh.getClass();
    if (c1 != c2 && !objectsOfDifferentClassesCanStillBeEquivalent(existing, fresh)) {
      return whichIsField("class", existing, fresh, reportProblem(c1, c2));
    }
    return null;
  }

  private static boolean objectsOfDifferentClassesCanStillBeEquivalent(@NotNull Object existing, @NotNull Object fresh) {
    if (existing instanceof Map && fresh instanceof Map && isOrderedMap(existing) == isOrderedMap(fresh)) return true;
    if (existing instanceof Set && fresh instanceof Set && isOrderedSet(existing) == isOrderedSet(fresh)) return true;
    if (existing instanceof List && fresh instanceof List) return true;
    if (existing instanceof PsiNamedElement && fresh instanceof PsiNamedElement) return true; // ClsClassImpl might be equal to PsiClass
    // Otherwise: equivalent only if the two classes share a superclass that defines equals().
    return ContainerUtil.intersects(allSupersWithEquals.get(existing.getClass()), allSupersWithEquals.get(fresh.getClass()));
  }

  // Per-class cache of all superclasses (excluding Object) that declare equals(Object).
  @SuppressWarnings("rawtypes")
  private static final Map<Class, Set<Class>> allSupersWithEquals = ConcurrentFactoryMap.createMap(
    clazz -> JBIterable
      .generate(clazz, Class::getSuperclass)
      .filter(c -> c != Object.class && ReflectionUtil.getDeclaredMethod(c, "equals", Object.class) != null)
      .toSet());

  // PSI elements are equivalent if equals() or isEquivalentTo() (either direction) says
  // so; mismatches are only reported when at least one side looks like a resolve target.
  private static String checkPsiEquivalence(@NotNull PsiElement existing, @NotNull PsiElement fresh) {
    if (!existing.equals(fresh) &&
        !existing.isEquivalentTo(fresh) &&
        !fresh.isEquivalentTo(existing) &&
        (seemsToBeResolveTarget(existing) || seemsToBeResolveTarget(fresh))) {
      return reportProblem(existing, fresh);
    }
    return null;
  }

  private static boolean seemsToBeResolveTarget(@NotNull PsiElement psi) {
    if (psi.isPhysical()) return true;
    PsiElement nav = psi.getNavigationElement();
    return nav != null && nav.isPhysical();
  }

  private static String checkCollectionElements(@NotNull Collection<?> existing, @NotNull Collection<?> fresh) {
    if (fresh.isEmpty()) {
      return null; // for cases when an empty collection is cached and then filled lazily on request
    }
    return checkArrayEquivalence(existing.toArray(), fresh.toArray(), existing);
  }

  private static String checkCollectionSizes(int size1, int size2) {
    if (size2 == 0) {
      return null; // for cases when an empty collection is cached and then filled lazily on request
    }
    if (size1 != size2) {
      return reportProblem(size1, size2);
    }
    return null;
  }

  // Element-wise comparison; 'original1' is only used to label Map.Entry key/value.
  private static String checkArrayEquivalence(Object[] a1, Object[] a2, Object original1) {
    int len1 = a1.length;
    int len2 = a2.length;
    if (len1 != len2) {
      return appendDetail(reportProblem(len1, len2), "which is length of " + Arrays.toString(a1) + " and " + Arrays.toString(a2));
    }
    for (int i = 0; i < len1; i++) {
      String msg = checkValueEquivalence(a1[i], a2[i]);
      if (msg != null) {
        return whichIsField(original1 instanceof Map.Entry ? (i == 0 ? "key" : "value") : i + "th element",
                            Arrays.toString(a1), Arrays.toString(a2), msg);
      }
    }
    return null;
  }

  private static String reportProblem(Object o1, Object o2) {
    return appendDetail("Non-idempotent computation: it returns different results when invoked multiple times or on different threads:",
                        objAndClass(o1) + " != " + objAndClass(o2));
  }

  // Details are truncated to 10k chars to keep error messages bounded.
  private static String appendDetail(@NonNls String message, @NonNls String detail) {
    return message + "\n  " + StringUtil.trimLog(detail, 10_000);
  }

  /**
   * @return whether random checks are enabled and it makes sense to call a potentially expensive {@link #applyForRandomCheck} at all.
   */
  public static boolean areRandomChecksEnabled() {
    return ApplicationManager.getApplication().isUnitTestMode() && !ApplicationManagerEx.isInStressTest();
  }

  /**
   * Useful when your test checks how many times a specific code was called, and random checks make that test flaky.
   */
  @TestOnly
  public static void disableRandomChecksUntil(Disposable parentDisposable) {
    ourRateCheckProperty.setValue(0, parentDisposable);
  }

  /**
   * Call this when accessing an already cached value, so that once in a while
   * (depending on "platform.random.idempotence.check.rate" registry value)
   * the computation is re-run and checked for consistency with that cached value.
   */
  public static <T> void applyForRandomCheck(T data, Object provider, Computable<? extends T> recomputeValue) {
    if (areRandomChecksEnabled() && shouldPerformRandomCheck()) {
      RecursionGuard.StackStamp stamp = RecursionManager.markStack();
      Integer prevNesting = ourRandomCheckNesting.get();
      ourRandomCheckNesting.set(prevNesting + 1);
      try {
        T fresh = recomputeValue.compute();
        // Only compare when recursion prevention didn't poison the result.
        if (stamp.mayCacheNow()) {
          checkEquivalence(data, fresh, provider.getClass(), recomputeValue);
        }
      }
      finally {
        ourRandomCheckNesting.set(prevNesting);
      }
    }
  }

  // Fires with probability 1/rate; rate <= 0 disables the random checks entirely.
  private static boolean shouldPerformRandomCheck() {
    int rate = ourRateCheckProperty.asInteger();
    return rate > 0 && ThreadLocalRandom.current().nextInt(rate) == 0;
  }

  @TestOnly
  public static boolean isCurrentThreadInsideRandomCheck() {
    return ourRandomCheckNesting.get() > 0;
  }

  /**
   * @return whether {@link #logTrace} will actually log anything
   */
  public static boolean isLoggingEnabled() {
    return ourLog.get() != null;
  }

  /**
   * Log a message to help debug {@link #checkEquivalence} failures. When such a failure occurs, the computation can be re-run again
   * with this logging enabled, and the collected log will be included into exception message.
   */
  public static void logTrace(@NotNull @NonNls String message) {
    List<String> log = ourLog.get();
    if (log != null) {
      log.add(message);
    }
  }

  /** Immutable pair of a computation result and the trace collected while computing it. */
  public static final class ResultWithLog<T> {
    private final T result;
    private final List<String> log;

    private ResultWithLog(T result, List<String> log) {
      this.result = result;
      this.log = log;
    }

    public T getResult() {
      return result;
    }

    String printLog() {
      return StringUtil.join(log, s -> " " + s, "\n");
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (!(o instanceof ResultWithLog)) return false;
      // NOTE(review): the local 'log' shadows the field of the same name here.
      // Only 'result' participates in equality; deepEquals handles array results.
      ResultWithLog<?> log = (ResultWithLog<?>)o;
      return Arrays.deepEquals(new Object[]{result}, new Object[]{log.result});
    }

    @Override
    public int hashCode() {
      return Objects.hash(result);
    }

    @Override
    public String toString() {
      return "ResultWithLog{" + result + (log.isEmpty() ? "" : ", log='\n" + printLog() + '\'') + '}';
    }
  }
}
/*
    Copyright 2008 Jenkov Development

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
*/
package com.jenkov.db.impl.mapping;

import com.jenkov.db.itf.mapping.ICustomObjectMapper;
import com.jenkov.db.itf.mapping.IObjectMapping;
import com.jenkov.db.itf.mapping.IKey;
import com.jenkov.db.itf.mapping.IGetterMapping;
import com.jenkov.db.itf.PersistenceException;

import java.util.*;

/**
 * This class is a fast implementation of an object mapping key. The reason it is fast is,
 * that the id returned by <code>getId()</code> (an int) is used both in the <code>hashCode()</code> call and the
 * <code>equals(Object o)</code> call. These two methods are used when the object mapping key is used
 * in hash maps, as is the case with the default implementation of the object mapping cache.
 *
 * Using a string or some other class as the key may be slightly slower, since the <code>hashCode()</code>
 * and the <code>equals()</code> calls may be slower than this implementation.
 *
 * <br/><br/>
 * <code>ObjectMappingKey</code> instances can also contain an <code>ICustomObjectMapper</code> instance which will
 * create, or assist in creating, the object mapping this <code>ObjectMappingKey</code> instance represents.
 *
 * <br/><br/>
 * When creating object mapping keys,
 * assign them to a constant in some class of yours, like the example below (reading user
 * instances from the database):
 *
 * <br/><br/>
 *
 * <code>
 * public class ObjectMappingKeys{ <br/>
 * <br/>
 * &nbsp;&nbsp;&nbsp;public static final ObjectMappingKey USER_READ   = ObjectMappingKey.createInstance(User.class); <br/>
 * &nbsp;&nbsp;&nbsp;public static final ObjectMappingKey USER_INSERT = ObjectMappingKey.createInstance(User.class); <br/>
 * &nbsp;&nbsp;&nbsp;public static final ObjectMappingKey USER_UPDATE = ObjectMappingKey.createInstance(User.class); <br/>
 * &nbsp;&nbsp;&nbsp;public static final ObjectMappingKey USER_DELETE = ObjectMappingKey.createInstance(User.class); <br/>
 * <br/>
 * } <br/>
 * </code>
 */
public class ObjectMappingKey {

    /**
     * Source of unique ids for new keys. Access is guarded by the class lock,
     * since every factory method below is declared <code>synchronized</code>.
     */
    private static int nextId = 0;

    private final int                 id;
    private final Class               objectClass;
    private final ICustomObjectMapper objectMapper;

    private ObjectMappingKey(int id, Class objectClass, ICustomObjectMapper mapper){
        this.id           = id;
        this.objectClass  = objectClass;
        this.objectMapper = mapper;
    }

    /**
     * Returns the id of this object mapping key. The id is assigned automatically by the
     * factory methods and is unique across all <code>ObjectMappingKey</code> instances
     * created in this JVM.
     * @return The id of this object mapping key.
     */
    public int getId() {
        return id;
    }

    /**
     * Returns the class mapped by the object mapping this <code>ObjectMappingKey</code>
     * instance is key for.
     * Setting the class for the object mapping key is optional. It is just a help
     * for you to identify object mapping keys at runtime.
     * Setting the class also enables the AbstractDao subclasses to automatically generate
     * an object mapping for this object mapping key, if none is cached already.
     *
     * @return The class mapped by the object mapping stored in the object mapping cache
     *         under this object mapping key.
     */
    public Class getObjectClass(){
        return this.objectClass;
    }

    /**
     * Returns the custom object mapper attached to this key, or null if none was supplied.
     * @return The custom object mapper, or null.
     */
    public ICustomObjectMapper getCustomObjectMapper(){
        return this.objectMapper;
    }

    public String toString() {
        return "id = " + this.id + ", " + getObjectClass();
    }

    /**
     * Returns the hash code of this object mapping key. Since the id of the object
     * method key is unique, the id is also used as the hash code of the
     * object mapping key instance.
     * @return The hash code for this ObjectMappingKey instance.
     */
    public int hashCode() {
        return this.id;
    }

    /**
     * Two <code>ObjectMappingKey</code> instances are equal if and only if they have the
     * same id. Since ids are unique, this is equivalent to identity for keys created via
     * the factory methods.
     */
    public boolean equals(Object obj) {
        if(obj == null)                         return false;
        if(! (obj instanceof ObjectMappingKey)) return false;
        return this.id == ((ObjectMappingKey) obj).getId();
    }

    /**
     * Creates an instance of <code>ObjectMappingKey</code> with the object class set only.
     * A unique id will be assigned to the <code>ObjectMappingKey</code> before it is returned.
     *
     * <br/><br/>
     * When creating object mapping keys, assign them to a constant in some class of yours.
     *
     * @param objectClass The class mapped by the object mapping that this ObjectMappingKey instance
     *                    is key for.
     * @return An <code>ObjectMappingKey</code> instance with the given object class set.
     */
    public static synchronized ObjectMappingKey createInstance(Class objectClass){
        return createInstance(objectClass, null);
    }

    /**
     * Creates an instance of <code>ObjectMappingKey</code> with both object class and
     * a custom object mapper set.
     * A unique id will be assigned to the <code>ObjectMappingKey</code> before it is returned.
     * The <code>ICustomObjectMapper</code> will be used when creating an object mapping for
     * this object mapping key.
     *
     * <br/><br/>
     * When creating object mapping keys, assign them to a constant in some class of yours.
     *
     * @param objectClass The class mapped by the object mapping that this ObjectMappingKey instance
     *                    is key for.
     * @param mapper      The custom object mapper to use when creating the object mapping, or null.
     * @return An <code>ObjectMappingKey</code> instance with the given object class and mapper set.
     */
    public static synchronized ObjectMappingKey createInstance(Class objectClass, ICustomObjectMapper mapper){
        return new ObjectMappingKey(nextId++, objectClass, mapper);
    }

    /**
     * Creates an instance of <code>ObjectMappingKey</code> with both object class and
     * a custom object mapper set. The custom object mapper will be set internally by this factory method.
     * The custom object mapper will mark all the columns in the columns array as auto generated.
     * A unique id will be assigned to the <code>ObjectMappingKey</code> before it is returned.
     *
     * @param objectClass The class mapped by the object mapping that this ObjectMappingKey instance
     *                    is key for.
     * @param columns     The names of the columns to mark as auto generated.
     * @return An <code>ObjectMappingKey</code> instance with the given object class set.
     */
    public static synchronized ObjectMappingKey createInstanceForAutoGeneratedColumns(
            Class objectClass, final String[] columns){
        return createInstance(objectClass, createAutoGeneratedColumnsCustomMapper(columns));
    }

    /**
     * Creates an instance of <code>ObjectMappingKey</code> with object class, table name, and
     * a custom object mapper set. The custom object mapper will be set internally by this factory method.
     * The custom object mapper will mark all the columns in the columns array as auto generated,
     * and return the given table name as the table to map the class to.
     * A unique id will be assigned to the <code>ObjectMappingKey</code> before it is returned.
     *
     * @param objectClass The class mapped by the object mapping that this ObjectMappingKey instance
     *                    is key for.
     * @param columns     The names of the columns to mark as auto generated.
     * @param tableName   The name of the table to map this class to.
     * @return An <code>ObjectMappingKey</code> instance with the given object class set.
     */
    public static synchronized ObjectMappingKey createInstanceForCustomTableAutoGeneratedColumns(
            Class objectClass, final String[] columns, String tableName){
        return createInstance(objectClass, createCustomTableAutoGeneratedColumnsCustomMapper(columns, tableName));
    }

    /**
     * Creates an instance of <code>ObjectMappingKey</code> with both object class and
     * a custom object mapper set. The custom object mapper will be set internally by this factory method.
     * The custom object mapper will mark all the columns in the primary key as auto generated.
     * A unique id will be assigned to the <code>ObjectMappingKey</code> before it is returned.
     *
     * @param objectClass The class mapped by the object mapping that this ObjectMappingKey instance
     *                    is key for.
     * @return An <code>ObjectMappingKey</code> instance with the given object class set.
     */
    public static synchronized ObjectMappingKey createInstanceForAutoGeneratedPrimaryKey(Class objectClass){
        return createInstance(objectClass, createAutoGeneratedPrimaryKeyCustomMapper());
    }

    /**
     * Creates an instance of <code>ObjectMappingKey</code> with object class, table name, and
     * a custom object mapper set. The custom object mapper will be set internally by this factory method.
     * The custom object mapper will mark all the columns in the primary key as auto generated,
     * and map the class to the given table name.
     * A unique id will be assigned to the <code>ObjectMappingKey</code> before it is returned.
     *
     * @param objectClass The class mapped by the object mapping that this ObjectMappingKey instance
     *                    is key for.
     * @param tableName   The name of the table to map that class to.
     * @return An <code>ObjectMappingKey</code> instance with the given object class set.
     */
    public static synchronized ObjectMappingKey createInstanceForCustomTableAutoGeneratedPrimaryKey(Class objectClass, String tableName){
        return createInstance(objectClass, createCustomTableAutoGeneratedPrimaryKeyCustomMapper(tableName));
    }

    /**
     * Marks the getter mapping of the given column as auto generated, failing with a
     * descriptive exception when no getter mapping exists for that column. Previously only
     * the custom-table columns mapper performed this check; the other mappers threw a bare
     * NullPointerException with no explanation.
     */
    private static void markColumnAutoGenerated(IObjectMapping mapping, String columnName) {
        IGetterMapping getterMapping = mapping.getGetterMapping(columnName);
        if(getterMapping == null){
            throw new NullPointerException(
                "No getter mapping found for column name " + columnName +
                ". Perhaps the columns name is misspelled, or have the wrong case " +
                "(some databases convert all column names to UPPERCASE internally).");
        }
        getterMapping.setAutoGenerated(true);
    }

    /** Marks every column of the mapping's primary key as auto generated. */
    private static void markPrimaryKeyAutoGenerated(IObjectMapping mapping) throws PersistenceException {
        IKey key = mapping.getPrimaryKey();
        Iterator iterator = key.getColumns().iterator();
        while(iterator.hasNext()){
            String columnName = (String) iterator.next();
            markColumnAutoGenerated(mapping, columnName);
        }
    }

    /** Mapper that marks the given columns as auto generated. */
    private static CustomObjectMapperBase createAutoGeneratedColumnsCustomMapper(final String[] columns) {
        return new CustomObjectMapperBase(){
            public void modify(Object objectMappingKey, IObjectMapping mapping) throws PersistenceException {
                for(int i=0; i<columns.length; i++){
                    markColumnAutoGenerated(mapping, columns[i]);
                }
            }
        };
    }

    /** Mapper that maps the class to a custom table and marks the given columns as auto generated. */
    private static CustomObjectMapperBase createCustomTableAutoGeneratedColumnsCustomMapper(final String[] columns, final String tableName) {
        return new CustomObjectMapperBase(){
            public String getTableName(Object objectMappingKey) throws PersistenceException {
                return tableName;
            }

            public void modify(Object objectMappingKey, IObjectMapping mapping) throws PersistenceException {
                for(int i=0; i<columns.length; i++){
                    markColumnAutoGenerated(mapping, columns[i]);
                }
            }
        };
    }

    /** Mapper that marks all primary key columns as auto generated. */
    private static CustomObjectMapperBase createAutoGeneratedPrimaryKeyCustomMapper() {
        return new CustomObjectMapperBase(){
            public void modify(Object objectMappingKey, IObjectMapping mapping) throws PersistenceException {
                markPrimaryKeyAutoGenerated(mapping);
            }
        };
    }

    /** Mapper that maps the class to a custom table and marks all primary key columns as auto generated. */
    private static CustomObjectMapperBase createCustomTableAutoGeneratedPrimaryKeyCustomMapper(final String tableName) {
        return new CustomObjectMapperBase(){
            public String getTableName(Object objectMappingKey) throws PersistenceException {
                return tableName;
            }

            public void modify(Object objectMappingKey, IObjectMapping mapping) throws PersistenceException {
                markPrimaryKeyAutoGenerated(mapping);
            }
        };
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.xmlsecurity; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.security.GeneralSecurityException; import java.security.KeyStore; import java.security.cert.Certificate; import java.security.cert.X509Certificate; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.TreeMap; import javax.xml.XMLConstants; import javax.xml.crypto.dsig.DigestMethod; import javax.xml.crypto.dsig.XMLSignature; import javax.xml.crypto.dsig.spec.XPathFilterParameterSpec; import javax.xml.namespace.NamespaceContext; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Source; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.Validator; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpression; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import org.w3c.dom.Document; import org.w3c.dom.Node; 
import org.xml.sax.SAXException;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.component.xmlsecurity.api.DefaultXAdESSignatureProperties;
import org.apache.camel.component.xmlsecurity.api.XAdESEncapsulatedPKIData;
import org.apache.camel.component.xmlsecurity.api.XAdESSignatureProperties;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureConstants;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureException;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureHelper;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureProperties;
import org.apache.camel.component.xmlsecurity.util.TestKeystore;
import org.apache.camel.spi.Registry;
import org.apache.camel.support.SimpleRegistry;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.apache.camel.test.junit5.TestSupport;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.apache.camel.component.xmlsecurity.XmlSignatureTest.checkThrownException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Tests the XAdES signature properties of the xmlsecurity-sign component:
 * signed signature/data-object properties written into the
 * {@code etsi:QualifyingProperties} element, namespace/prefix handling, header
 * overrides, and validation of the {@link XAdESSignatureProperties} setters.
 */
public class XAdESSignaturePropertiesTest extends CamelTestSupport {

    private static final String NOT_EMPTY = "NOT_EMPTY";

    // Payload to be signed; whether a newline follows the XML declaration
    // depends on the JDK in use (see the static initializer below).
    private static String payload;

    static {
        boolean includeNewLine = true;
        if (TestSupport.getJavaMajorVersion() >= 9
                || TestSupport.isJava18_261_later() && !TestSupport.isJavaVendor("Azul")) {
            includeNewLine = false;
        }
        payload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + (includeNewLine ? "\n" : "")
                + "<root xmlns=\"http://test/test\"><test>Test Message</test></root>";
    }

    @Override
    @BeforeEach
    public void setUp() throws Exception {
        disableJMX();
        super.setUp();
    }

    /** Binds the key accessor, signature properties and XPath-to-ID filters used by the routes. */
    @Override
    protected Registry createCamelRegistry() throws Exception {
        Registry registry = new SimpleRegistry();
        registry.bind("keyAccessorDefault", TestKeystore.getKeyAccessor("bob"));
        registry.bind("xmlSignatureProperties", getXmlSignatureProperties("bob"));
        Map<String, String> namespaceMap = Collections.singletonMap("ns", "http://test");
        List<XPathFilterParameterSpec> xpaths = Collections
                .singletonList(XmlSignatureHelper.getXpathFilter("/ns:root/a/@ID", namespaceMap));
        registry.bind("xpathsToIdAttributes", xpaths);
        return registry;
    }

    /** Routes for enveloped, enveloping, empty-signature-id and detached XAdES signing. */
    @Override
    protected RouteBuilder[] createRouteBuilders() throws Exception {
        return new RouteBuilder[] { new RouteBuilder() {
            public void configure() throws Exception {
                onException(XmlSignatureException.class).handled(true).to("mock:exception");
                from("direct:enveloped")
                        .to("xmlsecurity-sign:xades?keyAccessor=#keyAccessorDefault&properties=#xmlSignatureProperties&parentLocalName=root&parentNamespace=http://test/test")
                        .to("mock:result");
            }
        }, new RouteBuilder() {
            public void configure() throws Exception {
                onException(XmlSignatureException.class).handled(true).to("mock:exception");
                from("direct:enveloping")
                        .to("xmlsecurity-sign:xades?keyAccessor=#keyAccessorDefault&properties=#xmlSignatureProperties")
                        .to("mock:result");
            }
        }, new RouteBuilder() {
            public void configure() throws Exception {
                onException(XmlSignatureException.class).handled(true).to("mock:exception");
                from("direct:emptySignatureId").to(
                        "xmlsecurity-sign:xades?keyAccessor=#keyAccessorDefault&properties=#xmlSignatureProperties&signatureId=")
                        .to("mock:result");
            }
        }, new RouteBuilder() {
            public void configure() throws Exception {
                onException(Exception.class).handled(false).to("mock:exception");
                from("direct:detached").to(
                        "xmlsecurity-sign:detached?keyAccessor=#keyAccessorDefault&xpathsToIdAttributes=#xpathsToIdAttributes&"//
                        + "schemaResourceUri=org/apache/camel/component/xmlsecurity/Test.xsd&properties=#xmlSignatureProperties")
                        .to("mock:result");
            }
        } };
    }

    /**
     * Signs with all XAdES properties set and verifies each generated signed
     * signature / signed data-object property via XPath.
     */
    @Test
    public void envelopingAllParameters() throws Exception {
        Document doc = testEnveloping();

        Map<String, String> prefix2Namespace = getPrefix2NamespaceMap();
        String pathToSignatureProperties = getPathToSignatureProperties();
        // signing time
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningTime/text()", prefix2Namespace, NOT_EMPTY);
        // signing certificate
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/etsi:CertDigest/ds:DigestMethod/@Algorithm",
                prefix2Namespace, DigestMethod.SHA256);
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/etsi:CertDigest/ds:DigestValue/text()",
                prefix2Namespace, NOT_EMPTY);
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/etsi:IssuerSerial/ds:X509IssuerName/text()",
                prefix2Namespace, NOT_EMPTY);
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/etsi:IssuerSerial/ds:X509SerialNumber/text()",
                prefix2Namespace, NOT_EMPTY);
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/@URI", prefix2Namespace, "http://certuri");

        // signature policy
        checkXpath(doc, pathToSignatureProperties
                + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyId/etsi:Identifier/text()",
                prefix2Namespace, "1.2.840.113549.1.9.16.6.1");
        checkXpath(doc, pathToSignatureProperties
                + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyId/etsi:Identifier/@Qualifier",
                prefix2Namespace, "OIDAsURN");
        checkXpath(doc, pathToSignatureProperties
                + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyId/etsi:Description/text()",
                prefix2Namespace, "invoice version 3.1");
        checkXpath(doc, pathToSignatureProperties
                + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyHash/ds:DigestMethod/@Algorithm",
                prefix2Namespace, DigestMethod.SHA256);
        checkXpath(doc, pathToSignatureProperties
                + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyHash/ds:DigestValue/text()",
                prefix2Namespace, "Ohixl6upD6av8N7pEvDABhEL6hM=");
        checkXpath(
                doc,
                pathToSignatureProperties
                        + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyQualifiers/etsi:SigPolicyQualifier[1]/etsi:SPURI/text()",
                prefix2Namespace, "http://test.com/sig.policy.pdf");
        checkXpath(
                doc,
                pathToSignatureProperties
                        + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyQualifiers/etsi:SigPolicyQualifier[1]/etsi:SPUserNotice/etsi:ExplicitText/text()",
                prefix2Namespace, "display text");
        checkXpath(doc, pathToSignatureProperties
                + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyQualifiers/etsi:SigPolicyQualifier[2]/text()",
                prefix2Namespace, "category B");
        checkXpath(
                doc,
                pathToSignatureProperties
                        + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyId/etsi:DocumentationReferences/etsi:DocumentationReference[1]/text()",
                prefix2Namespace, "http://test.com/policy.doc.ref1.txt");
        checkXpath(
                doc,
                pathToSignatureProperties
                        + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId/etsi:SigPolicyId/etsi:DocumentationReferences/etsi:DocumentationReference[2]/text()",
                prefix2Namespace, "http://test.com/policy.doc.ref2.txt");

        // production place
        checkXpath(doc, pathToSignatureProperties + "etsi:SignatureProductionPlace/etsi:City/text()", prefix2Namespace,
                "Munich");
        checkXpath(doc, pathToSignatureProperties + "etsi:SignatureProductionPlace/etsi:StateOrProvince/text()",
                prefix2Namespace, "Bavaria");
        checkXpath(doc, pathToSignatureProperties + "etsi:SignatureProductionPlace/etsi:PostalCode/text()", prefix2Namespace,
                "80331");
        checkXpath(doc, pathToSignatureProperties + "etsi:SignatureProductionPlace/etsi:CountryName/text()", prefix2Namespace,
                "Germany");

        // signer role
        checkXpath(doc, pathToSignatureProperties + "etsi:SignerRole/etsi:ClaimedRoles/etsi:ClaimedRole[1]/text()",
                prefix2Namespace, "test");
        checkXpath(doc, pathToSignatureProperties + "etsi:SignerRole/etsi:ClaimedRoles/etsi:ClaimedRole[2]/TestRole/text()",
                prefix2Namespace, "TestRole");
        checkXpath(doc, pathToSignatureProperties + "etsi:SignerRole/etsi:CertifiedRoles/etsi:CertifiedRole/text()",
                prefix2Namespace, "Ahixl6upD6av8N7pEvDABhEL6hM=");
        checkXpath(doc, pathToSignatureProperties + "etsi:SignerRole/etsi:CertifiedRoles/etsi:CertifiedRole/@Encoding",
                prefix2Namespace, "http://uri.etsi.org/01903/v1.2.2#DER");
        checkXpath(doc, pathToSignatureProperties + "etsi:SignerRole/etsi:CertifiedRoles/etsi:CertifiedRole/@Id",
                prefix2Namespace, "IdCertifiedRole");

        String pathToDataObjectProperties = "/ds:Signature/ds:Object/etsi:QualifyingProperties/etsi:SignedProperties/etsi:SignedDataObjectProperties/";
        //DataObjectFormat
        checkXpath(doc, pathToDataObjectProperties + "etsi:DataObjectFormat/etsi:Description/text()", prefix2Namespace,
                "invoice");
        checkXpath(doc, pathToDataObjectProperties + "etsi:DataObjectFormat/etsi:MimeType/text()", prefix2Namespace,
                "text/xml");
        checkXpath(doc, pathToDataObjectProperties + "etsi:DataObjectFormat/@ObjectReference", prefix2Namespace, "#", true);
        checkXpath(doc, pathToDataObjectProperties + "etsi:DataObjectFormat/etsi:ObjectIdentifier/etsi:Identifier/text()",
                prefix2Namespace, "1.2.840.113549.1.9.16.6.2");
        checkXpath(doc, pathToDataObjectProperties + "etsi:DataObjectFormat/etsi:ObjectIdentifier/etsi:Identifier/@Qualifier",
                prefix2Namespace, "OIDAsURN");
        checkXpath(doc, pathToDataObjectProperties + "etsi:DataObjectFormat/etsi:ObjectIdentifier/etsi:Description/text()",
                prefix2Namespace, "identifier desc");
        checkXpath(doc, pathToDataObjectProperties
                + "etsi:DataObjectFormat/etsi:ObjectIdentifier/etsi:DocumentationReferences/etsi:DocumentationReference[1]/text()",
                prefix2Namespace, "http://test.com/dataobject.format.doc.ref1.txt");
        checkXpath(doc, pathToDataObjectProperties
                + "etsi:DataObjectFormat/etsi:ObjectIdentifier/etsi:DocumentationReferences/etsi:DocumentationReference[2]/text()",
                prefix2Namespace, "http://test.com/dataobject.format.doc.ref2.txt");

        //commitment
        checkXpath(doc, pathToDataObjectProperties + "etsi:CommitmentTypeIndication/etsi:CommitmentTypeId/etsi:Identifier/text()",
                prefix2Namespace, "1.2.840.113549.1.9.16.6.4");
        checkXpath(doc, pathToDataObjectProperties
                + "etsi:CommitmentTypeIndication/etsi:CommitmentTypeId/etsi:Identifier/@Qualifier",
                prefix2Namespace, "OIDAsURI");
        checkXpath(doc, pathToDataObjectProperties
                + "etsi:CommitmentTypeIndication/etsi:CommitmentTypeId/etsi:Description/text()",
                prefix2Namespace, "description for commitment type ID");
        checkXpath(doc, pathToDataObjectProperties
                + "etsi:CommitmentTypeIndication/etsi:CommitmentTypeId/etsi:DocumentationReferences/etsi:DocumentationReference[1]/text()",
                prefix2Namespace, "http://test.com/commitment.ref1.txt");
        checkXpath(doc, pathToDataObjectProperties
                + "etsi:CommitmentTypeIndication/etsi:CommitmentTypeId/etsi:DocumentationReferences/etsi:DocumentationReference[2]/text()",
                prefix2Namespace, "http://test.com/commitment.ref2.txt");
        checkXpath(doc, pathToDataObjectProperties
                + "etsi:CommitmentTypeIndication/etsi:CommitmentTypeQualifiers/etsi:CommitmentTypeQualifier[1]/text()",
                prefix2Namespace, "commitment qualifier");
        checkXpath(doc, pathToDataObjectProperties
                + "etsi:CommitmentTypeIndication/etsi:CommitmentTypeQualifiers/etsi:CommitmentTypeQualifier[2]/C/text()",
                prefix2Namespace, "c");
    }

    /** With addSigningTime=false, no etsi:SigningTime element must be created. */
    @Test
    public void noSigningTime() throws Exception {
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setAddSigningTime(false);

        Document doc = testEnveloping();

        Map<String, String> prefix2Namespace = getPrefix2NamespaceMap();
        String pathToSignatureProperties = getPathToSignatureProperties();
        checkNode(doc, pathToSignatureProperties + "etsi:SigningTime", prefix2Namespace, false);
    }

    /** Fresh properties without a certificate: SigningTime present, SigningCertificate absent. */
    @Test
    public void noSigningCertificate() throws Exception {
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties newProps = new XAdESSignatureProperties();
        newProps.setAddSigningTime(true);
        endpoint.getConfiguration().setProperties(newProps);

        Document doc = testEnveloping();

        Map<String, String> prefix2Namespace = getPrefix2NamespaceMap();
        String pathToSignatureProperties = getPathToSignatureProperties();
        checkNode(doc, pathToSignatureProperties + "etsi:SigningTime", prefix2Namespace, true);
        checkNode(doc, pathToSignatureProperties + "etsi:SigningCertificate", prefix2Namespace, false);
    }

    /** Properties backed by a certificate chain still produce a signing certificate element. */
    @Test
    public void certificateChain() throws Exception {
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        endpoint.getConfiguration().setProperties(new CertChainXAdESSignatureProperties());

        Document doc = testEnveloping();

        Map<String, String> prefix2Namespace = getPrefix2NamespaceMap();
        String pathToSignatureProperties = getPathToSignatureProperties();
        // signing certificate
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/etsi:CertDigest/ds:DigestMethod/@Algorithm",
                prefix2Namespace, DigestMethod.SHA256);
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/etsi:CertDigest/ds:DigestValue/text()",
                prefix2Namespace, NOT_EMPTY);
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/etsi:IssuerSerial/ds:X509IssuerName/text()",
                prefix2Namespace, NOT_EMPTY);
        checkXpath(doc, pathToSignatureProperties + "etsi:SigningCertificate/etsi:Cert/etsi:IssuerSerial/ds:X509SerialNumber/text()",
                prefix2Namespace, NOT_EMPTY);
    }

    /** No XAdES properties set at all: no QualifyingProperties element should appear. */
    @Test
    public void noPropertiesSpecified() throws Exception {
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = new XAdESSignatureProperties();
        props.setAddSigningTime(false);
        endpoint.getConfiguration().setProperties(props);

        Document doc = testEnveloping();

        // expecting no Qualifying Properties
        checkNode(doc, "/ds:Signature/ds:Object/etsi:QualifyingProperties", getPrefix2NamespaceMap(), false);
    }

    /** SIG_POLICY_IMPLIED produces SignaturePolicyImplied instead of SignaturePolicyId. */
    @Test
    public void policyImplied() throws Exception {
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setSignaturePolicy(XAdESSignatureProperties.SIG_POLICY_IMPLIED);

        Document doc = testEnveloping();

        String pathToSignatureProperties = getPathToSignatureProperties();
        checkNode(doc, pathToSignatureProperties + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyId",
                getPrefix2NamespaceMap(), false);
        checkNode(doc, pathToSignatureProperties + "etsi:SignaturePolicyIdentifier/etsi:SignaturePolicyImplied",
                getPrefix2NamespaceMap(), true);
    }

    /** SIG_POLICY_NONE suppresses the SignaturePolicyIdentifier element entirely. */
    @Test
    public void policyNone() throws Exception {
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setSignaturePolicy(XAdESSignatureProperties.SIG_POLICY_NONE);

        Document doc = testEnveloping();

        String pathToSignatureProperties = getPathToSignatureProperties();
        checkNode(doc, pathToSignatureProperties + "etsi:SignaturePolicyIdentifier", getPrefix2NamespaceMap(), false);
    }

    /** Setting every property to the empty string is treated like no properties at all. */
    @Test
    public void allPropertiesEmpty() throws Exception {
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = new XAdESSignatureProperties();
        props.setAddSigningTime(false);
        props.setCommitmentTypeId("");
        props.setCommitmentTypeIdDescription("");
        props.setCommitmentTypeIdQualifier("");
        props.setDataObjectFormatDescription("");
        props.setDataObjectFormatIdentifier("");
        props.setDataObjectFormatIdentifierDescription("");
        props.setDataObjectFormatIdentifierQualifier("");
        props.setDataObjectFormatMimeType("");
        props.setDigestAlgorithmForSigningCertificate("");
        props.setSignaturePolicy("None");
        props.setSigPolicyId("");
        props.setSigPolicyIdDescription("");
        props.setSigPolicyIdQualifier("");
        props.setSignaturePolicyDigestAlgorithm("");
        props.setSignaturePolicyDigestValue("");
        props.setSignatureProductionPlaceCity("");
        props.setSignatureProductionPlaceCountryName("");
        props.setSignatureProductionPlacePostalCode("");
        props.setSignatureProductionPlaceStateOrProvince("");
        endpoint.getConfiguration().setProperties(props);

        Document doc = testEnveloping();

        // expecting no Qualifying Properties
        checkNode(doc, "/ds:Signature/ds:Object/etsi:QualifyingProperties", getPrefix2NamespaceMap(), true);
    }

    /** An empty signatureId option must still produce the QualifyingProperties element. */
    @Test
    public void emptySignatureId() throws Exception {
        Document doc = testEnveloping("direct:emptySignatureId");
        checkNode(doc, "/ds:Signature/ds:Object/etsi:QualifyingProperties", getPrefix2NamespaceMap(), true);
    }

    /** Custom prefix and XAdES 1.1.1 namespace must be used for the qualifying properties. */
    @Test
    public void prefixAndNamespace() throws Exception {
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setPrefix("p");
        props.setNamespace(XAdESSignatureProperties.HTTP_URI_ETSI_ORG_01903_V1_1_1);
        props.setCommitmentTypeIdDescription(null);
        props.setCommitmentTypeIdDocumentationReferences(Collections.emptyList());
        props.setCommitmentTypeIdQualifier(null);
        props.setDataObjectFormatIdentifierDescription(null);
        props.setDataObjectFormatIdentifierDocumentationReferences(Collections.emptyList());
        props.setDataObjectFormatIdentifierQualifier(null);
        props.setSigPolicyIdDescription(null);
        props.setSigPolicyIdDocumentationReferences(Collections.emptyList());
        props.setSigPolicyIdQualifier(null);
        // the following lists must be set to empty because otherwise they would contain XML fragments with a wrong namespace
        props.setSigPolicyQualifiers(Collections.emptyList());
        props.setSignerClaimedRoles(Collections.emptyList());
        props.setCommitmentTypeQualifiers(Collections.emptyList());

        Document doc = testEnveloping();

        Map<String, String> prefix2Namespace = new TreeMap<>();
        prefix2Namespace.put("ds", XMLSignature.XMLNS);
        prefix2Namespace.put("etsi", XAdESSignatureProperties.HTTP_URI_ETSI_ORG_01903_V1_1_1);
        XPathExpression expr = getXpath("/ds:Signature/ds:Object/etsi:QualifyingProperties", prefix2Namespace);
        Object result = expr.evaluate(doc, XPathConstants.NODE);
        assertNotNull(result);
        Node node = (Node) result;
        assertEquals("p", node.getPrefix());
        assertEquals(XAdESSignatureProperties.HTTP_URI_ETSI_ORG_01903_V1_1_1, node.getNamespaceURI());
    }

    /** Message headers override prefix, namespace, Ids and data object format encoding. */
    @Test
    public void headers() throws Exception {
        Map<String, Object> header = new TreeMap<>();
        header.put(XmlSignatureConstants.HEADER_XADES_PREFIX, "ns1");
        header.put(XmlSignatureConstants.HEADER_XADES_NAMESPACE, XAdESSignatureProperties.HTTP_URI_ETSI_ORG_01903_V1_2_2);
        header.put(XmlSignatureConstants.HEADER_XADES_QUALIFYING_PROPERTIES_ID, "QualId");
        header.put(XmlSignatureConstants.HEADER_XADES_SIGNED_DATA_OBJECT_PROPERTIES_ID, "ObjId");
        header.put(XmlSignatureConstants.HEADER_XADES_SIGNED_SIGNATURE_PROPERTIES_ID, "SigId");
        header.put(XmlSignatureConstants.HEADER_XADES_DATA_OBJECT_FORMAT_ENCODING, "base64");

        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        // the following lists must be set to empty because otherwise they would contain XML fragments with a wrong namespace
        props.setSigPolicyQualifiers(Collections.emptyList());
        props.setSignerClaimedRoles(Collections.emptyList());
        props.setCommitmentTypeQualifiers(Collections.emptyList());

        Document doc = testEnveloping("direct:enveloping", header);

        Map<String, String> prefix2Namespace = new TreeMap<>();
        prefix2Namespace.put("ds", XMLSignature.XMLNS);
        prefix2Namespace.put("etsi", XAdESSignatureProperties.HTTP_URI_ETSI_ORG_01903_V1_2_2);
        XPathExpression expr = getXpath("/ds:Signature/ds:Object/etsi:QualifyingProperties", prefix2Namespace);
        Object result = expr.evaluate(doc, XPathConstants.NODE);
        assertNotNull(result);
        Node node = (Node) result;
        assertEquals("ns1", node.getPrefix());
        assertEquals(XAdESSignatureProperties.HTTP_URI_ETSI_ORG_01903_V1_2_2, node.getNamespaceURI());
        checkXpath(doc, "/ds:Signature/ds:Object/etsi:QualifyingProperties/@Id", prefix2Namespace, "QualId");
        checkXpath(doc,
                "/ds:Signature/ds:Object/etsi:QualifyingProperties/etsi:SignedProperties/etsi:SignedDataObjectProperties/@Id",
                prefix2Namespace, "ObjId");
        checkXpath(doc,
                "/ds:Signature/ds:Object/etsi:QualifyingProperties/etsi:SignedProperties/etsi:SignedSignatureProperties/@Id",
                prefix2Namespace, "SigId");
        checkXpath(
                doc,
                "/ds:Signature/ds:Object/etsi:QualifyingProperties/etsi:SignedProperties/etsi:SignedDataObjectProperties/etsi:DataObjectFormat/etsi:Encoding/text()",
                prefix2Namespace, "base64");
    }

    /** Smoke test for the enveloped route. */
    @Test
    public void enveloped() throws Exception {
        setupMock();
        sendBody("direct:enveloped", payload);
        assertMockEndpointsSatisfied();
    }

    /** Smoke test for the detached route; the payload carries the ID attribute selected by the XPath filter. */
    @Test
    public void detached() throws Exception {
        String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + //
                "<ns:root xmlns:ns=\"http://test\"><a ID=\"myID\"><b>bValue</b></a></ns:root>";
        setupMock();
        sendBody("direct:detached", detachedPayload);
        assertMockEndpointsSatisfied();
    }

    @Test
    public void sigPolicyIdEmpty() throws Exception {
        testExceptionSigPolicyIdMissing("");
    }

    @Test
    public void sigPolicyIdNull() throws Exception {
        testExceptionSigPolicyIdMissing(null);
    }

    // Expects a validation error when the signature policy identifier is empty or null.
    private void testExceptionSigPolicyIdMissing(String value) throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:exception");
        mock.expectedMessageCount(1);
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setSigPolicyId(value);
        sendBody("direct:enveloping", payload, Collections.emptyMap());
        assertMockEndpointsSatisfied();
        checkThrownException(mock, XmlSignatureException.class,
                "The XAdES-EPES configuration is invalid. The signature policy identifier is missing.", null);
    }

    @Test
    public void sigPolicyDigestEmpty() throws Exception {
        testExceptionSigPolicyDigestMissing("");
    }

    @Test
    public void sigPolicyDigestNull() throws Exception {
        testExceptionSigPolicyDigestMissing(null);
    }

    // Expects a validation error when the signature policy digest value is empty or null.
    private void testExceptionSigPolicyDigestMissing(String value) throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:exception");
        mock.expectedMessageCount(1);
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setSignaturePolicyDigestValue(value);
        sendBody("direct:enveloping", payload, Collections.emptyMap());
        assertMockEndpointsSatisfied();
        checkThrownException(mock, XmlSignatureException.class,
                "The XAdES-EPES configuration is invalid. The digest value for the signature policy is missing.", null);
    }

    @Test
    public void sigPolicyDigestAlgoEmpty() throws Exception {
        testExceptionSigPolicyDigestAlgoMissing("");
    }

    @Test
    public void sigPolicyDigestAlgoNull() throws Exception {
        testExceptionSigPolicyDigestAlgoMissing(null);
    }

    // Expects a validation error when the signature policy digest algorithm is empty or null.
    private void testExceptionSigPolicyDigestAlgoMissing(String value) throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:exception");
        mock.expectedMessageCount(1);
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setSignaturePolicyDigestAlgorithm(value);
        sendBody("direct:enveloping", payload, Collections.emptyMap());
        assertMockEndpointsSatisfied();
        checkThrownException(mock, XmlSignatureException.class,
                "The XAdES-EPES configuration is invalid. The digest algorithm for the signature policy is missing.", null);
    }

    @Test
    public void invalidXmlFragmentForClaimedRole() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:exception");
        mock.expectedMessageCount(1);
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setSignerClaimedRoles(Collections.singletonList("<ClaimedRole>wrong XML fragment<ClaimedRole>")); // Element 'ClaimedRole' is not closed correctly
        sendBody("direct:enveloping", payload, Collections.emptyMap());
        assertMockEndpointsSatisfied();
        checkThrownException(
                mock,
                XmlSignatureException.class,
                "The XAdES configuration is invalid. The list of the claimed roles contains the invalid entry '<ClaimedRole>wrong XML fragment<ClaimedRole>'. An entry must either be a text or"
                        + " an XML fragment with the root element 'ClaimedRole' with the namespace 'http://uri.etsi.org/01903/v1.3.2#'.",
                null);
    }

    @Test
    public void invalidXmlFragmentForCommitmentTypeQualifier() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:exception");
        mock.expectedMessageCount(1);
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setCommitmentTypeQualifiers(
                Collections.singletonList("<CommitmentTypeQualifier>wrong XML fragment<CommitmentTypeQualifier>")); // end tag is not correct
        sendBody("direct:enveloping", payload, Collections.emptyMap());
        assertMockEndpointsSatisfied();
        checkThrownException(
                mock,
                XmlSignatureException.class,
                "The XAdES configuration is invalid. The list of the commitment type qualifiers contains the invalid entry '<CommitmentTypeQualifier>wrong XML fragment<CommitmentTypeQualifier>'."
                        + " An entry must either be a text or an XML fragment with the root element 'CommitmentTypeQualifier' with the namespace 'http://uri.etsi.org/01903/v1.3.2#'.",
                null);
    }

    @Test
    public void invalidXmlFragmentForSigPolicyQualifier() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:exception");
        mock.expectedMessageCount(1);
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setSigPolicyQualifiers(Collections.singletonList("<SigPolicyQualifier>wrong XML fragment<SigPolicyQualifier>")); // end tag is not correct
        sendBody("direct:enveloping", payload, Collections.emptyMap());
        assertMockEndpointsSatisfied();
        // NOTE(review): the 'signatue' typo below presumably mirrors the message
        // emitted by the component; do not "fix" it here without changing the component.
        checkThrownException(
                mock,
                XmlSignatureException.class,
                "The XAdES configuration is invalid. The list of the signatue policy qualifiers contains the invalid entry '<SigPolicyQualifier>wrong XML fragment<SigPolicyQualifier>'."
                        + " An entry must either be a text or an XML fragment with the root element 'SigPolicyQualifier' with the namespace 'http://uri.etsi.org/01903/v1.3.2#'.",
                null);
    }

    @Test
    public void invalidNamespaceForTheRootElementInXmlFragmentForSigPolicyQualifier() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:exception");
        mock.expectedMessageCount(1);
        XmlSignerEndpoint endpoint = getSignerEndpoint();
        XAdESSignatureProperties props = (XAdESSignatureProperties) endpoint.getConfiguration().getProperties();
        props.setSigPolicyQualifiers(Collections
                .singletonList(
                        "<SigPolicyQualifier xmlns=\"http://invalid.com\">XML fragment with wrong namespace for root element</SigPolicyQualifier>"));
        sendBody("direct:enveloping", payload, Collections.emptyMap());
        assertMockEndpointsSatisfied();
        checkThrownException(
                mock,
                XmlSignatureException.class,
                "The XAdES configuration is invalid. The root element 'SigPolicyQualifier' of the provided XML fragment "
                        + "'<SigPolicyQualifier xmlns=\"http://invalid.com\">XML fragment with wrong namespace for root element</SigPolicyQualifier>' has the invalid namespace 'http://invalid.com'."
                        + " The correct namespace is 'http://uri.etsi.org/01903/v1.3.2#'.",
                null);
    }

    @Test
    public void namespaceNull() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties.setNamespace(null));
    }

    @Test
    public void signingCertificateURIsNull() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties.setSigningCertificateURIs(null));
    }

    @Test
    public void sigPolicyInvalid() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties.setSignaturePolicy("invalid"));
    }

    @Test
    public void sigPolicyIdDocumentationReferencesNull() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        assertThrows(IllegalArgumentException.class,
                () -> xAdESSignatureProperties.setSigPolicyIdDocumentationReferences(null));
    }

    @Test
    public void sigPolicyIdDocumentationReferencesNullEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        final List<String> sigPolicyIdDocumentationReferences = Collections.<String> singletonList(null);
        assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties
                .setSigPolicyIdDocumentationReferences(sigPolicyIdDocumentationReferences));
    }

    @Test
    public void sigPolicyIdDocumentationReferencesEmptyEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        final List<String> sigPolicyIdDocumentationReferences = Collections.singletonList("");
        assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties
                .setSigPolicyIdDocumentationReferences(sigPolicyIdDocumentationReferences));
    }

    @Test
    public void dataObjectFormatIdentifierDocumentationReferencesNull() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        assertThrows(IllegalArgumentException.class,
                () -> xAdESSignatureProperties.setDataObjectFormatIdentifierDocumentationReferences(null));
    }

    @Test
    public void dataObjectFormatIdentifierDocumentationReferencesNullEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        final List<String> dataObjectFormatIdentifierDocumentationReferences = Collections.singletonList(null);
        assertThrows(IllegalArgumentException.class,
                () -> xAdESSignatureProperties.setDataObjectFormatIdentifierDocumentationReferences(
                        dataObjectFormatIdentifierDocumentationReferences));
    }

    @Test
    public void dataObjectFormatIdentifierDocumentationReferencesEmptyEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        final List<String> dataObjectFormatIdentifierDocumentationReferences = Collections.singletonList("");
        assertThrows(IllegalArgumentException.class,
                () -> xAdESSignatureProperties.setDataObjectFormatIdentifierDocumentationReferences(
                        dataObjectFormatIdentifierDocumentationReferences));
    }

    @Test
    public void signerClaimedRolesNull() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties.setSignerClaimedRoles(null));
    }

    @Test
    public void signerClaimedRolesNullEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        final List<String> signerClaimedRoles = Collections.singletonList(null);
        assertThrows(IllegalArgumentException.class,
                () -> xAdESSignatureProperties.setSignerClaimedRoles(signerClaimedRoles));
    }

    @Test
    public void signerClaimedRolesEmptyEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        final List<String> signerClaimedRoles = Collections.singletonList("");
        assertThrows(IllegalArgumentException.class,
                () -> xAdESSignatureProperties.setSignerClaimedRoles(signerClaimedRoles));
    }

    @Test
    public void signerCertifiedRolesNull() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties.setSignerCertifiedRoles(null));
    }

    @Test
    public void signerCertifiedRolesNullEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        final List<XAdESEncapsulatedPKIData> signerCertifiedRoles = Collections.singletonList(null);
        assertThrows(IllegalArgumentException.class,
                () -> xAdESSignatureProperties.setSignerCertifiedRoles(signerCertifiedRoles));
    }

    @Test
    public void commitmentTypeIdDocumentationReferencesNull() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        assertThrows(IllegalArgumentException.class,
                () -> xAdESSignatureProperties.setCommitmentTypeIdDocumentationReferences(null));
    }

    @Test
    public void commitmentTypeIdDocumentationReferencesNullEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties();
        final List<String> commitmentTypeIdDocumentationReferences = Collections.singletonList(null);
        assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties
                .setCommitmentTypeIdDocumentationReferences(commitmentTypeIdDocumentationReferences));
    }

    @Test
    public void commitmentTypeIdDocumentationReferencesEmptyEntry() throws Exception {
        final XAdESSignatureProperties xAdESSignatureProperties
= new XAdESSignatureProperties(); final List<String> commitmentTypeIdDocumentationReferences = Collections.singletonList(""); assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties .setCommitmentTypeIdDocumentationReferences(commitmentTypeIdDocumentationReferences)); } @Test public void commitmentTypeQualifiersNull() throws Exception { final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties(); assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties.setCommitmentTypeQualifiers(null)); } @Test public void commitmentTypeQualifiersNullEntry() throws Exception { final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties(); final List<String> commitmentTypeQualifiers = Collections.singletonList(null); assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties.setCommitmentTypeQualifiers(commitmentTypeQualifiers)); } @Test public void commitmentTypeQualifiersEmptyEntry() throws Exception { final XAdESSignatureProperties xAdESSignatureProperties = new XAdESSignatureProperties(); final List<String> commitmentTypeQualifiers = Collections.singletonList(""); assertThrows(IllegalArgumentException.class, () -> xAdESSignatureProperties.setCommitmentTypeQualifiers(commitmentTypeQualifiers)); } // private XmlSignerEndpoint getSignerEndpoint() { return (XmlSignerEndpoint) context().getEndpoint( "xmlsecurity-sign:xades?keyAccessor=#keyAccessorDefault&properties=#xmlSignatureProperties"); } private String getPathToSignatureProperties() { return "/ds:Signature/ds:Object/etsi:QualifyingProperties/etsi:SignedProperties/etsi:SignedSignatureProperties/"; } static Map<String, String> getPrefix2NamespaceMap() { Map<String, String> prefix2Namespace = new TreeMap<>(); prefix2Namespace.put("ds", XMLSignature.XMLNS); prefix2Namespace.put("etsi", XAdESSignatureProperties.HTTP_URI_ETSI_ORG_01903_V1_3_2); return prefix2Namespace; } private Document testEnveloping() throws 
InterruptedException, SAXException, IOException, ParserConfigurationException, Exception { return testEnveloping("direct:enveloping"); } protected Document testEnveloping(String fromUri) throws InterruptedException, SAXException, IOException, ParserConfigurationException, Exception { return testEnveloping(fromUri, Collections.<String, Object> emptyMap()); } protected Document testEnveloping(String fromUri, Map<String, Object> headers) throws InterruptedException, SAXException, IOException, ParserConfigurationException, Exception { MockEndpoint mock = setupMock(); sendBody(fromUri, payload, headers); assertMockEndpointsSatisfied(); Message message = getMessage(mock); byte[] body = message.getBody(byte[].class); Document doc = XmlSignatureHelper.newDocumentBuilder(true).parse(new ByteArrayInputStream(body)); validateAgainstSchema(doc); return doc; } private MockEndpoint setupMock() { MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMessageCount(1); return mock; } private static XmlSignatureProperties getXmlSignatureProperties(String alias) throws IOException, GeneralSecurityException { DefaultXAdESSignatureProperties props = new DefaultXAdESSignatureProperties(); props.setKeystore(TestKeystore.getKeyStore()); props.setAlias(alias); props.setAddSigningTime(true); props.setDigestAlgorithmForSigningCertificate(DigestMethod.SHA256); props.setSigningCertificateURIs(Collections.singletonList("http://certuri")); // policy props.setSignaturePolicy(XAdESSignatureProperties.SIG_POLICY_EXPLICIT_ID); props.setSigPolicyId("1.2.840.113549.1.9.16.6.1"); props.setSigPolicyIdQualifier("OIDAsURN"); props.setSigPolicyIdDescription("invoice version 3.1"); props.setSignaturePolicyDigestAlgorithm(DigestMethod.SHA256); props.setSignaturePolicyDigestValue("Ohixl6upD6av8N7pEvDABhEL6hM="); props.setSigPolicyQualifiers(Arrays .asList(new String[] { "<SigPolicyQualifier 
xmlns=\"http://uri.etsi.org/01903/v1.3.2#\"><SPURI>http://test.com/sig.policy.pdf</SPURI><SPUserNotice><ExplicitText>display text</ExplicitText>" + "</SPUserNotice></SigPolicyQualifier>", "category B" })); props.setSigPolicyIdDocumentationReferences(Arrays.asList(new String[] { "http://test.com/policy.doc.ref1.txt", "http://test.com/policy.doc.ref2.txt" })); // production place props.setSignatureProductionPlaceCity("Munich"); props.setSignatureProductionPlaceCountryName("Germany"); props.setSignatureProductionPlacePostalCode("80331"); props.setSignatureProductionPlaceStateOrProvince("Bavaria"); //role props.setSignerClaimedRoles(Arrays.asList(new String[] { "test", "<a:ClaimedRole xmlns:a=\"http://uri.etsi.org/01903/v1.3.2#\"><TestRole>TestRole</TestRole></a:ClaimedRole>" })); props.setSignerCertifiedRoles(Collections.singletonList(new XAdESEncapsulatedPKIData( "Ahixl6upD6av8N7pEvDABhEL6hM=", "http://uri.etsi.org/01903/v1.2.2#DER", "IdCertifiedRole"))); // data object format props.setDataObjectFormatDescription("invoice"); props.setDataObjectFormatMimeType("text/xml"); props.setDataObjectFormatIdentifier("1.2.840.113549.1.9.16.6.2"); props.setDataObjectFormatIdentifierQualifier("OIDAsURN"); props.setDataObjectFormatIdentifierDescription("identifier desc"); props.setDataObjectFormatIdentifierDocumentationReferences(Arrays.asList(new String[] { "http://test.com/dataobject.format.doc.ref1.txt", "http://test.com/dataobject.format.doc.ref2.txt" })); //commitment props.setCommitmentTypeId("1.2.840.113549.1.9.16.6.4"); props.setCommitmentTypeIdQualifier("OIDAsURI"); props.setCommitmentTypeIdDescription("description for commitment type ID"); props.setCommitmentTypeIdDocumentationReferences(Arrays.asList(new String[] { "http://test.com/commitment.ref1.txt", "http://test.com/commitment.ref2.txt" })); props.setCommitmentTypeQualifiers(Arrays.asList(new String[] { "commitment qualifier", "<c:CommitmentTypeQualifier 
xmlns:c=\"http://uri.etsi.org/01903/v1.3.2#\"><C>c</C></c:CommitmentTypeQualifier>" })); return props; } private void validateAgainstSchema(Document doc) throws Exception { SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); Source schema1 = new StreamSource(new File("target/test-classes/org/apache/camel/component/xmlsecurity/xades/XAdES.xsd")); Source schema2 = new StreamSource( new File( "target/test-classes/org/apache/camel/component/xmlsecurity/xades/xmldsig-core-schema.xsd")); Schema schema = factory.newSchema(new Source[] { schema2, schema1 }); Validator validator = schema.newValidator(); validator.validate(new DOMSource(doc)); } static void checkXpath(Document doc, String xpathString, final Map<String, String> prefix2Namespace, String expectedResult) throws XPathExpressionException { checkXpath(doc, xpathString, prefix2Namespace, expectedResult, false); } static void checkXpath( Document doc, String xpathString, final Map<String, String> prefix2Namespace, String expectedResult, boolean startsWith) throws XPathExpressionException { XPathExpression expr = getXpath(xpathString, prefix2Namespace); String result = (String) expr.evaluate(doc, XPathConstants.STRING); assertNotNull("The xpath " + xpathString + " returned a null value", result); if (startsWith) { assertTrue(result.startsWith(expectedResult)); } else if (NOT_EMPTY.equals(expectedResult)) { assertTrue(!result.isEmpty(), "Not empty result for xpath " + xpathString + " expected"); } else { assertEquals(expectedResult, result); } } private void checkNode(Document doc, String xpathString, final Map<String, String> prefix2Namespace, boolean exists) throws XPathExpressionException { XPathExpression expr = getXpath(xpathString, prefix2Namespace); Object result = expr.evaluate(doc, XPathConstants.NODE); if (exists) { assertNotNull(result, "The xpath " + xpathString + " returned null, expected was a node"); } else { assertNull(result, "The xpath " + xpathString + " returned a 
node, expected was none: "); } } static XPathExpression getXpath(String xpathString, final Map<String, String> prefix2Namespace) throws XPathExpressionException { XPathFactory xpathFactory = XPathFactory.newInstance(); XPath xpath = xpathFactory.newXPath(); NamespaceContext nc = new NamespaceContext() { @SuppressWarnings("rawtypes") @Override public Iterator getPrefixes(String namespaceURI) { return null; } @Override public String getPrefix(String namespaceURI) { return null; } @Override public String getNamespaceURI(String prefix) { return prefix2Namespace.get(prefix); } }; xpath.setNamespaceContext(nc); XPathExpression expr = xpath.compile(xpathString); return expr; } private Message getMessage(MockEndpoint mock) { List<Exchange> exs = mock.getExchanges(); assertNotNull(exs); assertEquals(1, exs.size()); Exchange ex = exs.get(0); Message mess = ex.getIn(); assertNotNull(mess); return mess; } private static class CertChainXAdESSignatureProperties extends XAdESSignatureProperties { private KeyStore keystore = getKeystore(); private String alias = "bob"; CertChainXAdESSignatureProperties() { setAddSigningTime(false); } @Override protected X509Certificate getSigningCertificate() throws Exception { return null; } @Override protected X509Certificate[] getSigningCertificateChain() throws Exception { Certificate[] certs = keystore.getCertificateChain(alias); X509Certificate[] result = new X509Certificate[certs.length]; int counter = 0; for (Certificate cert : certs) { result[counter] = (X509Certificate) cert; counter++; } return result; } private static KeyStore getKeystore() { try { return TestKeystore.getKeyStore(); } catch (Exception e) { throw new IllegalStateException(e); } } } }
/*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License
is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing permissions and limitations under
the License.
*/
package io.kubernetes.client.openapi.models;

import java.util.Iterator;
import java.util.List;

/**
 * Generated fluent-builder implementation for {@code V1beta2FlowSchemaStatus}.
 *
 * <p>Generated code — do not hand-edit; regenerate instead. It keeps a private list of
 * {@code V1beta2FlowSchemaConditionBuilder}s and mirrors every addition/removal into the
 * inherited {@code _visitables.get("conditions")} registry.
 */
public class V1beta2FlowSchemaStatusFluentImpl<
        A extends io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent<A>>
    extends io.kubernetes.client.fluent.BaseFluent<A>
    implements io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent<A> {
  public V1beta2FlowSchemaStatusFluentImpl() {}

  // Copy constructor: seeds this fluent from an existing status instance.
  public V1beta2FlowSchemaStatusFluentImpl(
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatus instance) {
    this.withConditions(instance.getConditions());
  }

  // Builders for the status conditions; kept in step with _visitables.get("conditions").
  private java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>
      conditions;

  // Inserts a condition at the given index; a negative index means "append".
  public A addToConditions(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item) {
    if (this.conditions == null) {
      this.conditions =
          new java.util.ArrayList<
              io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>();
    }
    io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder builder =
        new io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder(item);
    _visitables
        .get("conditions")
        .add(index >= 0 ? index : _visitables.get("conditions").size(), builder);
    this.conditions.add(index >= 0 ? index : conditions.size(), builder);
    return (A) this;
  }

  // Replaces the condition at the given index; out-of-range indices append instead.
  public A setToConditions(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item) {
    if (this.conditions == null) {
      this.conditions =
          new java.util.ArrayList<
              io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>();
    }
    io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder builder =
        new io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder(item);
    if (index < 0 || index >= _visitables.get("conditions").size()) {
      _visitables.get("conditions").add(builder);
    } else {
      _visitables.get("conditions").set(index, builder);
    }
    if (index < 0 || index >= conditions.size()) {
      conditions.add(builder);
    } else {
      conditions.set(index, builder);
    }
    return (A) this;
  }

  public A addToConditions(
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition... items) {
    if (this.conditions == null) {
      this.conditions =
          new java.util.ArrayList<
              io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item : items) {
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder builder =
          new io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder(item);
      _visitables.get("conditions").add(builder);
      this.conditions.add(builder);
    }
    return (A) this;
  }

  public A addAllToConditions(
      java.util.Collection<io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition> items) {
    if (this.conditions == null) {
      this.conditions =
          new java.util.ArrayList<
              io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item : items) {
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder builder =
          new io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder(item);
      _visitables.get("conditions").add(builder);
      this.conditions.add(builder);
    }
    return (A) this;
  }

  // Removal relies on builder equality with a builder freshly wrapped around each item.
  public A removeFromConditions(
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition... items) {
    for (io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item : items) {
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder builder =
          new io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder(item);
      _visitables.get("conditions").remove(builder);
      if (this.conditions != null) {
        this.conditions.remove(builder);
      }
    }
    return (A) this;
  }

  public A removeAllFromConditions(
      java.util.Collection<io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition> items) {
    for (io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item : items) {
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder builder =
          new io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder(item);
      _visitables.get("conditions").remove(builder);
      if (this.conditions != null) {
        this.conditions.remove(builder);
      }
    }
    return (A) this;
  }

  // Removes every builder matching the predicate from both the local list and _visitables.
  public A removeMatchingFromConditions(
      java.util.function.Predicate<
              io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>
          predicate) {
    if (conditions == null) return (A) this;
    final Iterator<io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder> each =
        conditions.iterator();
    final List visitables = _visitables.get("conditions");
    while (each.hasNext()) {
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder builder = each.next();
      if (predicate.test(builder)) {
        visitables.remove(builder);
        each.remove();
      }
    }
    return (A) this;
  }

  /**
   * This method has been deprecated, please use method buildConditions instead.
   *
   * @return The buildable object.
   */
  @java.lang.Deprecated
  public java.util.List<io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition>
      getConditions() {
    return conditions != null ? build(conditions) : null;
  }

  // Materializes the builders into condition objects; null when no conditions were set.
  public java.util.List<io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition>
      buildConditions() {
    return conditions != null ? build(conditions) : null;
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition buildCondition(
      java.lang.Integer index) {
    return this.conditions.get(index).build();
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition buildFirstCondition() {
    return this.conditions.get(0).build();
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition buildLastCondition() {
    return this.conditions.get(conditions.size() - 1).build();
  }

  // Returns the first condition matching the predicate, or null when none matches.
  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition buildMatchingCondition(
      java.util.function.Predicate<
              io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>
          predicate) {
    for (io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder item : conditions) {
      if (predicate.test(item)) {
        return item.build();
      }
    }
    return null;
  }

  public java.lang.Boolean hasMatchingCondition(
      java.util.function.Predicate<
              io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>
          predicate) {
    for (io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder item : conditions) {
      if (predicate.test(item)) {
        return true;
      }
    }
    return false;
  }

  // Replaces all conditions; old builders are purged from _visitables before re-adding.
  public A withConditions(
      java.util.List<io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition> conditions) {
    if (this.conditions != null) {
      _visitables.get("conditions").removeAll(this.conditions);
    }
    if (conditions != null) {
      this.conditions = new java.util.ArrayList();
      for (io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item : conditions) {
        this.addToConditions(item);
      }
    } else {
      this.conditions = null;
    }
    return (A) this;
  }

  // NOTE(review): unlike the List overload above, this overload only clears the local list
  // and does NOT remove the previous builders from _visitables.get("conditions") —
  // presumably generator behavior; verify against upstream generated sources.
  public A withConditions(
      io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition... conditions) {
    if (this.conditions != null) {
      this.conditions.clear();
    }
    if (conditions != null) {
      for (io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item : conditions) {
        this.addToConditions(item);
      }
    }
    return (A) this;
  }

  public java.lang.Boolean hasConditions() {
    return conditions != null && !conditions.isEmpty();
  }

  // Nested-builder entry points; end with and()/endCondition() to write back via setToConditions.
  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<A>
      addNewCondition() {
    return new io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluentImpl
        .ConditionsNestedImpl();
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<A>
      addNewConditionLike(io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item) {
    return new io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluentImpl
        .ConditionsNestedImpl(-1, item);
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<A>
      setNewConditionLike(
          java.lang.Integer index,
          io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item) {
    return new io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluentImpl
        .ConditionsNestedImpl(index, item);
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<A>
      editCondition(java.lang.Integer index) {
    if (conditions.size() <= index)
      throw new RuntimeException("Can't edit conditions. Index exceeds size.");
    return setNewConditionLike(index, buildCondition(index));
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<A>
      editFirstCondition() {
    if (conditions.size() == 0)
      throw new RuntimeException("Can't edit first conditions. The list is empty.");
    return setNewConditionLike(0, buildCondition(0));
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<A>
      editLastCondition() {
    int index = conditions.size() - 1;
    if (index < 0) throw new RuntimeException("Can't edit last conditions. The list is empty.");
    return setNewConditionLike(index, buildCondition(index));
  }

  public io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<A>
      editMatchingCondition(
          java.util.function.Predicate<
                  io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder>
              predicate) {
    int index = -1;
    for (int i = 0; i < conditions.size(); i++) {
      if (predicate.test(conditions.get(i))) {
        index = i;
        break;
      }
    }
    if (index < 0) throw new RuntimeException("Can't edit matching conditions. No match found.");
    return setNewConditionLike(index, buildCondition(index));
  }

  // NOTE(review): equals compares only the local conditions list, while hashCode also mixes
  // in super.hashCode() — generated asymmetry; confirm against the fluent generator.
  public boolean equals(java.lang.Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    V1beta2FlowSchemaStatusFluentImpl that = (V1beta2FlowSchemaStatusFluentImpl) o;
    if (conditions != null ? !conditions.equals(that.conditions) : that.conditions != null)
      return false;
    return true;
  }

  public int hashCode() {
    return java.util.Objects.hash(conditions, super.hashCode());
  }

  // Nested builder: edits one condition, then writes it back into the parent on and().
  public class ConditionsNestedImpl<N>
      extends io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionFluentImpl<
          io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<N>>
      implements io.kubernetes.client.openapi.models.V1beta2FlowSchemaStatusFluent.ConditionsNested<
              N>,
          io.kubernetes.client.fluent.Nested<N> {
    ConditionsNestedImpl(
        java.lang.Integer index,
        io.kubernetes.client.openapi.models.V1beta2FlowSchemaCondition item) {
      this.index = index;
      this.builder =
          new io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder(this, item);
    }

    ConditionsNestedImpl() {
      this.index = -1;
      this.builder =
          new io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder(this);
    }

    io.kubernetes.client.openapi.models.V1beta2FlowSchemaConditionBuilder builder;
    java.lang.Integer index;

    // Writes the edited condition back into the parent fluent and returns to it.
    public N and() {
      return (N) V1beta2FlowSchemaStatusFluentImpl.this.setToConditions(index, builder.build());
    }

    public N endCondition() {
      return and();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.twill.zookeeper;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import org.apache.twill.internal.zookeeper.InMemoryZKServer;
import org.apache.twill.internal.zookeeper.KillZKSession;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.server.auth.DigestAuthenticationProvider;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.net.ServerSocket;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Integration tests for {@link ZKClientService} and related helpers, running
 * against an {@link InMemoryZKServer}. Each test starts its own server (and
 * client) and stops them in {@code finally} blocks so failures do not leak
 * processes into subsequent tests.
 */
public class ZKClientTest {

  private static final Logger LOG = LoggerFactory.getLogger(ZKClientTest.class);

  @ClassRule
  public static TemporaryFolder tmpFolder = new TemporaryFolder();

  /**
   * Verifies that a connect string with a chroot suffix ("/chroot") transparently
   * prefixes all paths created through the client.
   */
  @Test
  public void testChroot() throws Exception {
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setTickTime(1000).build();
    zkServer.startAndWait();

    try {
      ZKClientService client = ZKClientService.Builder.of(zkServer.getConnectionStr() + "/chroot").build();
      client.startAndWait();
      try {
        List<OperationFuture<String>> futures = Lists.newArrayList();
        futures.add(client.create("/test1/test2", null, CreateMode.PERSISTENT));
        futures.add(client.create("/test1/test3", null, CreateMode.PERSISTENT));
        Futures.successfulAsList(futures).get();

        Assert.assertNotNull(client.exists("/test1/test2").get());
        Assert.assertNotNull(client.exists("/test1/test3").get());
      } finally {
        client.stopAndWait();
      }
    } finally {
      zkServer.stopAndWait();
    }
  }

  /**
   * Verifies that creating a deep path auto-creates all missing intermediate
   * nodes, and that only the leaf node carries the supplied data.
   */
  @Test
  public void testCreateParent() throws ExecutionException, InterruptedException {
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setTickTime(1000).build();
    zkServer.startAndWait();

    try {
      ZKClientService client = ZKClientService.Builder.of(zkServer.getConnectionStr()).build();
      client.startAndWait();
      try {
        String path = client.create("/test1/test2/test3/test4/test5",
                                    "testing".getBytes(), CreateMode.PERSISTENT_SEQUENTIAL).get();
        Assert.assertTrue(path.startsWith("/test1/test2/test3/test4/test5"));

        // Intermediate parents are created with no data.
        String dataPath = "";
        for (int i = 1; i <= 4; i++) {
          dataPath = dataPath + "/test" + i;
          Assert.assertNull(client.getData(dataPath).get().getData());
        }
        Assert.assertTrue(Arrays.equals("testing".getBytes(), client.getData(path).get().getData()));
      } finally {
        client.stopAndWait();
      }
    } finally {
      zkServer.stopAndWait();
    }
  }

  /**
   * Verifies {@link ZKClient#getChildren(String)} returns the expected child set.
   */
  @Test
  public void testGetChildren() throws ExecutionException, InterruptedException {
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setTickTime(1000).build();
    zkServer.startAndWait();

    try {
      ZKClientService client = ZKClientService.Builder.of(zkServer.getConnectionStr()).build();
      client.startAndWait();
      try {
        client.create("/test", null, CreateMode.PERSISTENT).get();
        Assert.assertTrue(client.getChildren("/test").get().getChildren().isEmpty());

        Futures.allAsList(ImmutableList.of(client.create("/test/c1", null, CreateMode.EPHEMERAL),
                                           client.create("/test/c2", null, CreateMode.EPHEMERAL))).get();

        NodeChildren nodeChildren = client.getChildren("/test").get();
        Assert.assertEquals(2, nodeChildren.getChildren().size());
        Assert.assertEquals(ImmutableSet.of("c1", "c2"), ImmutableSet.copyOf(nodeChildren.getChildren()));
      } finally {
        client.stopAndWait();
      }
    } finally {
      zkServer.stopAndWait();
    }
  }

  /**
   * Verifies that {@link ZKClient#setData(String, byte[])} replaces a node's data.
   */
  @Test
  public void testSetData() throws ExecutionException, InterruptedException {
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setTickTime(1000).build();
    zkServer.startAndWait();

    try {
      ZKClientService client = ZKClientService.Builder.of(zkServer.getConnectionStr()).build();
      client.startAndWait();
      // Fix: stop the client in a finally block (previously it was never
      // stopped, leaking its connection/threads), consistent with all other
      // tests in this class.
      try {
        client.create("/test", null, CreateMode.PERSISTENT).get();
        Assert.assertNull(client.getData("/test").get().getData());

        client.setData("/test", "testing".getBytes()).get();
        Assert.assertTrue(Arrays.equals("testing".getBytes(), client.getData("/test").get().getData()));
      } finally {
        client.stopAndWait();
      }
    } finally {
      zkServer.stopAndWait();
    }
  }

  /**
   * Verifies that {@link ZKClients#reWatchOnExpire} re-attaches watchers after a
   * session expiry: the watcher set before killing the session still observes
   * the node deletion performed after reconnect.
   */
  @Test
  public void testExpireRewatch() throws InterruptedException, IOException, ExecutionException {
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setTickTime(1000).build();
    zkServer.startAndWait();

    try {
      final CountDownLatch expireReconnectLatch = new CountDownLatch(1);
      final AtomicBoolean expired = new AtomicBoolean(false);
      final ZKClientService client = ZKClientServices.delegate(ZKClients.reWatchOnExpire(
        ZKClientService.Builder.of(zkServer.getConnectionStr())
          .setSessionTimeout(2000)
          .setConnectionWatcher(new Watcher() {
            @Override
            public void process(WatchedEvent event) {
              if (event.getState() == Event.KeeperState.Expired) {
                expired.set(true);
              } else if (event.getState() == Event.KeeperState.SyncConnected && expired.compareAndSet(true, true)) {
                // Count down only on a reconnect that follows an expiry.
                expireReconnectLatch.countDown();
              }
            }
          }).build()));
      client.startAndWait();

      try {
        final BlockingQueue<Watcher.Event.EventType> events = new LinkedBlockingQueue<>();
        client.exists("/expireRewatch", new Watcher() {
          @Override
          public void process(final WatchedEvent event) {
            // Re-register this watcher, then record the observed event type.
            Futures.addCallback(client.exists("/expireRewatch", this), new FutureCallback<Stat>() {
              @Override
              public void onSuccess(Stat result) {
                events.add(event.getType());
              }

              @Override
              public void onFailure(Throwable t) {
                LOG.error("Failed to call exists on /expireRewatch", t);
              }
            });
          }
        });

        client.create("/expireRewatch", null, CreateMode.PERSISTENT);
        Assert.assertEquals(Watcher.Event.EventType.NodeCreated, events.poll(60, TimeUnit.SECONDS));

        KillZKSession.kill(client.getZooKeeperSupplier().get(), zkServer.getConnectionStr(), 10000);

        Assert.assertTrue(expireReconnectLatch.await(60, TimeUnit.SECONDS));

        // Keep trying to delete the node until it succeeds.
        while (ZKOperations.ignoreError(client.delete("/expireRewatch"), KeeperException.class, null).get() == null) {
          LOG.info("Delete failed. Retrying to delete /expireRewatch");
          TimeUnit.MILLISECONDS.sleep(10);
        }

        Assert.assertEquals(Watcher.Event.EventType.NodeDeleted, events.poll(60, TimeUnit.SECONDS));
      } finally {
        client.stopAndWait();
      }
    } finally {
      zkServer.stopAndWait();
    }
  }

  /**
   * Verifies that {@link ZKClients#retryOnFailure} retries an operation issued
   * while the server is down, and that it completes once the server restarts on
   * the same port with the same data directory.
   */
  @Test
  public void testRetry() throws ExecutionException, InterruptedException, TimeoutException, IOException {
    File dataDir = tmpFolder.newFolder();
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setDataDir(dataDir).setTickTime(1000).build();
    zkServer.startAndWait();
    int port = zkServer.getLocalAddress().getPort();

    final CountDownLatch disconnectLatch = new CountDownLatch(1);
    ZKClientService client = ZKClientServices.delegate(ZKClients.retryOnFailure(
      ZKClientService.Builder.of(zkServer.getConnectionStr()).setConnectionWatcher(new Watcher() {
        @Override
        public void process(WatchedEvent event) {
          if (event.getState() == Event.KeeperState.Disconnected) {
            disconnectLatch.countDown();
          }
        }
      }).build(), RetryStrategies.fixDelay(0, TimeUnit.SECONDS)));

    final CountDownLatch createLatch = new CountDownLatch(1);
    client.startAndWait();
    try {
      zkServer.stopAndWait();
      Assert.assertTrue(disconnectLatch.await(1, TimeUnit.SECONDS));

      // Issue a create while the server is down; the retry wrapper should keep
      // retrying until the server comes back.
      Futures.addCallback(client.create("/testretry/test", null, CreateMode.PERSISTENT),
                          new FutureCallback<String>() {
        @Override
        public void onSuccess(String result) {
          createLatch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
          t.printStackTrace(System.out);
        }
      });

      TimeUnit.SECONDS.sleep(2);
      zkServer = InMemoryZKServer.builder()
        .setDataDir(dataDir)
        .setAutoCleanDataDir(true)
        .setPort(port)
        .setTickTime(1000)
        .build();
      zkServer.startAndWait();

      try {
        Assert.assertTrue(createLatch.await(10, TimeUnit.SECONDS));
      } finally {
        zkServer.stopAndWait();
      }
    } finally {
      client.stopAndWait();
    }
  }

  /**
   * Verifies digest-based ACLs: a node created world-readable but admin-only
   * for its creator rejects writes from an unauthenticated client until the
   * ACL is opened up.
   */
  @Test
  public void testACL() throws IOException, ExecutionException, InterruptedException, NoSuchAlgorithmException {
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setDataDir(tmpFolder.newFolder()).setTickTime(1000).build();
    zkServer.startAndWait();

    try {
      String userPass = "user:pass";
      String digest = DigestAuthenticationProvider.generateDigest(userPass);

      // Creates two zkclients
      ZKClientService zkClient = ZKClientService.Builder
                                                .of(zkServer.getConnectionStr())
                                                .addAuthInfo("digest", userPass.getBytes())
                                                .build();
      zkClient.startAndWait();

      ZKClientService noAuthClient = ZKClientService.Builder.of(zkServer.getConnectionStr()).build();
      noAuthClient.startAndWait();

      // Create a node that is readable by all client, but admin for the creator
      String path = "/testacl";
      zkClient.create(path, "test".getBytes(), CreateMode.PERSISTENT,
                      ImmutableList.of(
                        new ACL(ZooDefs.Perms.READ, ZooDefs.Ids.ANYONE_ID_UNSAFE),
                        new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.AUTH_IDS)
                      )).get();

      // Verify the ACL
      ACLData aclData = zkClient.getACL(path).get();
      Assert.assertEquals(2, aclData.getACL().size());
      ACL acl = aclData.getACL().get(1);
      Assert.assertEquals(ZooDefs.Perms.ALL, acl.getPerms());
      Assert.assertEquals("digest", acl.getId().getScheme());
      Assert.assertEquals(digest, acl.getId().getId());

      Assert.assertArrayEquals("test".getBytes(), noAuthClient.getData(path).get().getData());

      // When tries to write using the no-auth zk client, it should fail.
      try {
        noAuthClient.setData(path, "test2".getBytes()).get();
        Assert.fail();
      } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof KeeperException.NoAuthException);
      }

      // Change ACL to make it open for all
      zkClient.setACL(path, ImmutableList.of(new ACL(ZooDefs.Perms.WRITE, ZooDefs.Ids.ANYONE_ID_UNSAFE))).get();

      // Write again with the non-auth client, now should succeed.
      noAuthClient.setData(path, "test2".getBytes()).get();

      noAuthClient.stopAndWait();
      zkClient.stopAndWait();
    } finally {
      zkServer.stopAndWait();
    }
  }

  @Test (timeout = 120000L)
  public void testDeadlock() throws IOException, InterruptedException {
    // This is to test deadlock bug as described in (TWILL-110)
    // This test has very high chance to get deadlock before the bug fix, hence failed with timeout.
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setDataDir(tmpFolder.newFolder()).build();
    zkServer.startAndWait();
    try {
      for (int i = 0; i < 5000; i++) {
        final ZKClientService zkClient = ZKClientService.Builder.of(zkServer.getConnectionStr()).build();
        zkClient.addConnectionWatcher(new Watcher() {
          @Override
          public void process(WatchedEvent event) {
            LOG.debug("Connection event: {}", event);
          }
        });
        zkClient.startAndWait();
        zkClient.stopAndWait();
      }
    } finally {
      zkServer.stopAndWait();
    }
  }

  /**
   * Verifies the client can be stopped cleanly while it is still attempting to
   * connect (the "server" here accepts and immediately closes connections, so a
   * real session is never established).
   */
  @Test
  public void testStop() throws IOException, InterruptedException, ExecutionException {
    try (final ServerSocket serverSocket = new ServerSocket(0)) {
      // A latch to make sure at least one connection attempt from the zk client has been made
      final CountDownLatch connectLatch = new CountDownLatch(1);
      Thread serverThread = new Thread() {
        public void run() {
          try {
            while (!interrupted()) {
              serverSocket.accept().close();
              connectLatch.countDown();
            }
          } catch (Exception e) {
            // no-op
          }
        }
      };
      serverThread.start();

      ZKClientService zkClient = ZKClientService.Builder.of("localhost:" + serverSocket.getLocalPort()).build();
      zkClient.start();
      Assert.assertTrue(connectLatch.await(10, TimeUnit.SECONDS));
      zkClient.stopAndWait();
      serverThread.interrupt();
    }
  }

  /**
   * Verifies {@link ZKClients#namespace}: request paths and results returned to
   * the caller are namespace-relative, while the underlying nodes live under
   * the namespace prefix on the real client.
   */
  @Test
  public void testNamespace() throws ExecutionException, InterruptedException {
    InMemoryZKServer zkServer = InMemoryZKServer.builder().setTickTime(1000).build();
    zkServer.startAndWait();

    try {
      ZKClientService zkClient = ZKClientService.Builder
        .of(zkServer.getConnectionStr())
        .build();
      zkClient.startAndWait();

      ZKClient zk = ZKClients.namespace(zkClient, "/test");

      // Creating "/" under the namespace should create the "/test" node at the root.
      OperationFuture<String> createFuture = zk.create("/", null, CreateMode.PERSISTENT);
      // Shouldn't have namespace as prefix for path returned from the future.
      Assert.assertEquals("/", createFuture.getRequestPath());
      Assert.assertEquals("/", createFuture.get());

      // Create a path under the namespace
      createFuture = zk.create("/subpath", null, CreateMode.PERSISTENT);
      Assert.assertEquals("/subpath", createFuture.getRequestPath());
      Assert.assertEquals("/subpath", createFuture.get());

      // Check for exists
      OperationFuture<Stat> existsFuture = zk.exists("/subpath");
      Assert.assertEquals("/subpath", existsFuture.getRequestPath());
      Assert.assertNotNull(existsFuture.get());

      // Put some data
      OperationFuture<Stat> setFuture = zk.setData("/subpath", "hello".getBytes());
      Assert.assertEquals("/subpath", setFuture.getRequestPath());
      Assert.assertNotNull(setFuture.get());

      // Read the data back
      OperationFuture<NodeData> getFuture = zk.getData("/subpath");
      Assert.assertEquals("/subpath", getFuture.getRequestPath());
      Assert.assertArrayEquals("hello".getBytes(), getFuture.get().getData());

      // Delete the sub path
      OperationFuture<String> deleteFuture = zk.delete("/subpath");
      Assert.assertEquals("/subpath", deleteFuture.getRequestPath());
      Assert.assertEquals("/subpath", deleteFuture.get());

      // Delete the namespace root
      deleteFuture = zk.delete("/");
      Assert.assertEquals("/", deleteFuture.getRequestPath());
      Assert.assertEquals("/", deleteFuture.get());

      // The namespace must be gone
      Assert.assertNull(zkClient.exists("/test").get());
    } finally {
      zkServer.stopAndWait();
    }
  }
}
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.workbench.pr.client.editors.definition.list;

import java.util.ArrayList;
import java.util.List;

import com.google.gwt.user.cellview.client.ColumnSortList;
import com.google.gwt.view.client.Range;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.security.shared.api.identity.User;
import org.jbpm.workbench.common.client.PerspectiveIds;
import org.jbpm.workbench.common.client.list.ExtendedPagedTable;
import org.jbpm.workbench.forms.client.display.providers.StartProcessFormDisplayProviderImpl;
import org.jbpm.workbench.forms.client.display.views.PopupFormDisplayerView;
import org.jbpm.workbench.forms.display.api.ProcessDisplayerConfig;
import org.jbpm.workbench.pr.client.resources.i18n.Constants;
import org.jbpm.workbench.pr.events.ProcessDefSelectionEvent;
import org.jbpm.workbench.pr.model.ProcessSummary;
import org.jbpm.workbench.pr.service.ProcessRuntimeDataService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.workbench.client.error.DefaultWorkbenchErrorCallback;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.uberfire.client.mvp.PerspectiveActivity;
import org.uberfire.client.mvp.PerspectiveManager;
import org.uberfire.client.mvp.PlaceManager;
import org.uberfire.ext.widgets.common.client.breadcrumbs.UberfireBreadcrumbs;
import org.uberfire.mocks.CallerMock;
import org.uberfire.mocks.EventSourceMock;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.Commands;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.security.ResourceRef;
import org.uberfire.security.authz.AuthorizationManager;
import org.uberfire.workbench.model.ActivityResourceType;

import static java.util.Collections.emptyList;
import static org.jbpm.workbench.common.client.PerspectiveIds.SEARCH_PARAMETER_PROCESS_DEFINITION_ID;
import static org.junit.Assert.*;
import static org.kie.workbench.common.workbench.client.PerspectiveIds.PROCESS_INSTANCES;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@code ProcessDefinitionListPresenter}: selection-event
 * propagation, paged data loading, breadcrumb wiring, and perspective
 * authorization checks. All collaborators are Mockito mocks; the presenter
 * itself is a spy so its callbacks can be verified.
 */
@RunWith(GwtMockitoTestRunner.class)
public class ProcessDefinitionListPresenterTest {

    // Perspective under which all breadcrumb interactions are verified.
    private static final String PERSPECTIVE_ID = PerspectiveIds.PROCESS_DEFINITIONS;

    // Real i18n constants instance, assigned in setup().
    private org.jbpm.workbench.common.client.resources.i18n.Constants commonConstants;

    @Mock
    protected PlaceManager placeManager;

    @Mock
    private UberfireBreadcrumbs breadcrumbs;

    @Mock
    private AuthorizationManager authorizationManager;

    @Mock
    private User identity;

    @Mock
    private PerspectiveManager perspectiveManager;

    @Mock
    private PerspectiveActivity perspectiveActivity;

    @Mock
    protected EventSourceMock<ProcessDefSelectionEvent> processDefSelectionEvent;

    @Mock
    ProcessDefinitionListPresenter.ProcessDefinitionListView view;

    @Mock
    ExtendedPagedTable extendedPagedTable;

    // Not a @Mock: built in setup() as a CallerMock wrapping the mocked service.
    Caller<ProcessRuntimeDataService> processRuntimeDataServiceCaller;

    @Mock
    ProcessRuntimeDataService processRuntimeDataService;

    @Mock
    StartProcessFormDisplayProviderImpl startProcessDisplayProvider;

    @Mock
    PopupFormDisplayerView formDisplayPopUp;

    @Mock
    DefaultWorkbenchErrorCallback errorCallback;

    @InjectMocks
    @Spy
    ProcessDefinitionListPresenter presenter;

    // Builds a list of {@code instances} empty ProcessSummary objects, used to
    // simulate pages returned by the runtime data service.
    private static List<ProcessSummary> getMockList(int instances) {
        final List<ProcessSummary> summaries = new ArrayList<>();
        for (int i = 0; i < instances; i++) {
            summaries.add(new ProcessSummary());
        }
        return summaries;
    }

    @Before
    public void setup() {
        // Wire the mocked service through a synchronous CallerMock and stub the
        // view/perspective collaborators the presenter touches on every call.
        processRuntimeDataServiceCaller = new CallerMock<ProcessRuntimeDataService>(processRuntimeDataService);
        presenter.setProcessRuntimeDataService(processRuntimeDataServiceCaller);
        when(view.getListGrid()).thenReturn(extendedPagedTable);
        when(extendedPagedTable.getColumnSortList()).thenReturn(new ColumnSortList());
        when(perspectiveManager.getCurrentPerspective()).thenReturn(perspectiveActivity);
        when(perspectiveActivity.getIdentifier()).thenReturn(PERSPECTIVE_ID);
        commonConstants = org.jbpm.workbench.common.client.resources.i18n.Constants.INSTANCE;
    }

    @Test
    public void testProcessDefNameDefinitionPropagation() {
        // Selecting a definition must fire a selection event carrying all of
        // the summary's fields and push a breadcrumb for the definition.
        final ProcessSummary processSummary = new ProcessSummary();
        processSummary.setProcessDefId("testProcessDefId");
        processSummary.setDeploymentId("testDeploymentId");
        processSummary.setProcessDefName("testProcessDefName");
        processSummary.setDynamic(false);

        presenter.selectProcessDefinition(processSummary);

        verify(processDefSelectionEvent).fire(any(ProcessDefSelectionEvent.class));
        // Capture the same (single) fire invocation to inspect the event payload.
        ArgumentCaptor<ProcessDefSelectionEvent> argument = ArgumentCaptor.forClass(ProcessDefSelectionEvent.class);
        verify(processDefSelectionEvent).fire(argument.capture());
        final ProcessDefSelectionEvent event = argument.getValue();
        assertEquals(processSummary.getProcessDefName(), event.getProcessDefName());
        assertEquals(processSummary.getDeploymentId(), event.getDeploymentId());
        assertEquals(processSummary.getProcessDefId(), event.getProcessId());
        assertEquals(processSummary.isDynamic(), event.isDynamic());

        verify(breadcrumbs).addBreadCrumb(eq(PERSPECTIVE_ID),
                                          eq(Constants.INSTANCE.ProcessDefinitionBreadcrumb((processSummary.getName()))),
                                          eq(Commands.DO_NOTHING));
    }

    @Test
    public void testProcessDefNameDefinitionOpenGenericForm() {
        // The generic start form must be configured with the definition name.
        String processDefName = "testProcessDefName";
        presenter.openGenericForm("processDefId",
                                  "deploymentId",
                                  processDefName);

        ArgumentCaptor<ProcessDisplayerConfig> argument = ArgumentCaptor.forClass(ProcessDisplayerConfig.class);
        verify(startProcessDisplayProvider).setup(argument.capture(), any());
        assertEquals(processDefName, argument.getValue().getProcessName());
    }

    @Test
    public void testGetData() {
        // First page returns a full page (10 rows -> not exact row count);
        // second page returns 1 row, so the total (11) is exact.
        when(processRuntimeDataService.getProcesses(anyString(), anyInt(), anyInt(), anyString(), anyBoolean()))
                .thenReturn(getMockList(10))
                .thenReturn(getMockList(1));

        Range range = new Range(0, 10);
        presenter.getData(range);

        verify(presenter).updateDataOnCallback(anyList(), eq(0), eq(10), eq(false));

        range = new Range(10, 10);
        presenter.getData(range);

        verify(presenter).updateDataOnCallback(anyList(), eq(10), eq(11), eq(true));
    }

    @Test
    public void testOnRuntimeDataServiceError() {
        // On a service error the presenter clears the table, delegates to the
        // error callback, and hides the busy indicator.
        final Throwable throwable = mock(Throwable.class);
        assertFalse(presenter.onRuntimeDataServiceError(throwable));

        verify(presenter).updateDataOnCallback(emptyList(), 0, 0, true);
        verify(errorCallback).error(throwable);
        verify(view).hideBusyIndicator();
    }

    @Test
    public void testListBreadcrumbCreation() {
        // List view breadcrumb: Home (navigates home) + static list label.
        presenter.createListBreadcrumb();

        ArgumentCaptor<Command> captureCommand = ArgumentCaptor.forClass(Command.class);

        verify(breadcrumbs).clearBreadcrumbs(PERSPECTIVE_ID);
        verify(breadcrumbs).addBreadCrumb(eq(PERSPECTIVE_ID),
                                          eq(commonConstants.Home()),
                                          captureCommand.capture());
        captureCommand.getValue().execute();
        verify(placeManager).goTo(PerspectiveIds.HOME);
        verify(breadcrumbs).addBreadCrumb(eq(PERSPECTIVE_ID),
                                          eq(commonConstants.Manage_Process_Definitions()),
                                          eq(Commands.DO_NOTHING));
        verifyNoMoreInteractions(breadcrumbs);
    }

    @Test
    public void testSetupDetailBreadcrumb() {
        // Detail view breadcrumb: Home (navigates home), list label (closes the
        // detail screen), then the static detail label.
        String detailLabel = "detailLabel";
        String detailScreenId = "screenId";
        PlaceManager placeManagerMock = mock(PlaceManager.class);
        presenter.setPlaceManager(placeManagerMock);
        presenter.setupDetailBreadcrumb(placeManagerMock,
                                        commonConstants.Manage_Process_Definitions(),
                                        detailLabel,
                                        detailScreenId);

        ArgumentCaptor<Command> captureCommand = ArgumentCaptor.forClass(Command.class);

        verify(breadcrumbs).clearBreadcrumbs(PERSPECTIVE_ID);
        verify(breadcrumbs).addBreadCrumb(eq(PERSPECTIVE_ID),
                                          eq(commonConstants.Home()),
                                          captureCommand.capture());
        captureCommand.getValue().execute();
        verify(placeManagerMock).goTo(PerspectiveIds.HOME);
        verify(breadcrumbs).addBreadCrumb(eq(PERSPECTIVE_ID),
                                          eq(commonConstants.Manage_Process_Definitions()),
                                          captureCommand.capture());
        captureCommand.getValue().execute();
        verify(placeManagerMock).closePlace(detailScreenId);
        verify(breadcrumbs).addBreadCrumb(eq(PERSPECTIVE_ID),
                                          eq(detailLabel),
                                          eq(Commands.DO_NOTHING));
    }

    @Test
    public void testIsAuthorizedForView() {
        // First authorize() call returns true, second returns false; the
        // presenter must pass a perspective ResourceRef for the given id.
        String perspectiveId = PROCESS_INSTANCES;
        when(authorizationManager.authorize(any(ResourceRef.class),
                                            eq(identity))).thenReturn(true,
                                                                      false);

        assertTrue(presenter.isUserAuthorizedForPerspective(perspectiveId));

        final ArgumentCaptor<ResourceRef> captor = ArgumentCaptor.forClass(ResourceRef.class);
        verify(authorizationManager).authorize(captor.capture(),
                                               eq(identity));
        assertEquals(perspectiveId, captor.getValue().getIdentifier());
        assertEquals(ActivityResourceType.PERSPECTIVE, captor.getValue().getResourceType());

        assertFalse(presenter.isUserAuthorizedForPerspective(perspectiveId));
    }

    @Test
    public void testViewProcessInstanceActionCondition() {
        // The "view process instances" action is available iff the user is
        // authorized for the PROCESS_INSTANCES perspective.
        doAnswer(new PerspectiveAnswer(PROCESS_INSTANCES)).when(authorizationManager).authorize(any(ResourceRef.class),
                                                                                               eq(identity));

        assertTrue(presenter.getViewProcessInstanceActionCondition().test(new ProcessSummary()));

        when(authorizationManager.authorize(any(ResourceRef.class),
                                            eq(identity))).thenReturn(false);
        assertFalse(presenter.getViewProcessInstanceActionCondition().test(new ProcessSummary()));
    }

    @Test
    public void testViewProcessInstances() {
        // Navigating to process instances passes the definition id as the
        // single search parameter of the place request.
        String processDefinition = "procDef";
        when(authorizationManager.authorize(any(ResourceRef.class),
                                            eq(identity))).thenReturn(true);
        presenter.viewProcessInstances(processDefinition);

        final ArgumentCaptor<PlaceRequest> captor = ArgumentCaptor.forClass(PlaceRequest.class);
        verify(placeManager).goTo(captor.capture());
        assertEquals(1, captor.getAllValues().size());
        assertEquals(PROCESS_INSTANCES, captor.getValue().getIdentifier());
        assertEquals(1, captor.getValue().getParameters().size());
        assertEquals(processDefinition, captor.getValue().getParameters().get(SEARCH_PARAMETER_PROCESS_DEFINITION_ID));
    }

    /**
     * Mockito {@link Answer} that authorizes only the perspective whose id
     * matches the one given at construction time.
     */
    protected class PerspectiveAnswer implements Answer<Boolean> {

        private String perspectiveId;

        public PerspectiveAnswer(String perspectiveId) {
            this.perspectiveId = perspectiveId;
        }

        @Override
        public Boolean answer(InvocationOnMock invocation) throws Throwable {
            return perspectiveId.equals(((ResourceRef) invocation.getArguments()[0]).getIdentifier());
        }
    }
}
/*
 *    Copyright 2016-2020 the original author or authors.
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */
package examples.simple;

import static examples.simple.PersonDynamicSqlSupport.*;
import static org.mybatis.dynamic.sql.SqlBuilder.*;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.function.UnaryOperator;

import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Result;
import org.apache.ibatis.annotations.ResultMap;
import org.apache.ibatis.annotations.Results;
import org.apache.ibatis.annotations.SelectProvider;
import org.apache.ibatis.type.JdbcType;
import org.mybatis.dynamic.sql.BasicColumn;
import org.mybatis.dynamic.sql.delete.DeleteDSLCompleter;
import org.mybatis.dynamic.sql.insert.GeneralInsertDSL;
import org.mybatis.dynamic.sql.select.CountDSLCompleter;
import org.mybatis.dynamic.sql.select.SelectDSLCompleter;
import org.mybatis.dynamic.sql.select.render.SelectStatementProvider;
import org.mybatis.dynamic.sql.update.UpdateDSLCompleter;
import org.mybatis.dynamic.sql.update.UpdateDSL;
import org.mybatis.dynamic.sql.update.UpdateModel;
import org.mybatis.dynamic.sql.util.SqlProviderAdapter;
import org.mybatis.dynamic.sql.util.mybatis3.CommonCountMapper;
import org.mybatis.dynamic.sql.util.mybatis3.CommonDeleteMapper;
import org.mybatis.dynamic.sql.util.mybatis3.CommonInsertMapper;
import org.mybatis.dynamic.sql.util.mybatis3.CommonUpdateMapper;
import org.mybatis.dynamic.sql.util.mybatis3.MyBatis3Utils;

/**
 *
 * Note: this is the canonical mapper with the new style methods
 * and represents the desired output for MyBatis Generator
 *
 */
@Mapper
public interface PersonMapper extends CommonCountMapper, CommonDeleteMapper, CommonInsertMapper<PersonRecord>, CommonUpdateMapper {

    // Shared result map: note the id column is selected under the alias "A_ID"
    // (see selectList below), so the mapping uses column="A_ID".
    @SelectProvider(type=SqlProviderAdapter.class, method="select")
    @Results(id="PersonResult", value= {
            @Result(column="A_ID", property="id", jdbcType=JdbcType.INTEGER, id=true),
            @Result(column="first_name", property="firstName", jdbcType=JdbcType.VARCHAR),
            @Result(column="last_name", property="lastName", jdbcType=JdbcType.VARCHAR, typeHandler=LastNameTypeHandler.class),
            @Result(column="birth_date", property="birthDate", jdbcType=JdbcType.DATE),
            @Result(column="employed", property="employed", jdbcType=JdbcType.VARCHAR, typeHandler=YesNoTypeHandler.class),
            @Result(column="occupation", property="occupation", jdbcType=JdbcType.VARCHAR),
            @Result(column="address_id", property="addressId", jdbcType=JdbcType.INTEGER)
    })
    List<PersonRecord> selectMany(SelectStatementProvider selectStatement);

    @SelectProvider(type=SqlProviderAdapter.class, method="select")
    @ResultMap("PersonResult")
    Optional<PersonRecord> selectOne(SelectStatementProvider selectStatement);

    // Default column list for all select methods; id is aliased to match the
    // "A_ID" column in the result map above.
    BasicColumn[] selectList =
            BasicColumn.columnList(id.as("A_ID"), firstName, lastName, birthDate, employed, occupation, addressId);

    // Counts all rows matching the completer's where clause (count(*)).
    default long count(CountDSLCompleter completer) {
        return MyBatis3Utils.countFrom(this::count, person, completer);
    }

    // Counts non-null values of a single column.
    default long count(BasicColumn column, CountDSLCompleter completer) {
        return MyBatis3Utils.count(this::count, column, person, completer);
    }

    // Counts distinct values of a single column.
    default long countDistinct(BasicColumn column, CountDSLCompleter completer) {
        return MyBatis3Utils.countDistinct(this::count, column, person, completer);
    }

    default int delete(DeleteDSLCompleter completer) {
        return MyBatis3Utils.deleteFrom(this::delete, person, completer);
    }

    default int deleteByPrimaryKey(Integer id_) {
        return delete(c ->
            c.where(id, isEqualTo(id_))
        );
    }

    default int generalInsert(UnaryOperator<GeneralInsertDSL> completer) {
        return MyBatis3Utils.generalInsert(this::generalInsert, person, completer);
    }

    // Inserts all columns unconditionally (null properties are inserted as null).
    default int insert(PersonRecord record) {
        return MyBatis3Utils.insert(this::insert, record, person, c ->
            c.map(id).toProperty("id")
            .map(firstName).toProperty("firstName")
            .map(lastName).toProperty("lastName")
            .map(birthDate).toProperty("birthDate")
            .map(employed).toProperty("employed")
            .map(occupation).toProperty("occupation")
            .map(addressId).toProperty("addressId")
        );
    }

    default int insertMultiple(PersonRecord...records) {
        return insertMultiple(Arrays.asList(records));
    }

    default int insertMultiple(Collection<PersonRecord> records) {
        return MyBatis3Utils.insertMultiple(this::insertMultiple, records, person, c ->
            c.map(id).toProperty("id")
            .map(firstName).toProperty("firstName")
            .map(lastName).toProperty("lastName")
            .map(birthDate).toProperty("birthDate")
            .map(employed).toProperty("employed")
            .map(occupation).toProperty("occupation")
            .map(addressId).toProperty("addressId")
        );
    }

    // Inserts only the properties that are non-null on the record.
    default int insertSelective(PersonRecord record) {
        return MyBatis3Utils.insert(this::insert, record, person, c ->
            c.map(id).toPropertyWhenPresent("id", record::getId)
            .map(firstName).toPropertyWhenPresent("firstName", record::getFirstName)
            .map(lastName).toPropertyWhenPresent("lastName", record::getLastName)
            .map(birthDate).toPropertyWhenPresent("birthDate", record::getBirthDate)
            .map(employed).toPropertyWhenPresent("employed", record::getEmployed)
            .map(occupation).toPropertyWhenPresent("occupation", record::getOccupation)
            .map(addressId).toPropertyWhenPresent("addressId", record::getAddressId)
        );
    }

    default Optional<PersonRecord> selectOne(SelectDSLCompleter completer) {
        return MyBatis3Utils.selectOne(this::selectOne, selectList, person, completer);
    }

    default List<PersonRecord> select(SelectDSLCompleter completer) {
        return MyBatis3Utils.selectList(this::selectMany, selectList, person, completer);
    }

    default List<PersonRecord> selectDistinct(SelectDSLCompleter completer) {
        return MyBatis3Utils.selectDistinct(this::selectMany, selectList, person, completer);
    }

    default Optional<PersonRecord> selectByPrimaryKey(Integer id_) {
        return selectOne(c ->
            c.where(id, isEqualTo(id_))
        );
    }

    default int update(UpdateDSLCompleter completer) {
        return MyBatis3Utils.update(this::update, person, completer);
    }

    // Sets every column from the record (nulls overwrite); reusable fragment
    // for callers composing their own update statements.
    static UpdateDSL<UpdateModel> updateAllColumns(PersonRecord record, UpdateDSL<UpdateModel> dsl) {
        return dsl.set(id).equalTo(record::getId)
                .set(firstName).equalTo(record::getFirstName)
                .set(lastName).equalTo(record::getLastName)
                .set(birthDate).equalTo(record::getBirthDate)
                .set(employed).equalTo(record::getEmployed)
                .set(occupation).equalTo(record::getOccupation)
                .set(addressId).equalTo(record::getAddressId);
    }

    // Sets only the columns whose record properties are non-null.
    static UpdateDSL<UpdateModel> updateSelectiveColumns(PersonRecord record, UpdateDSL<UpdateModel> dsl) {
        return dsl.set(id).equalToWhenPresent(record::getId)
                .set(firstName).equalToWhenPresent(record::getFirstName)
                .set(lastName).equalToWhenPresent(record::getLastName)
                .set(birthDate).equalToWhenPresent(record::getBirthDate)
                .set(employed).equalToWhenPresent(record::getEmployed)
                .set(occupation).equalToWhenPresent(record::getOccupation)
                .set(addressId).equalToWhenPresent(record::getAddressId);
    }

    // Updates all non-key columns by primary key (the id column itself is not set).
    default int updateByPrimaryKey(PersonRecord record) {
        return update(c ->
            c.set(firstName).equalTo(record::getFirstName)
            .set(lastName).equalTo(record::getLastName)
            .set(birthDate).equalTo(record::getBirthDate)
            .set(employed).equalTo(record::getEmployed)
            .set(occupation).equalTo(record::getOccupation)
            .set(addressId).equalTo(record::getAddressId)
            .where(id, isEqualTo(record::getId))
        );
    }

    // Updates only non-null, non-key columns by primary key.
    default int updateByPrimaryKeySelective(PersonRecord record) {
        return update(c ->
            c.set(firstName).equalToWhenPresent(record::getFirstName)
            .set(lastName).equalToWhenPresent(record::getLastName)
            .set(birthDate).equalToWhenPresent(record::getBirthDate)
            .set(employed).equalToWhenPresent(record::getEmployed)
            .set(occupation).equalToWhenPresent(record::getOccupation)
            .set(addressId).equalToWhenPresent(record::getAddressId)
            .where(id, isEqualTo(record::getId))
        );
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.curator.test.compatibility;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Helper for computing the various timeouts used by tests.
 *
 * Copied from the old (now deprecated) Timing class; kept so that ZK 3.4
 * compatibility tests keep working. All derived timings (session, waiting,
 * tick) are scaled from a single base duration.
 */
public class Timing2 {
    // Base duration and its unit; every other timing is derived from these.
    private final long value;
    private final TimeUnit unit;
    // Factor applied by forWaiting() when blocking on latches/semaphores/queues.
    private final int waitingMultiple;

    // Server tickTime is a tenth of the base timing.
    private static final double TICK_TIME_MULTIPLE = .10;
    private static final int DEFAULT_SECONDS = 10;
    private static final int DEFAULT_WAITING_MULTIPLE = 5;
    private static final double SESSION_MULTIPLE = 1.5;
    private static final double SESSION_SLEEP_MULTIPLE = SESSION_MULTIPLE * 1.75; // has to be at least session + 2/3 of a session to account for missed heartbeat then session expiration

    /**
     * Use the default base time (optionally scaled via the
     * {@code timing-multiple} system property).
     */
    public Timing2() {
        this(Integer.getInteger("timing-multiple", 1), getWaitingMultiple());
    }

    /**
     * Use a multiple of the default base time.
     *
     * @param multiple the multiple
     */
    public Timing2(double multiple) {
        this((long) (DEFAULT_SECONDS * multiple), TimeUnit.SECONDS, getWaitingMultiple());
    }

    /**
     * Use a multiple of the default base time.
     *
     * @param multiple        the multiple
     * @param waitingMultiple multiple of main timing to use when waiting
     */
    public Timing2(double multiple, int waitingMultiple) {
        this((long) (DEFAULT_SECONDS * multiple), TimeUnit.SECONDS, waitingMultiple);
    }

    /**
     * @param value base time
     * @param unit  base time unit
     */
    public Timing2(long value, TimeUnit unit) {
        this(value, unit, getWaitingMultiple());
    }

    /**
     * @param value           base time
     * @param unit            base time unit
     * @param waitingMultiple multiple of main timing to use when waiting
     */
    public Timing2(long value, TimeUnit unit, int waitingMultiple) {
        this.value = value;
        this.unit = unit;
        this.waitingMultiple = waitingMultiple;
    }

    /**
     * Return the base time in milliseconds.
     *
     * @return time ms
     */
    public int milliseconds() {
        return (int) unit.toMillis(value);
    }

    /**
     * Return the base time in seconds.
     *
     * NOTE: returns the raw value unconverted; only meaningful when the base
     * unit is SECONDS (which all the multiple-based constructors use).
     *
     * @return time secs
     */
    public int seconds() {
        return (int) value;
    }

    /**
     * Wait on the given latch using the waiting timing.
     *
     * @param latch latch to wait on
     * @return result of {@link java.util.concurrent.CountDownLatch#await(long, java.util.concurrent.TimeUnit)}
     */
    public boolean awaitLatch(CountDownLatch latch) {
        Timing2 waiting = forWaiting();
        try {
            return latch.await(waiting.value, waiting.unit);
        } catch ( InterruptedException e ) {
            // restore the interrupt flag so callers can still observe it
            Thread.currentThread().interrupt();
        }
        return false;
    }

    /**
     * Try to take an item from the given queue within the waiting timing.
     *
     * @param queue queue
     * @return item
     * @throws Exception interrupted or timed out
     */
    public <T> T takeFromQueue(BlockingQueue<T> queue) throws Exception {
        Timing2 waiting = forWaiting();
        try {
            T polled = queue.poll(waiting.value, waiting.unit);
            if ( polled == null ) {
                throw new TimeoutException("Timed out trying to take from queue");
            }
            return polled;
        } catch ( InterruptedException e ) {
            Thread.currentThread().interrupt();
            throw e;
        }
    }

    /**
     * Wait on the given semaphore using the waiting timing.
     *
     * @param semaphore the semaphore
     * @return result of {@link java.util.concurrent.Semaphore#tryAcquire()}
     */
    public boolean acquireSemaphore(Semaphore semaphore) {
        Timing2 waiting = forWaiting();
        try {
            return semaphore.tryAcquire(waiting.value, waiting.unit);
        } catch ( InterruptedException e ) {
            Thread.currentThread().interrupt();
        }
        return false;
    }

    /**
     * Wait on the given semaphore using the waiting timing.
     *
     * @param semaphore the semaphore
     * @param n         number of permits to acquire
     * @return result of {@link java.util.concurrent.Semaphore#tryAcquire(int, long, java.util.concurrent.TimeUnit)}
     */
    public boolean acquireSemaphore(Semaphore semaphore, int n) {
        Timing2 waiting = forWaiting();
        try {
            return semaphore.tryAcquire(n, waiting.value, waiting.unit);
        } catch ( InterruptedException e ) {
            Thread.currentThread().interrupt();
        }
        return false;
    }

    /**
     * Return a new timing that is a multiple of this timing.
     *
     * @param n the multiple
     * @return this timing times the multiple
     */
    public Timing2 multiple(double n) {
        return new Timing2((int) (value * n), unit);
    }

    /**
     * Return a new timing that is a multiple of this timing.
     *
     * @param n               the multiple
     * @param waitingMultiple new waitingMultiple
     * @return this timing times the multiple
     */
    public Timing2 multiple(double n, int waitingMultiple) {
        return new Timing2((int) (value * n), unit, waitingMultiple);
    }

    /**
     * Return a new timing with the standard multiple for waiting on latches, etc.
     *
     * @return this timing multiplied
     */
    @SuppressWarnings("PointlessArithmeticExpression")
    public Timing2 forWaiting() {
        return multiple(waitingMultiple);
    }

    /**
     * Return a new timing with a multiple that ensures a ZK session timeout.
     *
     * @return this timing multiplied
     */
    public Timing2 forSessionSleep() {
        return multiple(SESSION_SLEEP_MULTIPLE, 1);
    }

    /**
     * Return a new timing with a multiple for sleeping a smaller amount of time.
     *
     * @return this timing multiplied
     */
    public Timing2 forSleepingABit() {
        return multiple(.25);
    }

    /**
     * Sleep for a small amount of time.
     *
     * @throws InterruptedException if interrupted
     */
    public void sleepABit() throws InterruptedException {
        forSleepingABit().sleep();
    }

    /**
     * Sleep for the full amount of time.
     *
     * @throws InterruptedException if interrupted
     */
    public void sleep() throws InterruptedException {
        unit.sleep(value);
    }

    /**
     * Return the value to use for ZK session timeout.
     *
     * @return session timeout
     */
    public int session() {
        return multiple(SESSION_MULTIPLE).milliseconds();
    }

    /**
     * Return the value to use for ZK connection timeout.
     *
     * @return connection timeout
     */
    public int connection() {
        return milliseconds();
    }

    /**
     * Value to use for server "tickTime".
     *
     * @return tick time
     */
    public int tickTime() {
        double tick = milliseconds() * TICK_TIME_MULTIPLE;
        return (int) Math.max(1, tick);
    }

    private static Integer getWaitingMultiple() {
        return Integer.getInteger("timing-waiting-multiple", DEFAULT_WAITING_MULTIPLE);
    }
}
package net.demus_intergalactical.serverman.instance;

import net.demus_intergalactical.serverman.*;

import org.apache.commons.io.FileUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.ParseException;

import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;

/**
 * A single managed server instance: its configuration (name, jar file,
 * version, JVM arguments), its running process, the per-instance
 * output-matching JavaScript, and the command-completion data.
 */
public class ServerInstance {
	private String serverInstanceID;
	private String name;
	private String serverFile;
	private String serverVersion;
	private List<String> javaArgs;

	private ServerInstanceProcess p;
	private ScriptEngine js;
	private OutputHandler out;
	private PlayerHandler playerHandler;
	private StatusHandler statusHandler;
	private Completion completion;
	private boolean loaded;

	public ServerInstance() {
		javaArgs = new LinkedList<>();
		p = new ServerInstanceProcess(this);
	}

	/**
	 * Loads configuration, the match script, and completion data,
	 * then marks the instance as loaded.
	 *
	 * @return this instance, for chaining
	 * @throws NoSuchMethodException if the match script has no init()
	 * @throws ScriptException       if the match script fails to evaluate
	 * @throws IOException           if any file cannot be read/created
	 */
	public ServerInstance load() throws NoSuchMethodException,
			ScriptException, IOException {
		loadConfig();
		loadMatchScript();
		loadCompletion();
		loaded = true;
		return this;
	}

	/**
	 * Serializes this instance's settings back into the global
	 * instance-settings store and writes them to disk.
	 *
	 * @return this instance, for chaining
	 */
	@SuppressWarnings("unchecked")
	public ServerInstance save() {
		JSONArray a = new JSONArray();
		a.addAll(javaArgs);
		JSONObject obj = new JSONObject();
		obj.put("name", name);
		obj.put("server_file", serverFile);
		obj.put("server_version", serverVersion);
		obj.put("java_args", a);
		Globals.getInstanceSettings().add(serverInstanceID, obj);
		Globals.getInstanceSettings().saveConfig();
		return this;
	}

	/**
	 * Reads this instance's settings from the global store, creating the
	 * instance folder and a default settings entry if they do not exist.
	 */
	public void loadConfig() {
		String instanceHome = Globals.getServerManConfig()
				.get("instances_home") + File.separator
				+ serverInstanceID;
		File dir = new File(instanceHome);
		if (!dir.exists()) {
			if (!dir.mkdirs()) {
				// FIX: message previously read "Could not createinstance folder"
				System.err.println("Could not create "
						+ "instance folder");
			}
		}
		JSONObject obj = (JSONObject)
				Globals.getInstanceSettings().get(serverInstanceID);
		if (obj == null) {
			Globals.getInstanceSettings()
					.addDefault(serverInstanceID);
			obj = (JSONObject) Globals.getInstanceSettings()
					.get(serverInstanceID);
		}
		name = (String) obj.get("name");
		serverFile = (String) obj.get("server_file");
		serverVersion = (String) obj.get("server_version");
		// FIX: use a typed List instead of a raw JSONArray for the field,
		// and tolerate a missing "java_args" entry instead of throwing NPE.
		javaArgs = new LinkedList<>();
		JSONArray rawArgs = (JSONArray) obj.get("java_args");
		if (rawArgs != null) {
			for (Object arg : rawArgs) {
				javaArgs.add((String) arg);
			}
		}
	}

	/**
	 * Evaluates the per-instance match.js and calls its init() function.
	 * The script gets two globals: "log" (the output handler) and
	 * "players" (a wrapper over the player handler).
	 *
	 * @throws IOException           if match.js cannot be read
	 * @throws ScriptException       if evaluation fails
	 * @throws NoSuchMethodException if init() is not defined
	 */
	public void loadMatchScript() throws IOException,
			ScriptException, NoSuchMethodException {
		PlayerWrapper pw = new PlayerWrapper(this, playerHandler);
		String matchScriptPath = Globals.getServerManConfig()
				.get("instances_home") + File.separator
				+ serverInstanceID + File.separator + "match.js";
		File matchScriptFile = new File(matchScriptPath);
		ScriptEngineManager sm = new ScriptEngineManager();
		this.js = sm.getEngineByName("JavaScript");
		js.put("log", out);
		js.put("players", pw);
		// FIX: the FileReader was previously never closed (resource leak)
		try (FileReader reader = new FileReader(matchScriptFile)) {
			js.eval(reader);
		}
		((Invocable) js).invokeFunction("init");
	}

	/**
	 * Loads completion.json for this instance, downloading it for the
	 * instance's server version when missing. Falls back to an empty
	 * file/object when the download or parse fails.
	 *
	 * @throws IOException if the local completion file cannot be created
	 */
	private void loadCompletion() throws IOException {
		String completionFilePath = Globals.getServerManConfig()
				.get("instances_home") + File.separator
				+ serverInstanceID + File.separator
				+ "completion.json";
		File completionFile = new File(completionFilePath);
		if (!completionFile.exists()) {
			try {
				String url = "http://serverman"
						+ ".demus-intergalactical.net/v/"
						+ serverVersion + "/completion.json";
				Utils.download(url, completionFile);
			} catch (IOException e) {
				// best effort: leave an empty file so the parse
				// below yields an empty completion set
				if (!completionFile.createNewFile()
						&& !completionFile.exists()) {
					throw new IOException("Could not create "
							+ completionFilePath, e);
				}
			}
		}
		JSONObject completionObj;
		try {
			completionObj = Utils.loadJson(completionFilePath);
		} catch (ParseException e) {
			completionObj = new JSONObject();
		}
		completion = new Completion(completionObj, p);
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getServerFile() {
		return serverFile;
	}

	public void setServerFile(String serverFile) {
		this.serverFile = serverFile;
	}

	public String getServerInstanceID() {
		return serverInstanceID;
	}

	public void setServerInstanceID(String serverInstanceID) {
		this.serverInstanceID = serverInstanceID;
	}

	public List<String> getJavaArgs() {
		return javaArgs;
	}

	public void setJavaArgs(List<String> javaArgs) {
		this.javaArgs = javaArgs;
	}

	public PlayerHandler getPlayerHandler() {
		return playerHandler;
	}

	public void setPlayerHandler(PlayerHandler playerHandler) {
		this.playerHandler = playerHandler;
	}

	public OutputHandler getOut() {
		return out;
	}

	public void setOut(OutputHandler out) {
		this.out = out;
	}

	/** Starts the underlying server process. */
	public void run() {
		p.start();
	}

	public synchronized boolean isRunning() {
		return p != null && p.isRunning();
	}

	/** Sends a console command to the running server process. */
	public synchronized void send(String command) {
		p.send(command);
	}

	/** Requests a clean shutdown via the server's "stop" command. */
	public synchronized void stop() {
		send("stop");
	}

	public synchronized ServerInstanceProcess getProcess() {
		return p;
	}

	public ScriptEngine getMatcherJS() {
		return js;
	}

	public String getServerVersion() {
		return serverVersion;
	}

	public void setServerVersion(String serverVersion) {
		this.serverVersion = serverVersion;
	}

	/**
	 * Copies the given image into the instance folder as server-icon.png.
	 *
	 * @param icon source image file
	 * @throws IOException if the copy fails
	 */
	public void setIcon(File icon) throws IOException {
		String iconPath = Globals.getServerManConfig()
				.get("instances_home") + File.separator
				+ serverInstanceID + File.separator
				+ "server-icon.png";
		FileUtils.copyFile(icon, new File(iconPath));
	}

	/**
	 * @return the instance's server-icon.png, or null if none exists
	 */
	public File getIcon() {
		String iconPath = Globals.getServerManConfig()
				.get("instances_home") + File.separator
				+ serverInstanceID + File.separator
				+ "server-icon.png";
		File f = new File(iconPath);
		if (!f.exists()) {
			return null;
		}
		return f;
	}

	public StatusHandler getStatusHandler() {
		return statusHandler;
	}

	public void setStatusHandler(StatusHandler statusHandler) {
		this.statusHandler = statusHandler;
	}

	public JSONObject getCompletion() {
		return completion.getCompletion();
	}

	public Set<String> complete(String s) {
		return completion.complete(s);
	}

	public boolean isLoaded() {
		return loaded;
	}
}
package org.jcodec.samples.mp4;

import java.awt.Dimension;
import java.awt.GridLayout;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.JTree;
import javax.swing.UIManager;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeSelectionModel;

import org.jcodec.common.Tuple;
import org.jcodec.common.Tuple._2;
import org.jcodec.common.io.FileChannelWrapper;
import org.jcodec.common.io.NIOUtils;
import org.jcodec.containers.mp4.MP4Util;
import org.jcodec.containers.mp4.MP4Util.Atom;
import org.jcodec.containers.mp4.boxes.Box;
import org.jcodec.containers.mp4.boxes.Box.AtomField;
import org.jcodec.containers.mp4.boxes.NodeBox;

import net.miginfocom.swing.MigLayout;

/**
 * Swing viewer for the atom/box structure of an MP4 file.
 *
 * Left pane: a JTree of the file's atoms (the "moov" atom is fully parsed
 * into its child boxes). Right pane: the fields of the selected box,
 * discovered via reflection over getters annotated with
 * {@link Box.AtomField}.
 */
public class MP4Analyzer extends JPanel implements TreeSelectionListener {
    // Cap on how many elements of an array/collection field are rendered.
    private static final int MAX_COUNT = 100;
    private JTree tree;

    // Boxed primitive wrapper classes; values of these types are rendered
    // directly as text instead of being introspected.
    private final static Set<Class> primitive = new HashSet<Class>();

    static {
        primitive.add(Boolean.class);
        primitive.add(Byte.class);
        primitive.add(Short.class);
        primitive.add(Integer.class);
        primitive.add(Long.class);
        primitive.add(Float.class);
        primitive.add(Double.class);
        primitive.add(Character.class);
    }

    private static boolean playWithLineStyle = false;
    private static String lineStyle = "Horizontal";
    private static boolean useSystemLookAndFeel = false;
    private JPanel rightPane;

    /**
     * Tree user object: either a parsed Box (for children of "moov") or a
     * raw top-level Atom (unparsed). Exactly one of the two fields is
     * non-null; toString() picks the fourcc from whichever is set.
     */
    class BoxNode {
        private Box box;
        private Atom atom;

        public BoxNode(Box box, Atom atom) {
            this.box = box;
            this.atom = atom;
        }

        @Override
        public String toString() {
            return box == null ? atom.getHeader().getFourcc() : box.getFourcc();
        }
    }

    /**
     * Builds the UI for the given file: a split pane with the atom tree on
     * the left and the field view on the right.
     *
     * @param filename path of the MP4 file to analyze
     * @throws IOException if the file cannot be read
     */
    public MP4Analyzer(String filename) throws IOException {
        super(new GridLayout(1, 0));

        DefaultMutableTreeNode top = new DefaultMutableTreeNode(filename);
        createNodes(top, filename);

        tree = new JTree(top);
        tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);

        tree.addTreeSelectionListener(this);

        if (playWithLineStyle) {
            System.out.println("line style = " + lineStyle);
            tree.putClientProperty("JTree.lineStyle", lineStyle);
        }

        JScrollPane treeView = new JScrollPane(tree);

        rightPane = new JPanel(new MigLayout());
        JScrollPane rightView = new JScrollPane(rightPane);

        JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
        splitPane.setLeftComponent(treeView);
        splitPane.setRightComponent(rightView);

        treeView.setMinimumSize(new Dimension(300, 500));
        rightView.setMinimumSize(new Dimension(500, 500));
        splitPane.setDividerLocation(300);
        splitPane.setPreferredSize(new Dimension(1024, 768));

        add(splitPane);
    }

    /** Tree selection callback: renders the selected leaf's box fields. */
    public void valueChanged(TreeSelectionEvent e) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree.getLastSelectedPathComponent();

        if (node == null)
            return;

        Object nodeInfo = node.getUserObject();
        if (node.isLeaf()) {
            BoxNode box = (BoxNode) nodeInfo;
            displayBox(box);
        }
    }

    /**
     * Fills the right pane with the selected node's data: annotated getter
     * values for parsed boxes, or just offset/size for raw atoms.
     */
    private void displayBox(BoxNode box) {
        rightPane.removeAll();
        rightPane.revalidate();
        rightPane.repaint();
        if (box.box != null) {
            // Collect field values keyed by the @AtomField ordering index.
            HashMap<Integer, Tuple._2<String, Object>> map = new HashMap<Integer, Tuple._2<String, Object>>();
            Method[] methods = box.box.getClass().getMethods();
            for (Method method : methods) {
                if (!isDefined(method)) {
                    continue;
                }
                AtomField annotation = method.getAnnotation(Box.AtomField.class);
                try {
                    Object value = method.invoke(box.box);
                    map.put(annotation.idx(), new Tuple._2<String, Object>(toName(method), value));
                } catch (Exception e) {
                    // getters that throw are silently skipped from the view
                }
            }
            // Render fields in annotation-index order; indices are assumed
            // to be < 1000.
            for (int i = 0; i < 1000; i++) {
                if (map.containsKey(i)) {
                    _2<String, Object> field = map.get(i);
                    rightPane.add(new JLabel(field.v0));
                    rightPane.add(renderValue(field.v1), "wrap");
                }
            }
        } else if (box.atom != null) {
            rightPane.add(new JLabel("Offset"));
            rightPane.add(new JTextField(String.valueOf(box.atom.getOffset()), 20), "wrap");
            rightPane.add(new JLabel("Size"));
            rightPane.add(new JTextField(String.valueOf(box.atom.getHeader().getSize()), 20), "wrap");
        }
        rightPane.revalidate();
        rightPane.repaint();
    }

    /**
     * Recursively renders a field value as a Swing component: text fields
     * for scalars/strings/enums, nested panels for maps, iterables, object
     * arrays and reflected sub-objects, and wrapped text areas for
     * primitive arrays (truncated to MAX_COUNT elements).
     */
    private JComponent renderValue(Object obj) {
        if (obj == null) {
            return new JTextField("null", 20);
        }
        if (primitive.contains(obj.getClass())) {
            return new JTextField(String.valueOf(obj), 20);
        }
        String className = obj.getClass().getName();
        // Other java.lang types (except String) are not introspected.
        if (className.startsWith("java.lang") && !className.equals("java.lang.String")) {
            return new JTextField("null", 20);
        }
        if (obj instanceof ByteBuffer)
            obj = NIOUtils.toArray((ByteBuffer) obj);
        if (obj == null) {
            return new JTextField("null", 20);
        } else if (obj instanceof String) {
            return new JTextField((String) obj, 50);
        } else if (obj instanceof Map) {
            JPanel ret = new JPanel(new MigLayout());
            Iterator it = ((Map) obj).entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry e = (Map.Entry) it.next();
                ret.add(new JLabel(String.valueOf(e.getKey()), JLabel.TRAILING));
                ret.add(renderValue(e.getValue()), "wrap");
            }
            return ret;
        } else if (obj instanceof Iterable) {
            JPanel ret = new JPanel(new MigLayout());
            Iterator it = ((Iterable) obj).iterator();
            while (it.hasNext()) {
                ret.add(renderValue(it.next()), "wrap");
            }
            return ret;
        } else if (obj instanceof Object[]) {
            JPanel ret = new JPanel(new MigLayout());
            int len = Array.getLength(obj);
            for (int i = 0; i < Math.min(MAX_COUNT, len); i++) {
                ret.add(renderValue(Array.get(obj, i)), "wrap");
            }
            return ret;
        } else if (obj instanceof long[]) {
            StringBuilder bldr = new StringBuilder();
            long[] a = (long[]) obj;
            int i = 0;
            for (; i < Math.min(MAX_COUNT, a.length); i++) {
                bldr.append(a[i] + " ");
            }
            // NOTE(review): this prints "..." when the array is SHORTER than
            // MAX_COUNT (same in every primitive-array branch below) —
            // looks inverted; confirm intent.
            if (i < MAX_COUNT)
                bldr.append("...");
            JTextArea ret = new JTextArea(bldr.toString(), 10, 50);
            ret.setLineWrap(true);
            return ret;
        } else if (obj instanceof int[]) {
            StringBuilder bldr = new StringBuilder();
            int[] a = (int[]) obj;
            int i = 0;
            for (; i < Math.min(MAX_COUNT, a.length); i++) {
                bldr.append(a[i] + " ");
            }
            if (i < MAX_COUNT)
                bldr.append("...");
            JTextArea ret = new JTextArea(bldr.toString(), 10, 50);
            ret.setLineWrap(true);
            return ret;
        } else if (obj instanceof float[]) {
            StringBuilder bldr = new StringBuilder();
            float[] a = (float[]) obj;
            int i = 0;
            for (; i < Math.min(MAX_COUNT, a.length); i++) {
                bldr.append(a[i] + " ");
            }
            if (i < MAX_COUNT)
                bldr.append("...");
            JTextArea ret = new JTextArea(bldr.toString(), 10, 50);
            ret.setLineWrap(true);
            return ret;
        } else if (obj instanceof double[]) {
            StringBuilder bldr = new StringBuilder();
            double[] a = (double[]) obj;
            int i = 0;
            for (; i < Math.min(MAX_COUNT, a.length); i++) {
                bldr.append(a[i] + " ");
            }
            if (i < MAX_COUNT)
                bldr.append("...");
            JTextArea ret = new JTextArea(bldr.toString(), 10, 50);
            ret.setLineWrap(true);
            return ret;
        } else if (obj instanceof short[]) {
            StringBuilder bldr = new StringBuilder();
            short[] a = (short[]) obj;
            int i = 0;
            for (; i < Math.min(MAX_COUNT, a.length); i++) {
                bldr.append(a[i] + " ");
            }
            if (i < MAX_COUNT)
                bldr.append("...");
            JTextArea ret = new JTextArea(bldr.toString(), 10, 50);
            ret.setLineWrap(true);
            return ret;
        } else if (obj instanceof byte[]) {
            StringBuilder bldr = new StringBuilder();
            byte[] a = (byte[]) obj;
            int i = 0;
            for (; i < Math.min(MAX_COUNT, a.length); i++) {
                bldr.append(a[i] + " ");
            }
            if (i < MAX_COUNT)
                bldr.append("...");
            JTextArea ret = new JTextArea(bldr.toString(), 10, 50);
            ret.setLineWrap(true);
            return ret;
        } else if (obj instanceof boolean[]) {
            StringBuilder bldr = new StringBuilder();
            boolean[] a = (boolean[]) obj;
            int i = 0;
            for (; i < Math.min(MAX_COUNT, a.length); i++) {
                bldr.append(a[i] + " ");
            }
            if (i < MAX_COUNT)
                bldr.append("...");
            // NOTE(review): unlike the other primitive-array branches, line
            // wrap is not enabled here — likely a copy-paste omission.
            JTextArea ret = new JTextArea(bldr.toString(), 10, 50);
            return ret;
        } else if (obj.getClass().isEnum()) {
            return new JTextField(String.valueOf(obj), 50);
        } else {
            // Arbitrary object: reflect over its annotated getters and
            // render each field recursively.
            JPanel ret = new JPanel(new MigLayout());
            Method[] methods = obj.getClass().getMethods();
            for (Method method : methods) {
                if (!isDefined(method)) {
                    continue;
                }
                String name = toName(method);

                Object value;
                try {
                    value = method.invoke(obj);
                    ret.add(new JLabel(name));
                    ret.add(renderValue(value), "wrap");
                } catch (Exception e) {
                    System.err.println("could not do it");
                }
            }
            return ret;
        }
    }

    /**
     * Derives a field name from a getter: strips the "get"/"is" prefix and
     * lower-cases the first remaining character.
     */
    private static String toName(Method method) {
        char[] name = method.getName().toCharArray();
        int ind = name[0] == 'g' ? 3 : 2;
        name[ind] = Character.toLowerCase(name[ind]);
        return new String(name, ind, name.length - ind);
    }

    /**
     * True for public, non-static, zero-argument getX()/isX() methods that
     * carry the {@link Box.AtomField} annotation.
     */
    private boolean isDefined(Method method) {
        if (!Modifier.isPublic(method.getModifiers()))
            return false;
        if (!method.getName().startsWith("get")
                && !(method.getName().startsWith("is") && method.getReturnType() == Boolean.TYPE))
            return false;
        if (method.getParameterTypes().length != 0)
            return false;
        if (void.class.equals(method.getReturnType()))
            return false;
        if (Modifier.isStatic(method.getModifiers()))
            return false;
        if (!method.isAnnotationPresent(Box.AtomField.class))
            return false;
        return true;
    }

    /**
     * Populates the tree with the file's top-level atoms; only "moov" is
     * parsed into its child boxes.
     */
    private void createNodes(DefaultMutableTreeNode top, String filename) throws IOException {
        DefaultMutableTreeNode category = null;
        FileChannelWrapper ch = null;
        try {
            ch = NIOUtils.readableChannel(new File(filename));
            List<Atom> rootAtoms = MP4Util.getRootAtoms(ch);
            for (Atom atom : rootAtoms) {
                category = new DefaultMutableTreeNode(new BoxNode(null, atom));
                top.add(category);
                String fourcc = atom.getHeader().getFourcc();
                if ("moov".equals(fourcc)) {
                    NodeBox moov = (NodeBox) atom.parseBox(ch);
                    addSub(moov, category);
                }
            }
        } finally {
            if (ch != null)
                ch.close();
        }
    }

    /** Recursively adds a parsed box's children under the given tree node. */
    private void addSub(NodeBox node, DefaultMutableTreeNode category) throws IOException {
        for (Box box : node.getBoxes()) {
            DefaultMutableTreeNode sub = new DefaultMutableTreeNode(new BoxNode(box, null));
            if (box instanceof NodeBox) {
                addSub((NodeBox) box, sub);
            }
            category.add(sub);
        }
    }

    /**
     * Create the GUI and show it. For thread safety, this method should be invoked
     * from the event dispatch thread.
     *
     * @param filename
     * @throws IOException
     */
    private static void createAndShowGUI(String filename) throws IOException {
        if (useSystemLookAndFeel) {
            try {
                UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
            } catch (Exception e) {
                System.err.println("Couldn't use system look and feel.");
            }
        }

        JFrame frame = new JFrame("MP4Analyzer");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

        frame.add(new MP4Analyzer(filename));

        frame.pack();
        frame.setVisible(true);
    }

    /** Entry point: args[0] is the MP4 file to open. */
    public static void main(final String[] args) {
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                try {
                    createAndShowGUI(args[0]);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }
}
package com.smixx.fabric;

import android.app.Activity;
import android.util.Log;

import com.crashlytics.android.answers.AddToCartEvent;
import com.crashlytics.android.answers.Answers;
import com.crashlytics.android.answers.AnswersEvent;
import com.crashlytics.android.answers.ContentViewEvent;
import com.crashlytics.android.answers.CustomEvent;
import com.crashlytics.android.answers.InviteEvent;
import com.crashlytics.android.answers.LevelEndEvent;
import com.crashlytics.android.answers.LevelStartEvent;
import com.crashlytics.android.answers.LoginEvent;
import com.crashlytics.android.answers.PredefinedEvent;
import com.crashlytics.android.answers.PurchaseEvent;
import com.crashlytics.android.answers.RatingEvent;
import com.crashlytics.android.answers.SearchEvent;
import com.crashlytics.android.answers.ShareEvent;
import com.crashlytics.android.answers.SignUpEvent;
import com.crashlytics.android.answers.StartCheckoutEvent;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableMapKeySetIterator;
import com.facebook.react.bridge.ReadableType;

import java.math.BigDecimal;
import java.util.Currency;

/**
 * React Native bridge module exposing the Fabric Answers event-logging API.
 *
 * Each log* method maps a predefined Answers event; nullable parameters are
 * skipped rather than passed through (numeric strings are parsed on the
 * Java side, so a null would otherwise crash the bridge call).
 */
public class SMXAnswers extends ReactContextBaseJavaModule {

    public Activity activity;

    public SMXAnswers(ReactApplicationContext reactContext, Activity activity) {
        super(reactContext);
        this.activity = activity;
    }

    /** Name under which this module is registered on the JS side. */
    @Override
    public String getName() {
        return "SMXAnswers";
    }

    /** Logs an add-to-cart event; all parameters except customAttributes are optional. */
    @ReactMethod
    public void logAddToCart(String itemPrice, String currency, String itemName,
                             String itemType, String itemId, ReadableMap customAttributes) {
        AddToCartEvent event = new AddToCartEvent();
        if (currency != null)
            event.putCurrency(Currency.getInstance(currency));
        if (itemPrice != null)
            event.putItemPrice(new BigDecimal(itemPrice));
        if (itemName != null)
            event.putItemName(itemName);
        if (itemType != null)
            event.putItemType(itemType);
        if (itemId != null)
            event.putItemId(itemId);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logAddToCart(event);
    }

    /** Logs a content-view event. */
    @ReactMethod
    public void logContentView(String contentName, String contentType, String contentId,
                               ReadableMap customAttributes) {
        ContentViewEvent event = new ContentViewEvent();
        if (contentId != null)
            event.putContentId(contentId);
        if (contentType != null)
            event.putContentType(contentType);
        if (contentName != null)
            event.putContentName(contentName);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logContentView(event);
    }

    /** Logs a custom, free-form event. */
    @ReactMethod
    public void logCustom(String eventName, ReadableMap customAttributes) {
        CustomEvent event = new CustomEvent(eventName);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logCustom(event);
    }

    /** Logs an invite event. */
    @ReactMethod
    public void logInvite(String method, ReadableMap customAttributes) {
        InviteEvent event = new InviteEvent();
        event.putMethod(method);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logInvite(event);
    }

    /** Logs a level-start event. */
    @ReactMethod
    public void logLevelStart(String levelName, ReadableMap customAttributes) {
        LevelStartEvent event = new LevelStartEvent();
        event.putLevelName(levelName);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logLevelStart(event);
    }

    /** Logs a level-end event; score is parsed as a double when present. */
    @ReactMethod
    public void logLevelEnd(String levelName, String score, boolean success,
                            ReadableMap customAttributes) {
        LevelEndEvent event = new LevelEndEvent();
        if (levelName != null)
            event.putLevelName(levelName);
        event.putSuccess(success);
        if (score != null)
            event.putScore(Double.valueOf(score));
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logLevelEnd(event);
    }

    /** Logs a login event. */
    @ReactMethod
    public void logLogin(String method, boolean success, ReadableMap customAttributes) {
        LoginEvent event = new LoginEvent();
        event.putMethod(method);
        event.putSuccess(success);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logLogin(event);
    }

    /** Logs a purchase event; all string parameters are optional. */
    @ReactMethod
    public void logPurchase(String itemPrice, String currency, boolean success, String itemName,
                            String itemType, String itemId, ReadableMap customAttributes) {
        PurchaseEvent event = new PurchaseEvent();
        if (currency != null)
            event.putCurrency(Currency.getInstance(currency));
        if (itemPrice != null)
            event.putItemPrice(new BigDecimal(itemPrice));
        if (itemName != null)
            event.putItemName(itemName);
        if (itemType != null)
            event.putItemType(itemType);
        if (itemId != null)
            event.putItemId(itemId);
        event.putSuccess(success);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logPurchase(event);
    }

    /** Logs a rating event; rating is parsed as an int when present. */
    @ReactMethod
    public void logRating(String rating, String contentId, String contentType, String contentName,
                          ReadableMap customAttributes) {
        RatingEvent event = new RatingEvent();
        // FIX: guard against a null rating (Integer.valueOf(null) threw),
        // consistent with how the other numeric parameters are handled.
        if (rating != null)
            event.putRating(Integer.valueOf(rating));
        if (contentId != null)
            event.putContentId(contentId);
        if (contentType != null)
            event.putContentType(contentType);
        if (contentName != null)
            event.putContentName(contentName);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logRating(event);
    }

    /** Logs a search event. */
    @ReactMethod
    public void logSearch(String query, ReadableMap customAttributes) {
        SearchEvent event = new SearchEvent();
        event.putQuery(query);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logSearch(event);
    }

    /** Logs a share event. */
    @ReactMethod
    public void logShare(String method, String contentName, String contentType, String contentId,
                         ReadableMap customAttributes) {
        ShareEvent event = new ShareEvent();
        event.putMethod(method);
        if (contentId != null)
            event.putContentId(contentId);
        if (contentType != null)
            event.putContentType(contentType);
        if (contentName != null)
            event.putContentName(contentName);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logShare(event);
    }

    /** Logs a sign-up event. */
    @ReactMethod
    public void logSignUp(String method, boolean success, ReadableMap customAttributes) {
        SignUpEvent event = new SignUpEvent();
        event.putMethod(method);
        event.putSuccess(success);
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logSignUp(event);
    }

    /** Logs a start-checkout event; count and totalPrice are optional numeric strings. */
    @ReactMethod
    public void logStartCheckout(String totalPrice, String count, String currency,
                                 ReadableMap customAttributes) {
        StartCheckoutEvent event = new StartCheckoutEvent();
        if (currency != null)
            event.putCurrency(Currency.getInstance(currency));
        if (count != null)
            event.putItemCount(Integer.valueOf(count));
        if (totalPrice != null)
            event.putTotalPrice(new BigDecimal(totalPrice));
        addCustomAttributes(event, customAttributes);
        Answers.getInstance().logStartCheckout(event);
    }

    /**
     * Copies the entries of a JS-supplied map onto an Answers event.
     * Booleans are stringified, numbers pass through as doubles; nulls are
     * skipped, and object/array values are rejected with an error log.
     */
    private void addCustomAttributes(AnswersEvent event, ReadableMap attributes) {
        if (attributes != null) {
            ReadableMapKeySetIterator itr = attributes.keySetIterator();
            while (itr.hasNextKey()) {
                String key = itr.nextKey();
                ReadableType type = attributes.getType(key);
                switch (type) {
                    case Boolean:
                        event.putCustomAttribute(key, String.valueOf(attributes.getBoolean(key)));
                        break;
                    case Number:
                        event.putCustomAttribute(key, attributes.getDouble(key));
                        break;
                    case String:
                        event.putCustomAttribute(key, attributes.getString(key));
                        break;
                    case Null:
                        break;
                    default:
                        Log.e("ReactNativeFabric", "Can't add objects or arrays");
                }
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.accumulo.server.metadata; import org.apache.accumulo.core.data.Mutation; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.dataImpl.KeyExtent; import org.apache.accumulo.core.metadata.StoredTabletFile; import org.apache.accumulo.core.metadata.SuspendingTServer; import org.apache.accumulo.core.metadata.TServerInstance; import org.apache.accumulo.core.metadata.TabletFile; import org.apache.accumulo.core.metadata.schema.Ample; import org.apache.accumulo.core.metadata.schema.Ample.TabletMutator; import org.apache.accumulo.core.metadata.schema.DataFileValue; import org.apache.accumulo.core.metadata.schema.ExternalCompactionId; import org.apache.accumulo.core.metadata.schema.ExternalCompactionMetadata; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.BulkFileColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ChoppedColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.CurrentLocationColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.DataFileColumnFamily; import 
org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ExternalCompactionColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.FutureLocationColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.LastLocationColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.LogColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ScanFileColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ServerColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.SuspendLocationColumn; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.TabletColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataTime; import org.apache.accumulo.core.metadata.schema.TabletMetadata.LocationType; import org.apache.accumulo.core.tabletserver.log.LogEntry; import org.apache.accumulo.fate.FateTxId; import org.apache.accumulo.fate.zookeeper.ServiceLock; import org.apache.accumulo.server.ServerContext; import org.apache.hadoop.io.Text; import com.google.common.base.Preconditions; public abstract class TabletMutatorBase implements Ample.TabletMutator { private final ServerContext context; private final KeyExtent extent; private final Mutation mutation; protected AutoCloseable closeAfterMutate; private boolean updatesEnabled = true; protected TabletMutatorBase(ServerContext context, KeyExtent extent) { this.extent = extent; this.context = context; mutation = new Mutation(extent.toMetaRow()); } @Override public Ample.TabletMutator putPrevEndRow(Text per) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); TabletColumnFamily.PREV_ROW_COLUMN.put(mutation, TabletColumnFamily.encodePrevEndRow(extent.prevEndRow())); return this; } @Override public Ample.TabletMutator putDirName(String dirName) { 
ServerColumnFamily.validateDirCol(dirName); Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); ServerColumnFamily.DIRECTORY_COLUMN.put(mutation, new Value(dirName)); return this; } @Override public Ample.TabletMutator putFile(TabletFile path, DataFileValue dfv) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.put(DataFileColumnFamily.NAME, path.getMetaInsertText(), new Value(dfv.encode())); return this; } @Override public Ample.TabletMutator deleteFile(StoredTabletFile path) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.putDelete(DataFileColumnFamily.NAME, path.getMetaUpdateDeleteText()); return this; } @Override public Ample.TabletMutator putScan(TabletFile path) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.put(ScanFileColumnFamily.NAME, path.getMetaInsertText(), new Value()); return this; } @Override public Ample.TabletMutator deleteScan(StoredTabletFile path) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.putDelete(ScanFileColumnFamily.NAME, path.getMetaUpdateDeleteText()); return this; } @Override public Ample.TabletMutator putCompactionId(long compactionId) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); ServerColumnFamily.COMPACT_COLUMN.put(mutation, new Value(Long.toString(compactionId))); return this; } @Override public Ample.TabletMutator putFlushId(long flushId) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); ServerColumnFamily.FLUSH_COLUMN.put(mutation, new Value(Long.toString(flushId))); return this; } @Override public Ample.TabletMutator putTime(MetadataTime time) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); ServerColumnFamily.TIME_COLUMN.put(mutation, new Value(time.encode())); return this; 
} private String getLocationFamily(LocationType type) { switch (type) { case CURRENT: return CurrentLocationColumnFamily.STR_NAME; case FUTURE: return FutureLocationColumnFamily.STR_NAME; case LAST: return LastLocationColumnFamily.STR_NAME; default: throw new IllegalArgumentException(); } } @Override public Ample.TabletMutator putLocation(TServerInstance tsi, LocationType type) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.put(getLocationFamily(type), tsi.getSession(), tsi.getHostPort()); return this; } @Override public Ample.TabletMutator deleteLocation(TServerInstance tsi, LocationType type) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.putDelete(getLocationFamily(type), tsi.getSession()); return this; } @Override public Ample.TabletMutator putZooLock(ServiceLock zooLock) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); ServerColumnFamily.LOCK_COLUMN.put(mutation, new Value(zooLock.getLockID().serialize(context.getZooKeeperRoot() + "/"))); return this; } @Override public Ample.TabletMutator putWal(LogEntry logEntry) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.put(logEntry.getColumnFamily(), logEntry.getColumnQualifier(), logEntry.getValue()); return this; } @Override public Ample.TabletMutator deleteWal(LogEntry logEntry) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.putDelete(logEntry.getColumnFamily(), logEntry.getColumnQualifier()); return this; } @Override public Ample.TabletMutator deleteWal(String wal) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.putDelete(LogColumnFamily.STR_NAME, wal); return this; } @Override public Ample.TabletMutator putBulkFile(TabletFile bulkref, long tid) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling 
mutate."); mutation.put(BulkFileColumnFamily.NAME, bulkref.getMetaInsertText(), new Value(FateTxId.formatTid(tid))); return this; } @Override public Ample.TabletMutator deleteBulkFile(TabletFile bulkref) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.putDelete(BulkFileColumnFamily.NAME, bulkref.getMetaInsertText()); return this; } @Override public Ample.TabletMutator putChopped() { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); ChoppedColumnFamily.CHOPPED_COLUMN.put(mutation, new Value("chopped")); return this; } @Override public Ample.TabletMutator putSuspension(TServerInstance tServer, long suspensionTime) { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.put(SuspendLocationColumn.SUSPEND_COLUMN.getColumnFamily(), SuspendLocationColumn.SUSPEND_COLUMN.getColumnQualifier(), SuspendingTServer.toValue(tServer, suspensionTime)); return this; } @Override public Ample.TabletMutator deleteSuspension() { Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate."); mutation.putDelete(SuspendLocationColumn.SUSPEND_COLUMN.getColumnFamily(), SuspendLocationColumn.SUSPEND_COLUMN.getColumnQualifier()); return this; } @Override public TabletMutator putExternalCompaction(ExternalCompactionId ecid, ExternalCompactionMetadata ecMeta) { mutation.put(ExternalCompactionColumnFamily.STR_NAME, ecid.canonical(), ecMeta.toJson()); return this; } @Override public TabletMutator deleteExternalCompaction(ExternalCompactionId ecid) { mutation.putDelete(ExternalCompactionColumnFamily.STR_NAME, ecid.canonical()); return this; } protected Mutation getMutation() { updatesEnabled = false; return mutation; } public void setCloseAfterMutate(AutoCloseable closeable) { this.closeAfterMutate = closeable; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package opennlp.tools.util.eval;

import java.io.IOException;
import java.util.Collection;
import java.util.NoSuchElementException;

import opennlp.tools.util.CollectionObjectStream;
import opennlp.tools.util.ObjectStream;

/**
 * Provides access to training and test partitions for n-fold cross validation.
 * <p>
 * Cross validation is used to evaluate the performance of a classifier when only
 * training data is available. The training set is split into n parts
 * and the training / evaluation is performed n times on these parts.
 * The training partition always consists of n -1 parts and one part is used for testing.
 * <p>
 * To use the <code>CrossValidationPartitioner</code> a client iterates over the n
 * <code>TrainingSampleStream</code>s. Each <code>TrainingSampleStream</code> represents
 * one partition and is used first for training and afterwards for testing.
 * The <code>TestSampleStream</code> can be obtained from the <code>TrainingSampleStream</code>
 * with the <code>getTestSampleStream</code> method.
 */
public class CrossValidationPartitioner<E> {

  /**
   * The <code>TestSampleStream</code> iterates over all test elements,
   * i.e. every n-th element of the underlying stream, where the offset
   * within each group of n is {@code testIndex}.
   *
   * @param <E> the sample type
   */
  private static class TestSampleStream<E> implements ObjectStream<E> {

    // The underlying stream shared with the TrainingSampleStream.
    private ObjectStream<E> sampleStream;

    // n: the total number of partitions the data is divided into.
    private final int numberOfPartitions;

    // Offset (mod numberOfPartitions) of the elements that belong to the
    // test partition.
    private final int testIndex;

    // Position in the underlying stream; index % numberOfPartitions decides
    // whether the current element is a test element.
    private int index;

    // Once poisoned, any further read fails fast with IllegalStateException.
    private boolean isPoisened;

    private TestSampleStream(ObjectStream<E> sampleStream, int numberOfPartitions, int testIndex) {
      this.numberOfPartitions = numberOfPartitions;
      this.sampleStream = sampleStream;
      this.testIndex = testIndex;
    }

    /**
     * Reads the next test element, silently consuming (and discarding) any
     * training elements that precede it in the underlying stream.
     *
     * @throws IllegalStateException if this stream has been poisoned
     */
    public E read() throws IOException {
      if (isPoisened) {
        throw new IllegalStateException();
      }

      // skip training samples
      while (index % numberOfPartitions != testIndex) {
        sampleStream.read();
        index++;
      }

      index++;

      return sampleStream.read();
    }

    /**
     * Throws <code>UnsupportedOperationException</code>
     */
    public void reset() {
      throw new UnsupportedOperationException();
    }

    public void close() throws IOException {
      sampleStream.close();
      isPoisened = true;
    }

    // Marks this stream unusable; called when the partitioner moves on.
    void poison() {
      isPoisened = true;
    }
  }

  /**
   * The <code>TrainingSampleStream</code> which iterates over
   * all training elements.
   *
   * Note:
   * After the <code>TestSampleStream</code> was obtained
   * the <code>TrainingSampleStream</code> must not be used
   * anymore, otherwise a {@link IllegalStateException}
   * is thrown.
   *
   * The <code>ObjectStream</code>s must not be used anymore after the
   * <code>CrossValidationPartitioner</code> was moved
   * to one of next partitions. If they are called anyway
   * a {@link IllegalStateException} is thrown.
   *
   * @param <E> the sample type
   */
  public static class TrainingSampleStream<E> implements ObjectStream<E> {

    // The underlying stream; later shared with the TestSampleStream.
    private ObjectStream<E> sampleStream;

    // n: the total number of partitions.
    private final int numberOfPartitions;

    // Offset (mod numberOfPartitions) of the elements reserved for testing.
    private final int testIndex;

    // Position in the underlying stream.
    private int index;

    // Once poisoned, reads and resets fail fast with IllegalStateException.
    private boolean isPoisened;

    // Non-null after getTestSampleStream() was called; its presence also
    // forbids further training reads.
    private TestSampleStream<E> testSampleStream;

    TrainingSampleStream(ObjectStream<E> sampleStream, int numberOfPartitions, int testIndex) {
      this.numberOfPartitions = numberOfPartitions;
      this.sampleStream = sampleStream;
      this.testIndex = testIndex;
    }

    /**
     * Reads the next training element, skipping over the one element in
     * each group of n that belongs to the test partition.
     *
     * @throws IllegalStateException if the test stream was already obtained
     *     or this stream has been poisoned
     */
    public E read() throws IOException {
      if (testSampleStream != null || isPoisened) {
        throw new IllegalStateException();
      }

      // If the test element is reached skip over it to not include it in
      // the training data
      if (index % numberOfPartitions == testIndex) {
        sampleStream.read();
        index++;
      }

      index++;
      return sampleStream.read();
    }

    /**
     * Resets the training sample. Use this if you need to collect things before
     * training, for example, to collect induced abbreviations or create a POS
     * Dictionary.
     *
     * @throws IOException if the underlying stream cannot be reset
     * @throws IllegalStateException if the test stream was already obtained
     *     or this stream has been poisoned
     */
    public void reset() throws IOException {
      if (testSampleStream != null || isPoisened) {
        throw new IllegalStateException();
      }
      this.index = 0;
      this.sampleStream.reset();
    }

    public void close() throws IOException {
      sampleStream.close();
      poison();
    }

    // Marks this stream (and any obtained test stream) unusable.
    void poison() {
      isPoisened = true;
      if (testSampleStream != null)
        testSampleStream.poison();
    }

    /**
     * Retrieves the <code>ObjectStream</code> over the test/evaluations
     * elements and poisons this <code>TrainingSampleStream</code>.
     * From now on calls to the hasNext and next methods are forbidden
     * and will raise an <code>IllegalStateException</code>.
     *
     * @return the test sample stream
     */
    public ObjectStream<E> getTestSampleStream() throws IOException {
      if (isPoisened) {
        throw new IllegalStateException();
      }

      if (testSampleStream == null) {
        // Rewind the shared stream so the test stream starts from the
        // beginning of the data; from here on it is owned by the test stream.
        sampleStream.reset();
        testSampleStream = new TestSampleStream<E>(sampleStream, numberOfPartitions, testIndex);
      }

      return testSampleStream;
    }
  }

  /**
   * An <code>ObjectStream</code> over the whole set of data samples which
   * are used for the cross validation.
   */
  private ObjectStream<E> sampleStream;

  /**
   * The number of parts the data is divided into.
   */
  private final int numberOfPartitions;

  /**
   * The index of test part.
   */
  private int testIndex;

  /**
   * The last handed out <code>TrainingIterator</code>. The reference
   * is needed to poison the instance to fail fast if it is used
   * despite the fact that it is forbidden.
   */
  private TrainingSampleStream<E> lastTrainingSampleStream;

  /**
   * Initializes the current instance.
   *
   * @param inElements the stream of all data samples
   * @param numberOfPartitions the number of folds, n
   */
  public CrossValidationPartitioner(ObjectStream<E> inElements, int numberOfPartitions) {
    this.sampleStream = inElements;
    this.numberOfPartitions = numberOfPartitions;
  }

  /**
   * Initializes the current instance.
   *
   * @param elements the collection of all data samples
   * @param numberOfPartitions the number of folds, n
   */
  public CrossValidationPartitioner(Collection<E> elements, int numberOfPartitions) {
    this(new CollectionObjectStream<E>(elements), numberOfPartitions);
  }

  /**
   * Checks if there are more partitions available.
   */
  public boolean hasNext() {
    return testIndex < numberOfPartitions;
  }

  /**
   * Retrieves the next training and test partitions.
   *
   * @return the training stream for the next partition; the previously
   *     returned stream is poisoned and must no longer be used
   * @throws NoSuchElementException if all partitions have been handed out
   */
  public TrainingSampleStream<E> next() throws IOException {
    if (hasNext()) {
      // Poison the previous partition's streams so stale references fail fast.
      if (lastTrainingSampleStream != null)
        lastTrainingSampleStream.poison();

      sampleStream.reset();

      TrainingSampleStream<E> trainingSampleStream = new TrainingSampleStream<E>(sampleStream,
          numberOfPartitions, testIndex);

      testIndex++;

      lastTrainingSampleStream = trainingSampleStream;

      return trainingSampleStream;
    } else {
      throw new NoSuchElementException();
    }
  }

  @Override
  public String toString() {
    return "At partition" + Integer.toString(testIndex + 1) +
        " of " + Integer.toString(numberOfPartitions);
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package dk.statsbiblioteket.summa.storage.database.cursors;

import dk.statsbiblioteket.summa.common.Record;
import dk.statsbiblioteket.summa.common.util.UniqueTimestampGenerator;
import dk.statsbiblioteket.summa.storage.api.QueryOptions;
import dk.statsbiblioteket.summa.storage.database.DatabaseStorage;
import dk.statsbiblioteket.util.qa.QAInfo;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.NoSuchElementException;

/**
 * Wraps a ResultSet and the context it was created in as a {@link Cursor}.
 */
@QAInfo(level = QAInfo.Level.NORMAL,
        state = QAInfo.State.QA_NEEDED,
        author = "mke",
        reviewers = "hbk")
public class ResultSetCursor implements ConnectionCursor {
    private static final Log log = LogFactory.getLog(ResultSetCursor.class);

    // System time (ms) when the first record was handed out via next();
    // used together with lastAccess for depletion statistics.
    private long firstAccess;
    // System time (ms) of the most recent access to this cursor.
    private long lastAccess;
    // Number of records handed out via next() so far.
    private long totalRecords;

    private long key; // key is 0 for anonymous cursors
    // Unique mtime timestamp of the record that next() will return next;
    // pushed in by DatabaseStorage.scanRecord() via setRecordMtimeTimestamp().
    private long nextMtimeTimestamp;
    // Unique mtime timestamp of the record most recently returned by next().
    private long currentMtimeTimestamp;
    private PreparedStatement stmt;
    protected String base;
    private ResultSet resultSet;
    // One-record read-ahead buffer; null means the cursor is depleted.
    private Record nextRecord;
    private QueryOptions options;
    // Mirrors whether the underlying resultSet has more rows; updated both
    // here and by DatabaseStorage.scanRecord() via setResultSetHasNext().
    private boolean resultSetHasNext;
    private DatabaseStorage db;

    /**
     * Create a new non-anonymous cursor with {@code null} base and query
     * options.
     *
     * @param db        The DatabaseStorage owning the cursor.
     * @param stmt      The statement which produced {@code resultSet}.
     * @param resultSet The ResultSet to read records from.
     * @throws SQLException on any SQLException reading the result set.
     * @throws IOException on any IOExceptions reading records.
     */
    public ResultSetCursor(
            DatabaseStorage db, PreparedStatement stmt, ResultSet resultSet)
            throws SQLException, IOException {
        this(db, stmt, resultSet, null, null, false);
    }

    /**
     * Create a new possibly anonymous cursor with {@code null} base and query
     * options.
     *
     * @param db        The DatabaseStorage owning the cursor.
     * @param stmt      The statement which produced {@code resultSet}.
     * @param resultSet The ResultSet to read records from.
     * @param anonymous Anonymous cursors does less logging. They are suitable
     *                  for short lived, and intermediate, result sets.
     * @throws SQLException on any SQLException reading the result set.
     * @throws IOException on any IOExceptions reading records.
     */
    public ResultSetCursor(DatabaseStorage db,
                           PreparedStatement stmt,
                           ResultSet resultSet,
                           boolean anonymous)
            throws SQLException, IOException {
        this(db, stmt, resultSet, null, null, anonymous);
    }

    /**
     * Create a new non-anonymous cursor.
     *
     * @param db        The DatabaseStorage owning the cursor.
     * @param stmt      The statement which produced {@code resultSet}.
     * @param resultSet The ResultSet to read records from.
     * @param base      The Record base the cursor is iterating over. Possibly
     *                  {@code null} if the base is undefined.
     * @param options   Any query options the records must match.
     * @throws SQLException on any SQLException reading the result set.
     * @throws IOException on any IOExceptions reading records.
     */
    public ResultSetCursor(DatabaseStorage db,
                           PreparedStatement stmt,
                           ResultSet resultSet,
                           String base,
                           QueryOptions options)
            throws SQLException, IOException {
        this(db, stmt, resultSet, base, options, false);
    }

    /**
     * Create a new possible anonymous cursor.
     *
     * @param db        The DatabaseStorage owning the cursor.
     * @param stmt      The statement which produced {@code resultSet}.
     * @param resultSet The ResultSet to read records from.
     * @param base      The Record base the cursor is iterating over. Possibly
     *                  {@code null} if the base is undefined.
     * @param options   Any query options the records must match.
     * @param anonymous Anonymous cursors does less logging. They are suitable
     *                  for short lived, and intermediate, result sets.
     * @throws SQLException on any SQLException reading the result set.
     * @throws IOException on any IOExceptions reading records.
     */
    public ResultSetCursor(DatabaseStorage db,
                           PreparedStatement stmt,
                           ResultSet resultSet,
                           String base,
                           QueryOptions options,
                           boolean anonymous)
            throws SQLException, IOException {
        this.db = db;
        this.stmt = stmt;
        this.base = base;
        this.resultSet = resultSet;
        this.options = options;

        // The cursor start "outside" the result set, so step into it
        resultSetHasNext = resultSet.next();

        // This also updates resultSetHasNext
        nextRecord = nextValidRecord();

        // The generated timestamps a guaranteed to be unique, so no
        // cursor key clashes even within the same millisecond
        key = anonymous ? 0 : db.getTimestampGenerator().next();

        // Extract the system time from when we generated the cursor key
        lastAccess = db.getTimestampGenerator().systemTime(key);
        firstAccess = 0;
        totalRecords = 0;

        if (!anonymous) {
            log.trace("Constructed with initial hasNext: "
                      + resultSetHasNext + ", on base " + base);
        }
        // NOTE(review): the following logs for anonymous cursors too, despite
        // the "anonymous cursors log less" contract above — verify intent.
        if (resultSetHasNext) {
            log.debug("Constructed with initial hasNext: "
                      + resultSetHasNext + ", nextValidRecord: '" + nextRecord
                      + "', on base " + base);
        } else {
            log.trace("Constructed with initial hasNext: "
                      + resultSetHasNext + ", nextValidRecord: '" + nextRecord
                      + "', on base " + base);
        }
    }

    @Override
    public Connection getConnection() {
        try {
            return stmt.getConnection();
        } catch (SQLException e) {
            throw new IllegalStateException("There should be an open connection for the inner statement", e);
        }
    }

    /**
     * Getter for last access time.
     *
     * @return last access time.
     */
    @Override
    public long getLastAccess() {
        return lastAccess;
    }

    /**
     * Getter for query options.
     *
     * @return query options.
     */
    @Override
    public QueryOptions getQueryOptions() {
        return options;
    }

    /**
     * Return the globally unique key for this cursor. If it was created
     * with {@code anonymous=true} then the key will not be unique, but always
     * be {@code 0}.
     * <p>
     * Note: Side-effect updates the lastAccess time.
     *
     * @return {@code 0} if the cursor is anonymous. Otherwise the cursor's
     *         globally unique key will be returned.
     */
    @Override
    public long getKey() {
        lastAccess = System.currentTimeMillis();
        return key;
    }

    /**
     * Getter for base.
     *
     * @return the base name.
     */
    @Override
    public String getBase() {
        return base;
    }

    /**
     * Return true if this iterator has a next item.
     * Note: Side-effect updated lastAccess time.
     *
     * @return true if it has next item.
     */
    @Override
    public boolean hasNext() {
        lastAccess = System.currentTimeMillis();
        return nextRecord != null;
    }

    /**
     * Constructs a Record based on the row in the result set, then advances
     * to the next record.
     *
     * @return a Record based on the current row in the result set.
     * @throws NoSuchElementException if the cursor is depleted.
     */
    @Override
    public Record next() {
        lastAccess = System.currentTimeMillis();

        if (nextRecord == null) {
            throw new NoSuchElementException("Iterator " + key + " depleted");
        }

        if (totalRecords == 0) {
            firstAccess = lastAccess; // Set firstAccess to 'now'
        }
        totalRecords++;

        // Hand out the buffered record and promote its mtime timestamp,
        // then read ahead for the next call.
        Record record = nextRecord;
        currentMtimeTimestamp = nextMtimeTimestamp;

        try {
            nextRecord = nextValidRecord();
        } catch (SQLException | IOException e) {
            // A failed read-ahead depletes the cursor rather than failing
            // this call; the already-fetched record is still returned.
            log.warn("Error reading next record: " + e.getMessage(), e);
            nextRecord = null;
        }

        if (log.isTraceEnabled()) {
            log.trace("next returning '" + record.getId() + "'");
        }

        /* The naive solution to updating resultSetHasNext is to just do:
         *   resultSetHasNext = !resultSet.isAfterLast();
         * here, but the ResultSet type does not allow this unless it is
         * of a scrollable type, which we do not use for resource limitation
         * purposes. Instead the resultSetHasNext state is updated in
         * scanRecord() */

        return record;
    }

    /**
     * Remove first element, by calling next() and throwing the returned record
     * away.
     */
    @Override
    public void remove() {
        if (hasNext()) {
            next();
        }
    }

    /**
     * Return next valid record from the iterator.
     *
     * @return Next valid record, or {@code null} when depleted.
     * @throws SQLException if database error occurred while fetching record.
     * @throws IOException if error occurred while fetching record.
     */
    private Record nextValidRecord() throws SQLException, IOException {
        /* This check should _not_ use the method resultSetHasNext() because
         * it is only the state of the resultSet that is important here. */
        if (!resultSetHasNext) {
            logDepletedStats();
            return null;
        }

        /* scanRecord() steps the resultSet to the next record.
         * It will update the state of the iterator appropriately. */
        Record r = db.scanRecord(resultSet, this, options);

        // Allow all mode
        if (options == null) {
            return r;
        }

        // Skip records the options disallow; each scanRecord() call may flip
        // resultSetHasNext to false when the underlying rows run out.
        while (resultSetHasNext && !options.allowsRecord(r)) {
            r = db.scanRecord(resultSet, this, options);
        }
        // NOTE(review): this trace fires even when the loop exited because the
        // result set was exhausted and r is still disallowed — confirm intent.
        log.trace("Found allowed record (" + r.getId() + ", " + r.getBase()
                  + ", " + r.isDeleted() + ", " + r.isIndexable() + ")");
        // We don't need all information from a record.
        if (options.newRecordNeeded()) {
            log.trace("Creating new record (" + r.getId() + ", " + r.getBase()
                      + ", " + r.isDeleted() + ", " + r.isIndexable());
            return options.getNewRecord(r);
        }

        if (options.allowsRecord(r)) {
            log.debug("Record ID('" + r.getId() + "'') is allowed");
            return r;
        }

        // If we end here there are no more records and the one we have
        // is not OK by the options
        resultSetHasNext = false;
        logDepletedStats();
        return null;
    }

    /**
     * Log depleted stats to debug.
     */
    private void logDepletedStats() {
        // Only log stats if this is a non-anonymous cursor
        if (key != 0) {
            log.debug(this + " depleted. After " + totalRecords
                      + " records and " + (lastAccess - firstAccess) + "ms");
        }
    }

    /**
     * Closes result set cursor.
     */
    @Override
    public void close() {
        try {
            if (stmt.isClosed()) {
                if (log.isTraceEnabled()) {
                    log.trace("Ignoring close request on iterator "
                              + this + ". Already closed");
                }
                return;
            }
            if (key != 0) {
                log.trace("Closing " + this);
            }
            resultSet.close();
            stmt.close();
        } catch (Exception e) {
            // Best-effort close; failure is logged but never propagated.
            log.warn("Failed to close cursor statement " + stmt + ": "
                     + e.getMessage(), e);
        }
    }

    /**
     * Called from
     * {@link DatabaseStorage#scanRecord(java.sql.ResultSet, ResultSetCursor, QueryOptions)}.
     *
     * @param resultSetHasNext the new value.
     */
    public void setResultSetHasNext(boolean resultSetHasNext) {
        this.resultSetHasNext = resultSetHasNext;
    }

    /**
     * Called from
     * {@link DatabaseStorage#scanRecord(java.sql.ResultSet, ResultSetCursor, QueryOptions)}
     * with the raw mtime timestamp for the current Record as generated by
     * the {@link UniqueTimestampGenerator} of the {@link DatabaseStorage}.
     * <p/>
     * To make it clear; the timestamp set here is not the same as the Record's
     * modification time, but a unique timestamp that can be used by paging
     * models as offset for subsequent queries.
     *
     * @param mtimeTimestamp a timestamp in the binary format of a
     *                       {@link UniqueTimestampGenerator}.
     */
    public void setRecordMtimeTimestamp(long mtimeTimestamp) {
        nextMtimeTimestamp = mtimeTimestamp;
    }

    /**
     * Return the timestamp for the modification time for the last
     * {@link Record} retrieved by calling {@link #next()}.
     * <p/>
     * The timestamp is <i>not</i> a standard system time value, but in the
     * binary format as generated by a {@link UniqueTimestampGenerator}. The
     * primary intent is to use this unique timestamp for {@link PagingCursor}.
     *
     * @return the raw, unique, binary timestamp of the last record returned by
     *         this cursor.
     */
    public long currentMtimeTimestamp() {
        return currentMtimeTimestamp;
    }

    /**
     * Returns a result set cursor string, which are unique, and in the format
     * 'ResultSetCursor[unique key]'.
     *
     * @return unique string defining this object.
     */
    public String toString() {
        return "ResultSetCursor[" + key + "]";
    }

    @Override
    public boolean needsExpansion() {
        return true;
    }
}
package org.ihtsdo.otf.mapping.mojo;

import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

import org.apache.lucene.util.Version;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoFailureException;
import org.hibernate.CacheMode;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.ihtsdo.otf.mapping.jpa.FeedbackConversationJpa;
import org.ihtsdo.otf.mapping.jpa.MapProjectJpa;
import org.ihtsdo.otf.mapping.jpa.MapRecordJpa;
import org.ihtsdo.otf.mapping.reports.ReportJpa;
import org.ihtsdo.otf.mapping.rf2.jpa.ConceptJpa;
import org.ihtsdo.otf.mapping.rf2.jpa.TreePositionJpa;
import org.ihtsdo.otf.mapping.services.helpers.ConfigUtility;
import org.ihtsdo.otf.mapping.workflow.TrackingRecordJpa;

/**
 * Goal which makes lucene indexes based on hibernate-search annotations.
 *
 * See admin/lucene/pom.xml for a sample execution.
 *
 * @goal reindex
 *
 * @phase package
 */
public class LuceneReindexMojo extends AbstractMojo {

  /** The entity manager used for reindexing. */
  private EntityManager manager;

  /**
   * The specified objects to index. A comma-separated list of indexed-object
   * type names (e.g. "ConceptJpa,MapRecordJpa"); when unset, all indexed
   * object types are rebuilt.
   * @parameter
   */
  private String indexedObjects;

  /**
   * Instantiates a {@link LuceneReindexMojo} from the specified parameters.
   */
  public LuceneReindexMojo() {
    // do nothing
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.maven.plugin.Mojo#execute()
   */
  @Override
  public void execute() throws MojoFailureException {
    getLog().info("Starting lucene reindexing");
    getLog().info("  indexedObjects = " + indexedObjects);

    // set of objects to be re-indexed
    Set<String> objectsToReindex = new HashSet<>();

    // if no parameter specified, re-index all objects
    if (indexedObjects == null) {
      objectsToReindex.add("ConceptJpa");
      objectsToReindex.add("MapProjectJpa");
      objectsToReindex.add("MapRecordJpa");
      objectsToReindex.add("TreePositionJpa");
      objectsToReindex.add("TrackingRecordJpa");
      objectsToReindex.add("FeedbackConversationJpa");
      objectsToReindex.add("ReportJpa");

      // otherwise, construct set of indexed objects
    } else {

      // remove white-space and split by comma
      String[] objects = indexedObjects.replaceAll(" ", "").split(",");

      // add each value to the set
      for (String object : objects)
        objectsToReindex.add(object);
    }

    getLog().info("Starting reindexing for:");
    for (String objectToReindex : objectsToReindex) {
      getLog().info("  " + objectToReindex);
    }

    try {
      Properties config = ConfigUtility.getConfigProperties();
      EntityManagerFactory factory =
          Persistence.createEntityManagerFactory("MappingServiceDS", config);
      manager = factory.createEntityManager();

      // full text entity manager
      FullTextEntityManager fullTextEntityManager =
          Search.getFullTextEntityManager(manager);
      fullTextEntityManager.setProperty("Version", Version.LUCENE_36);

      // Rebuild the index for each requested object type; each call removes
      // the handled name from objectsToReindex so that leftovers indicate
      // unknown types.
      reindex(fullTextEntityManager, objectsToReindex, "ConceptJpa",
          ConceptJpa.class);
      reindex(fullTextEntityManager, objectsToReindex, "MapProjectJpa",
          MapProjectJpa.class);
      reindex(fullTextEntityManager, objectsToReindex, "MapRecordJpa",
          MapRecordJpa.class);
      reindex(fullTextEntityManager, objectsToReindex, "TreePositionJpa",
          TreePositionJpa.class);
      reindex(fullTextEntityManager, objectsToReindex, "TrackingRecordJpa",
          TrackingRecordJpa.class);
      reindex(fullTextEntityManager, objectsToReindex,
          "FeedbackConversationJpa", FeedbackConversationJpa.class);
      reindex(fullTextEntityManager, objectsToReindex, "ReportJpa",
          ReportJpa.class);

      // Anything left over was requested but is not a known indexed object.
      if (!objectsToReindex.isEmpty()) {
        throw new MojoFailureException(
            "The following objects were specified for re-indexing, but do not exist as indexed objects: "
                + objectsToReindex.toString());
      }

      // Cleanup
      manager.close();
      factory.close();

      getLog().info("done ...");

    } catch (Exception e) {
      e.printStackTrace();
      throw new MojoFailureException("Unexpected exception:", e);
    }
  }

  /**
   * Purges and rebuilds the Lucene index for {@code clazz} if {@code name} is
   * in {@code objectsToReindex}, removing the name from the set afterwards.
   *
   * @param fullTextEntityManager the full text entity manager
   * @param objectsToReindex the remaining requested object-type names
   * @param name the object-type name handled by this call
   * @param clazz the entity class whose index is rebuilt
   * @throws InterruptedException if the mass indexer is interrupted
   */
  private void reindex(FullTextEntityManager fullTextEntityManager,
    Set<String> objectsToReindex, String name, Class<?> clazz)
    throws InterruptedException {
    if (objectsToReindex.contains(name)) {
      getLog().info("  Creating indexes for " + name);
      fullTextEntityManager.purgeAll(clazz);
      fullTextEntityManager.flushToIndexes();
      fullTextEntityManager.createIndexer(clazz).batchSizeToLoadObjects(100)
          .cacheMode(CacheMode.NORMAL).threadsToLoadObjects(4)
          .threadsForSubsequentFetching(8).startAndWait();
      objectsToReindex.remove(name);
    }
  }

}
/* * Copyright (c) 2009-2012 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.jme3.effect; import com.jme3.bounding.BoundingBox; import com.jme3.effect.ParticleMesh.Type; import com.jme3.effect.influencers.DefaultParticleInfluencer; import com.jme3.effect.influencers.ParticleInfluencer; import com.jme3.effect.shapes.EmitterPointShape; import com.jme3.effect.shapes.EmitterShape; import com.jme3.export.InputCapsule; import com.jme3.export.JmeExporter; import com.jme3.export.JmeImporter; import com.jme3.export.OutputCapsule; import com.jme3.math.ColorRGBA; import com.jme3.math.FastMath; import com.jme3.math.Matrix3f; import com.jme3.math.Vector3f; import com.jme3.renderer.Camera; import com.jme3.renderer.RenderManager; import com.jme3.renderer.ViewPort; import com.jme3.renderer.queue.RenderQueue.Bucket; import com.jme3.renderer.queue.RenderQueue.ShadowMode; import com.jme3.scene.Geometry; import com.jme3.scene.Spatial; import com.jme3.scene.control.Control; import com.jme3.util.TempVars; import java.io.IOException; /** * <code>ParticleEmitter</code> is a special kind of geometry which simulates * a particle system. * <p> * Particle emitters can be used to simulate various kinds of phenomena, * such as fire, smoke, explosions and much more. * <p> * Particle emitters have many properties which are used to control the * simulation. The interpretation of these properties depends on the * {@link ParticleInfluencer} that has been assigned to the emitter via * {@link ParticleEmitter#setParticleInfluencer(com.jme3.effect.influencers.ParticleInfluencer) }. * By default the implementation {@link DefaultParticleInfluencer} is used. 
 *
 * @author Kirill Vainer
 */
public class ParticleEmitter extends Geometry {

    // Master on/off switch; when false the emitter is "frozen in time" and
    // does not update (see setEnabled / updateFromControl).
    private boolean enabled = true;
    // Shared defaults; the constructor and read() deep-clone these so no
    // instance ever mutates the static objects.
    private static final EmitterShape DEFAULT_SHAPE = new EmitterPointShape(Vector3f.ZERO);
    private static final ParticleInfluencer DEFAULT_INFLUENCER = new DefaultParticleInfluencer();
    private ParticleEmitterControl control;
    private EmitterShape shape = DEFAULT_SHAPE;
    private ParticleMesh particleMesh;
    private ParticleInfluencer particleInfluencer = DEFAULT_INFLUENCER;
    private ParticleMesh.Type meshType;
    private Particle[] particles;
    // Live particles are kept packed at the front of the array:
    // firstUnUsed is the lowest known-free slot, lastUsed the highest live one.
    private int firstUnUsed;
    private int lastUsed;
//    private int next = 0;
//    private ArrayList<Integer> unusedIndices = new ArrayList<Integer>();
    private boolean randomAngle;
    private boolean selectRandomImage;
    private boolean facingVelocity;
    private float particlesPerSec = 20;
    // Un-spent emission time carried over from the previous frame
    // (see updateParticleState).
    private float timeDifference = 0;
    private float lowLife = 3f;
    private float highLife = 7f;
    private Vector3f gravity = new Vector3f(0.0f, 0.1f, 0.0f);
    private float rotateSpeed;
    // NaN vector is the sentinel for "face the camera" (see getFaceNormal()).
    private Vector3f faceNormal = new Vector3f(Vector3f.NAN);
    private int imagesX = 1;
    private int imagesY = 1;

    private ColorRGBA startColor = new ColorRGBA(0.4f, 0.4f, 0.4f, 0.5f);
    private ColorRGBA endColor = new ColorRGBA(0.1f, 0.1f, 0.1f, 0.0f);
    private float startSize = 0.2f;
    private float endSize = 2f;
    private boolean worldSpace = true;
    //variable that helps with computations
    private transient Vector3f temp = new Vector3f();

    /**
     * Control that drives the emitter by delegating the scene graph's
     * update/render callbacks back to the owning {@link ParticleEmitter}.
     */
    public static class ParticleEmitterControl implements Control {

        ParticleEmitter parentEmitter;

        /**
         * For serialization only. Do not use.
         */
        public ParticleEmitterControl() {
        }

        public ParticleEmitterControl(ParticleEmitter parentEmitter) {
            this.parentEmitter = parentEmitter;
        }

        public Control cloneForSpatial(Spatial spatial) {
            return this; // WARNING: Sets wrong control on spatial. Will be
                         // fixed automatically by ParticleEmitter.clone() method.
        }

        public void setSpatial(Spatial spatial) {
        }

        public void setEnabled(boolean enabled) {
            parentEmitter.setEnabled(enabled);
        }

        public boolean isEnabled() {
            return parentEmitter.isEnabled();
        }

        public void update(float tpf) {
            parentEmitter.updateFromControl(tpf);
        }

        public void render(RenderManager rm, ViewPort vp) {
            parentEmitter.renderFromControl(rm, vp);
        }

        // The control carries no state of its own; the emitter serializes
        // everything and re-links the control in ParticleEmitter.read().
        public void write(JmeExporter ex) throws IOException {
        }

        public void read(JmeImporter im) throws IOException {
        }
    }

    @Override
    public ParticleEmitter clone() {
        return clone(true);
    }

    /**
     * Deep-clones this emitter: shape, influencer, colors, face normal and
     * the particle array are all duplicated so the clone simulates
     * independently of the original.
     */
    @Override
    public ParticleEmitter clone(boolean cloneMaterial) {
        ParticleEmitter clone = (ParticleEmitter) super.clone(cloneMaterial);
        clone.shape = shape.deepClone();

        // Reinitialize particle list
        clone.setNumParticles(particles.length);

        clone.faceNormal = faceNormal.clone();
        clone.startColor = startColor.clone();
        clone.endColor = endColor.clone();
        clone.particleInfluencer = particleInfluencer.clone();

        // remove original control from the clone
        clone.controls.remove(this.control);
        // put clone's control in
        clone.control = new ParticleEmitterControl(clone);
        clone.controls.add(clone.control);

        // Reinitialize particle mesh
        switch (meshType) {
            case Point:
                clone.particleMesh = new ParticlePointMesh();
                clone.setMesh(clone.particleMesh);
                break;
            case Triangle:
                clone.particleMesh = new ParticleTriMesh();
                clone.setMesh(clone.particleMesh);
                break;
            default:
                throw new IllegalStateException("Unrecognized particle type: " + meshType);
        }
        clone.particleMesh.initParticleData(clone, clone.particles.length);
        clone.particleMesh.setImagesXY(clone.imagesX, clone.imagesY);

        return clone;
    }

    /**
     * Creates a new particle emitter.
     *
     * @param name the name of the emitter geometry
     * @param type the type of mesh used to render the particles
     * @param numParticles the maximum number of particles alive at once
     */
    public ParticleEmitter(String name, Type type, int numParticles) {
        super(name);
        setBatchHint(BatchHint.Never);
        // ignore world transform, unless user sets inLocalSpace
        this.setIgnoreTransform(true);

        // particles neither receive nor cast shadows
        this.setShadowMode(ShadowMode.Off);

        // particles are usually transparent
        this.setQueueBucket(Bucket.Transparent);

        meshType = type;

        //
        // Must create clone of shape/influencer so that a reference to a static is
        // not maintained
        shape = shape.deepClone();
        particleInfluencer = particleInfluencer.clone();

        control = new ParticleEmitterControl(this);
        controls.add(control);

        switch (meshType) {
            case Point:
                particleMesh = new ParticlePointMesh();
                this.setMesh(particleMesh);
                break;
            case Triangle:
                particleMesh = new ParticleTriMesh();
                this.setMesh(particleMesh);
                break;
            default:
                throw new IllegalStateException("Unrecognized particle type: " + meshType);
        }
        this.setNumParticles(numParticles);
//        particleMesh.initParticleData(this, particles.length);
    }

    /**
     * For serialization only. Do not use.
     */
    public ParticleEmitter() {
        super();
        setBatchHint(BatchHint.Never);
    }

    /**
     * Sets the {@link EmitterShape} within which new particles are spawned.
     *
     * @param shape the emitter shape to use
     */
    public void setShape(EmitterShape shape) {
        this.shape = shape;
    }

    /**
     * Returns the {@link EmitterShape} within which new particles are spawned.
     *
     * @return the emitter shape
     */
    public EmitterShape getShape() {
        return shape;
    }

    /**
     * Set the {@link ParticleInfluencer} to influence this particle emitter.
     *
     * @param particleInfluencer the {@link ParticleInfluencer} to influence
     * this particle emitter.
     *
     * @see ParticleInfluencer
     */
    public void setParticleInfluencer(ParticleInfluencer particleInfluencer) {
        this.particleInfluencer = particleInfluencer;
    }

    /**
     * Returns the {@link ParticleInfluencer} that influences this
     * particle emitter.
     *
     * @return the {@link ParticleInfluencer} that influences this
     * particle emitter.
     *
     * @see ParticleInfluencer
     */
    public ParticleInfluencer getParticleInfluencer() {
        return particleInfluencer;
    }

    /**
     * Returns the mesh type used by the particle emitter.
     *
     * @return the mesh type used by the particle emitter.
     *
     * @see #setMeshType(com.jme3.effect.ParticleMesh.Type)
     * @see ParticleEmitter#ParticleEmitter(java.lang.String, com.jme3.effect.ParticleMesh.Type, int)
     */
    public ParticleMesh.Type getMeshType() {
        return meshType;
    }

    /**
     * Sets the type of mesh used by the particle emitter.
     * @param meshType The mesh type to use
     */
    public void setMeshType(ParticleMesh.Type meshType) {
        this.meshType = meshType;
        switch (meshType) {
            case Point:
                particleMesh = new ParticlePointMesh();
                this.setMesh(particleMesh);
                break;
            case Triangle:
                particleMesh = new ParticleTriMesh();
                this.setMesh(particleMesh);
                break;
            default:
                throw new IllegalStateException("Unrecognized particle type: " + meshType);
        }
        // Replacing the mesh invalidates the per-particle buffers, so the
        // particle list is re-created at its current capacity.
        this.setNumParticles(particles.length);
    }

    /**
     * Returns true if particles should spawn in world space.
     *
     * @return true if particles should spawn in world space.
     *
     * @see ParticleEmitter#setInWorldSpace(boolean)
     */
    public boolean isInWorldSpace() {
        return worldSpace;
    }

    /**
     * Set to true if particles should spawn in world space.
     *
     * <p>If set to true and the particle emitter is moved in the scene,
     * then particles that have already spawned won't be affected by this
     * motion. If set to false, the particles will emit in local space
     * and when the emitter is moved, so are all the particles that
     * were emitted previously.
     *
     * @param worldSpace true if particles should spawn in world space.
     */
    public void setInWorldSpace(boolean worldSpace) {
        this.setIgnoreTransform(worldSpace);
        this.worldSpace = worldSpace;
    }

    /**
     * Returns the number of visible particles (spawned but not dead).
     *
     * @return the number of visible particles
     */
    public int getNumVisibleParticles() {
//        return unusedIndices.size() + next;
        // Live particles are packed at the front of the array, so the live
        // count is simply the last used index + 1.
        return lastUsed + 1;
    }

    /**
     * Set the maximum amount of particles that
     * can exist at the same time with this emitter.
     * Calling this method many times is not recommended.
     *
     * @param numParticles the maximum amount of particles that
     * can exist at the same time with this emitter.
     */
    public final void setNumParticles(int numParticles) {
        particles = new Particle[numParticles];
        for (int i = 0; i < numParticles; i++) {
            particles[i] = new Particle();
        }
        //We have to reinit the mesh's buffers with the new size
        particleMesh.initParticleData(this, particles.length);
        particleMesh.setImagesXY(this.imagesX, this.imagesY);
        // Reset the packed-array bookkeeping: no particle is alive yet.
        firstUnUsed = 0;
        lastUsed = -1;
    }

    /**
     * Returns the maximum number of particles that can be alive at once
     * (the capacity of the particle array).
     *
     * @return the maximum number of particles
     */
    public int getMaxNumParticles() {
        return particles.length;
    }

    /**
     * Returns a list of all particles (shouldn't be used in most cases).
     *
     * <p>
     * This includes both existing and non-existing particles.
     * The size of the array is set to the <code>numParticles</code> value
     * specified in the constructor or {@link ParticleEmitter#setNumParticles(int) }
     * method.
     *
     * @return a list of all particles.
     */
    public Particle[] getParticles() {
        return particles;
    }

    /**
     * Get the normal which particles are facing.
     *
     * @return the normal which particles are facing.
     *
     * @see ParticleEmitter#setFaceNormal(com.jme3.math.Vector3f)
     */
    public Vector3f getFaceNormal() {
        // A NaN vector is the internal sentinel for "face the camera",
        // exposed to callers as null.
        if (Vector3f.isValidVector(faceNormal)) {
            return faceNormal;
        } else {
            return null;
        }
    }

    /**
     * Sets the normal which particles are facing.
     *
     * <p>By default, particles
     * will face the camera, but for some effects (e.g shockwave) it may
     * be necessary to face a specific direction instead. To restore
     * normal functionality, provide <code>null</code> as the argument for
     * <code>faceNormal</code>.
     *
     * @param faceNormal The normals particles should face, or <code>null</code>
     * if particles should face the camera.
     */
    public void setFaceNormal(Vector3f faceNormal) {
        if (faceNormal == null || !Vector3f.isValidVector(faceNormal)) {
            this.faceNormal.set(Vector3f.NAN);
        } else {
            this.faceNormal = faceNormal;
        }
    }

    /**
     * Returns the rotation speed in radians/sec for particles.
     *
     * @return the rotation speed in radians/sec for particles.
     *
     * @see ParticleEmitter#setRotateSpeed(float)
     */
    public float getRotateSpeed() {
        return rotateSpeed;
    }

    /**
     * Set the rotation speed in radians/sec for particles
     * spawned after the invocation of this method.
     *
     * @param rotateSpeed the rotation speed in radians/sec for particles
     * spawned after the invocation of this method.
     */
    public void setRotateSpeed(float rotateSpeed) {
        this.rotateSpeed = rotateSpeed;
    }

    /**
     * Returns true if every particle spawned
     * should have a random facing angle.
     *
     * @return true if every particle spawned
     * should have a random facing angle.
     *
     * @see ParticleEmitter#setRandomAngle(boolean)
     */
    public boolean isRandomAngle() {
        return randomAngle;
    }

    /**
     * Set to true if every particle spawned
     * should have a random facing angle.
     *
     * @param randomAngle if every particle spawned
     * should have a random facing angle.
     */
    public void setRandomAngle(boolean randomAngle) {
        this.randomAngle = randomAngle;
    }

    /**
     * Returns true if every particle spawned should get a random
     * image.
     *
     * @return True if every particle spawned should get a random
     * image.
     *
     * @see ParticleEmitter#setSelectRandomImage(boolean)
     */
    public boolean isSelectRandomImage() {
        return selectRandomImage;
    }

    /**
     * Set to true if every particle spawned
     * should get a random image from a pool of images constructed from
     * the texture, with X by Y possible images.
     *
     * <p>By default, X and Y are equal
     * to 1, thus allowing only 1 possible image to be selected, but if the
     * particle is configured with multiple images by using {@link ParticleEmitter#setImagesX(int) }
     * and {@link ParticleEmitter#setImagesY(int) } methods, then multiple images
     * can be selected. Setting to false will cause each particle to have an animation
     * of images displayed, starting at image 1, and going until image X*Y when
     * the particle reaches its end of life.
     *
     * @param selectRandomImage True if every particle spawned should get a random
     * image.
     */
    public void setSelectRandomImage(boolean selectRandomImage) {
        this.selectRandomImage = selectRandomImage;
    }

    /**
     * Check if particles spawned should face their velocity.
     *
     * @return True if particles spawned should face their velocity.
     *
     * @see ParticleEmitter#setFacingVelocity(boolean)
     */
    public boolean isFacingVelocity() {
        return facingVelocity;
    }

    /**
     * Set to true if particles spawned should face
     * their velocity (or direction to which they are moving towards).
     *
     * <p>This is typically used for e.g spark effects.
     *
     * @param followVelocity True if particles spawned should face their velocity.
     *
     */
    public void setFacingVelocity(boolean followVelocity) {
        this.facingVelocity = followVelocity;
    }

    /**
     * Get the end color of the particles spawned.
     *
     * @return the end color of the particles spawned.
     *
     * @see ParticleEmitter#setEndColor(com.jme3.math.ColorRGBA)
     */
    public ColorRGBA getEndColor() {
        return endColor;
    }

    /**
     * Set the end color of the particles spawned.
     *
     * <p>The
     * particle color at any time is determined by blending the start color
     * and end color based on the particle's current time of life relative
     * to its end of life.
     *
     * @param endColor the end color of the particles spawned.
     */
    public void setEndColor(ColorRGBA endColor) {
        // Copy into the internal color so callers may reuse their instance.
        this.endColor.set(endColor);
    }

    /**
     * Get the end size of the particles spawned.
     *
     * @return the end size of the particles spawned.
     *
     * @see ParticleEmitter#setEndSize(float)
     */
    public float getEndSize() {
        return endSize;
    }

    /**
     * Set the end size of the particles spawned.
     *
     * <p>The
     * particle size at any time is determined by blending the start size
     * and end size based on the particle's current time of life relative
     * to its end of life.
     *
     * @param endSize the end size of the particles spawned.
     */
    public void setEndSize(float endSize) {
        this.endSize = endSize;
    }

    /**
     * Get the gravity vector.
     *
     * @return the gravity vector.
* * @see ParticleEmitter#setGravity(com.jme3.math.Vector3f) */ public Vector3f getGravity() { return gravity; } /** * This method sets the gravity vector. * * @param gravity the gravity vector */ public void setGravity(Vector3f gravity) { this.gravity.set(gravity); } /** * Sets the gravity vector. * * @param x the x component of the gravity vector * @param y the y component of the gravity vector * @param z the z component of the gravity vector */ public void setGravity(float x, float y, float z) { this.gravity.x = x; this.gravity.y = y; this.gravity.z = z; } /** * Get the high value of life. * * @return the high value of life. * * @see ParticleEmitter#setHighLife(float) */ public float getHighLife() { return highLife; } /** * Set the high value of life. * * <p>The particle's lifetime/expiration * is determined by randomly selecting a time between low life and high life. * * @param highLife the high value of life. */ public void setHighLife(float highLife) { this.highLife = highLife; } /** * Get the number of images along the X axis (width). * * @return the number of images along the X axis (width). * * @see ParticleEmitter#setImagesX(int) */ public int getImagesX() { return imagesX; } /** * Set the number of images along the X axis (width). * * <p>To determine * how multiple particle images are selected and used, see the * {@link ParticleEmitter#setSelectRandomImage(boolean) } method. * * @param imagesX the number of images along the X axis (width). */ public void setImagesX(int imagesX) { this.imagesX = imagesX; particleMesh.setImagesXY(this.imagesX, this.imagesY); } /** * Get the number of images along the Y axis (height). * * @return the number of images along the Y axis (height). * * @see ParticleEmitter#setImagesY(int) */ public int getImagesY() { return imagesY; } /** * Set the number of images along the Y axis (height). 
     *
     * <p>To determine how multiple particle images are selected and used, see the
     * {@link ParticleEmitter#setSelectRandomImage(boolean) } method.
     *
     * @param imagesY the number of images along the Y axis (height).
     */
    public void setImagesY(int imagesY) {
        this.imagesY = imagesY;
        particleMesh.setImagesXY(this.imagesX, this.imagesY);
    }

    /**
     * Get the low value of life.
     *
     * @return the low value of life.
     *
     * @see ParticleEmitter#setLowLife(float)
     */
    public float getLowLife() {
        return lowLife;
    }

    /**
     * Set the low value of life.
     *
     * <p>The particle's lifetime/expiration
     * is determined by randomly selecting a time between low life and high life.
     *
     * @param lowLife the low value of life.
     */
    public void setLowLife(float lowLife) {
        this.lowLife = lowLife;
    }

    /**
     * Get the number of particles to spawn per
     * second.
     *
     * @return the number of particles to spawn per
     * second.
     *
     * @see ParticleEmitter#setParticlesPerSec(float)
     */
    public float getParticlesPerSec() {
        return particlesPerSec;
    }

    /**
     * Set the number of particles to spawn per
     * second.
     *
     * @param particlesPerSec the number of particles to spawn per
     * second.
     */
    public void setParticlesPerSec(float particlesPerSec) {
        this.particlesPerSec = particlesPerSec;
        // Reset the emission-time carry-over so the new rate takes effect
        // cleanly on the next update.
        timeDifference = 0;
    }

    /**
     * Get the start color of the particles spawned.
     *
     * @return the start color of the particles spawned.
     *
     * @see ParticleEmitter#setStartColor(com.jme3.math.ColorRGBA)
     */
    public ColorRGBA getStartColor() {
        return startColor;
    }

    /**
     * Set the start color of the particles spawned.
     *
     * <p>The particle color at any time is determined by blending the start color
     * and end color based on the particle's current time of life relative
     * to its end of life.
     *
     * @param startColor the start color of the particles spawned
     */
    public void setStartColor(ColorRGBA startColor) {
        this.startColor.set(startColor);
    }

    /**
     * Get the start size of the particles spawned.
     *
     * @return the start size of the particles spawned.
     *
     * @see ParticleEmitter#setStartSize(float)
     */
    public float getStartSize() {
        return startSize;
    }

    /**
     * Set the start size of the particles spawned.
     *
     * <p>The particle size at any time is determined by blending the start size
     * and end size based on the particle's current time of life relative
     * to its end of life.
     *
     * @param startSize the start size of the particles spawned.
     */
    public void setStartSize(float startSize) {
        this.startSize = startSize;
    }

    /**
     * @deprecated Use ParticleEmitter.getParticleInfluencer().getInitialVelocity() instead.
     */
    @Deprecated
    public Vector3f getInitialVelocity() {
        return particleInfluencer.getInitialVelocity();
    }

    /**
     * @param initialVelocity Set the initial velocity a particle is spawned with,
     * the initial velocity given in the parameter will be varied according
     * to the velocity variation set in {@link ParticleEmitter#setVelocityVariation(float) }.
     * A particle will move toward its velocity unless it is affected by the
     * gravity.
     *
     * @deprecated
     * This method is deprecated.
     * Use ParticleEmitter.getParticleInfluencer().setInitialVelocity(initialVelocity); instead.
     *
     * @see ParticleEmitter#setVelocityVariation(float)
     * @see ParticleEmitter#setGravity(float, float, float)
     */
    @Deprecated
    public void setInitialVelocity(Vector3f initialVelocity) {
        this.particleInfluencer.setInitialVelocity(initialVelocity);
    }

    /**
     * @deprecated
     * This method is deprecated.
     * Use ParticleEmitter.getParticleInfluencer().getVelocityVariation(); instead.
     * @return the initial velocity variation factor
     */
    @Deprecated
    public float getVelocityVariation() {
        return particleInfluencer.getVelocityVariation();
    }

    /**
     * @param variation Set the variation by which the initial velocity
     * of the particle is determined. <code>variation</code> should be a value
     * from 0 to 1, where 0 means particles are to spawn with exactly
     * the velocity given in {@link ParticleEmitter#setInitialVelocity(com.jme3.math.Vector3f) },
     * and 1 means particles are to spawn with a completely random velocity.
     *
     * @deprecated
     * This method is deprecated.
     * Use ParticleEmitter.getParticleInfluencer().setVelocityVariation(variation); instead.
     */
    @Deprecated
    public void setVelocityVariation(float variation) {
        this.particleInfluencer.setVelocityVariation(variation);
    }

    /**
     * Spawns a single particle, initializing it from the emitter's current
     * settings, and grows the given bounding extents to include it.
     *
     * @param min bounding minimum to grow (modified in place)
     * @param max bounding maximum to grow (modified in place)
     * @return the spawned particle, or null if the particle array is full
     */
    private Particle emitParticle(Vector3f min, Vector3f max) {
        // Live particles are packed at the front of the array, so the next
        // free slot immediately follows the last used one.
        int idx = lastUsed + 1;
        if (idx >= particles.length) {
            return null;
        }

        Particle p = particles[idx];
        if (selectRandomImage) {
            p.imageIndex = FastMath.nextRandomInt(0, imagesY - 1) * imagesX + FastMath.nextRandomInt(0, imagesX - 1);
        }

        // Lifetime is chosen uniformly in [lowLife, highLife].
        p.startlife = lowLife + FastMath.nextRandomFloat() * (highLife - lowLife);
        p.life = p.startlife;
        p.color.set(startColor);
        p.size = startSize;
        //shape.getRandomPoint(p.position);
        // The influencer initializes position (via the shape) and velocity.
        particleInfluencer.influenceParticle(p, shape);
        if (worldSpace) {
            worldTransform.transformVector(p.position, p.position);
            worldTransform.getRotation().mult(p.velocity, p.velocity);
            // TODO: Make scale relevant somehow??
        }
        if (randomAngle) {
            p.angle = FastMath.nextRandomFloat() * FastMath.TWO_PI;
        }
        if (rotateSpeed != 0) {
            p.rotateSpeed = rotateSpeed * (0.2f + (FastMath.nextRandomFloat() * 2f - 1f) * .8f);
        }

        // Grow the bounding extents by the particle's size in each axis.
        temp.set(p.position).addLocal(p.size, p.size, p.size);
        max.maxLocal(temp);
        temp.set(p.position).subtractLocal(p.size, p.size, p.size);
        min.minLocal(temp);

        ++lastUsed;
        firstUnUsed = idx + 1;
        return p;
    }

    /**
     * Instantly emits all the particles possible to be emitted. Any particles
     * which are currently inactive will be spawned immediately.
     */
    public void emitAllParticles() {
        emitParticles(particles.length);
    }

    /**
     * Instantly emits available particles, up to num.
     */
    public void emitParticles(int num) {
        // Force world transform to update
        this.getWorldTransform();

        TempVars vars = TempVars.get();

        BoundingBox bbox = (BoundingBox) this.getMesh().getBound();

        Vector3f min = vars.vect1;
        Vector3f max = vars.vect2;

        bbox.getMin(min);
        bbox.getMax(max);

        // A fresh/empty bound may contain invalid values; start the extents
        // at +/- infinity so min/max grow correctly from the first particle.
        if (!Vector3f.isValidVector(min)) {
            min.set(Vector3f.POSITIVE_INFINITY);
        }
        if (!Vector3f.isValidVector(max)) {
            max.set(Vector3f.NEGATIVE_INFINITY);
        }

        for(int i=0;i<num;i++) {
            // Stop early once the particle array is full.
            if( emitParticle(min, max) == null ) break;
        }

        bbox.setMinMax(min, max);
        this.setBoundRefresh();

        vars.release();
    }

    /**
     * Instantly kills all active particles, after this method is called, all
     * particles will be dead and no longer visible.
     */
    public void killAllParticles() {
        for (int i = 0; i < particles.length; ++i) {
            if (particles[i].life > 0) {
                this.freeParticle(i);
            }
        }
    }

    /**
     * Kills the particle at the given index.
     *
     * @param index The index of the particle to kill
     * @see #getParticles()
     */
    public void killParticle(int index){
        freeParticle(index);
    }

    /**
     * Marks the particle at the given index as dead and updates the
     * packed-array bookkeeping (lastUsed / firstUnUsed).
     */
    private void freeParticle(int idx) {
        Particle p = particles[idx];
        p.life = 0;
        p.size = 0f;
        p.color.set(0, 0, 0, 0);
        p.imageIndex = 0;
        p.angle = 0;
        p.rotateSpeed = 0;

        // Shrink lastUsed back past any trailing dead particles.
        if (idx == lastUsed) {
            while (lastUsed >= 0 && particles[lastUsed].life == 0) {
                lastUsed--;
            }
        }
        if (idx < firstUnUsed) {
            firstUnUsed = idx;
        }
    }

    // Swaps two slots in the particle array (used to keep live particles
    // packed at the front).
    private void swap(int idx1, int idx2) {
        Particle p1 = particles[idx1];
        particles[idx1] = particles[idx2];
        particles[idx2] = p1;
    }

    /**
     * Advances a single particle by tpf seconds: applies gravity, moves it,
     * interpolates color/size/angle, and grows the bounding extents.
     */
    private void updateParticle(Particle p, float tpf, Vector3f min, Vector3f max){
        // applying gravity
        p.velocity.x -= gravity.x * tpf;
        p.velocity.y -= gravity.y * tpf;
        p.velocity.z -= gravity.z * tpf;
        temp.set(p.velocity).multLocal(tpf);
        p.position.addLocal(temp);

        // affecting color, size and angle
        // b is the normalized age in [0, 1] (0 = just born, 1 = expired).
        float b = (p.startlife - p.life) / p.startlife;
        p.color.interpolateLocal(startColor, endColor, b);
        p.size = FastMath.interpolateLinear(b, startSize, endSize);
        p.angle += p.rotateSpeed * tpf;

        // Computing bounding volume
        temp.set(p.position).addLocal(p.size, p.size, p.size);
        max.maxLocal(temp);
        temp.set(p.position).subtractLocal(p.size, p.size, p.size);
        min.minLocal(temp);

        if (!selectRandomImage) {
            // Animate through the atlas frames over the particle's lifetime.
            p.imageIndex = (int) (b * imagesX * imagesY);
        }
    }

    /**
     * Advances the whole particle system by tpf seconds: ages and moves live
     * particles, frees expired ones (keeping the array packed), spawns new
     * particles according to particlesPerSec, and refreshes the mesh bound.
     */
    private void updateParticleState(float tpf) {
        // Force world transform to update
        this.getWorldTransform();

        TempVars vars = TempVars.get();

        Vector3f min = vars.vect1.set(Vector3f.POSITIVE_INFINITY);
        Vector3f max = vars.vect2.set(Vector3f.NEGATIVE_INFINITY);

        for (int i = 0; i < particles.length; ++i) {
            Particle p = particles[i];
            if (p.life == 0) { // particle is dead
//                assert i <= firstUnUsed;
                continue;
            }

            p.life -= tpf;
            if (p.life <= 0) {
                this.freeParticle(i);
                continue;
            }

            updateParticle(p, tpf, min, max);

            // Keep live particles packed at the front of the array.
            if (firstUnUsed < i) {
                this.swap(firstUnUsed, i);
                if (i == lastUsed) {
                    lastUsed = firstUnUsed;
                }
                firstUnUsed++;
            }
        }

        // Spawns particles within the tpf timeslot with proper age
        float interval = 1f / particlesPerSec;
        tpf += timeDifference;
        while (tpf > interval){
            tpf -= interval;
            Particle p = emitParticle(min, max);
            if (p != null){
                // Age the new particle by the time remaining in this frame.
                p.life -= tpf;
                if (p.life <= 0){
                    freeParticle(lastUsed);
                }else{
                    updateParticle(p, tpf, min, max);
                }
            }
        }
        // Remember the un-spent fraction of the emission interval for the
        // next frame, so the spawn rate stays accurate over time.
        timeDifference = tpf;

        BoundingBox bbox = (BoundingBox) this.getMesh().getBound();
        bbox.setMinMax(min, max);
        this.setBoundRefresh();

        vars.release();
    }

    /**
     * Set to enable or disable the particle emitter
     *
     * <p>When a particle is
     * disabled, it will be "frozen in time" and not update.
     *
     * @param enabled True to enable the particle emitter
     */
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    /**
     * Check if a particle emitter is enabled for update.
     *
     * @return True if a particle emitter is enabled for update.
     *
     * @see ParticleEmitter#setEnabled(boolean)
     */
    public boolean isEnabled() {
        return enabled;
    }

    /**
     * Callback from Control.update(), do not use.
     * @param tpf time per frame, in seconds
     */
    public void updateFromControl(float tpf) {
        if (enabled) {
            this.updateParticleState(tpf);
        }
    }

    /**
     * Callback from Control.render(), do not use.
     *
     * @param rm the render manager
     * @param vp the viewport being rendered
     */
    private void renderFromControl(RenderManager rm, ViewPort vp) {
        Camera cam = vp.getCamera();

        if (meshType == ParticleMesh.Type.Point) {
            // Point sprites are sized in screen space; derive the quadratic
            // attenuation factor from the projection matrix and viewport width.
            float C = cam.getProjectionMatrix().m00;
            C *= cam.getWidth() * 0.5f;

            // send attenuation params
            this.getMaterial().setFloat("Quadratic", C);
        }

        Matrix3f inverseRotation = Matrix3f.IDENTITY;
        TempVars vars = null;
        if (!worldSpace) {
            vars = TempVars.get();
            // Undo the emitter's world rotation so billboards still face the
            // camera when the simulation runs in local space.
            inverseRotation = this.getWorldRotation().toRotationMatrix(vars.tempMat3).invertLocal();
        }
        particleMesh.updateParticleData(particles, cam, inverseRotation);
        if (!worldSpace) {
            vars.release();
        }
    }

    /**
     * Runs one zero-time update and uploads the particle data so GPU
     * resources are created before the first real frame.
     */
    public void preload(RenderManager rm, ViewPort vp) {
        this.updateParticleState(0);
        particleMesh.updateParticleData(particles, vp.getCamera(), Matrix3f.IDENTITY);
    }

    @Override
    public void write(JmeExporter ex) throws IOException {
        super.write(ex);
        OutputCapsule oc = ex.getCapsule(this);
        oc.write(shape, "shape", DEFAULT_SHAPE);
        oc.write(meshType, "meshType", ParticleMesh.Type.Triangle);
        oc.write(enabled, "enabled", true);
        // Only the capacity is saved; individual particle states are not
        // serialized and are rebuilt empty by read().
        oc.write(particles.length, "numParticles", 0);
        oc.write(particlesPerSec, "particlesPerSec", 0);
        oc.write(lowLife, "lowLife", 0);
        oc.write(highLife, "highLife", 0);
        oc.write(gravity, "gravity", null);
        oc.write(imagesX, "imagesX", 1);
        oc.write(imagesY, "imagesY", 1);
        oc.write(startColor, "startColor", null);
        oc.write(endColor, "endColor", null);
        oc.write(startSize, "startSize", 0);
        oc.write(endSize, "endSize", 0);
        oc.write(worldSpace, "worldSpace", false);
        oc.write(facingVelocity, "facingVelocity", false);
        oc.write(faceNormal, "faceNormal", new Vector3f(Vector3f.NAN));
        oc.write(selectRandomImage, "selectRandomImage", false);
        oc.write(randomAngle, "randomAngle", false);
        oc.write(rotateSpeed, "rotateSpeed", 0);
        oc.write(particleInfluencer, "influencer", DEFAULT_INFLUENCER);
    }

    @Override
    public void
    read(JmeImporter im) throws IOException {
        super.read(im);
        InputCapsule ic = im.getCapsule(this);
        shape = (EmitterShape) ic.readSavable("shape", DEFAULT_SHAPE);
        if (shape == DEFAULT_SHAPE) {
            // Prevent reference to static
            shape = shape.deepClone();
        }

        meshType = ic.readEnum("meshType", ParticleMesh.Type.class, ParticleMesh.Type.Triangle);
        int numParticles = ic.readInt("numParticles", 0);

        enabled = ic.readBoolean("enabled", true);
        particlesPerSec = ic.readFloat("particlesPerSec", 0);
        lowLife = ic.readFloat("lowLife", 0);
        highLife = ic.readFloat("highLife", 0);
        gravity = (Vector3f) ic.readSavable("gravity", null);
        imagesX = ic.readInt("imagesX", 1);
        imagesY = ic.readInt("imagesY", 1);

        startColor = (ColorRGBA) ic.readSavable("startColor", null);
        endColor = (ColorRGBA) ic.readSavable("endColor", null);
        startSize = ic.readFloat("startSize", 0);
        endSize = ic.readFloat("endSize", 0);
        worldSpace = ic.readBoolean("worldSpace", false);
        // Keep the ignore-transform flag consistent with world-space mode,
        // as setInWorldSpace() would.
        this.setIgnoreTransform(worldSpace);
        facingVelocity = ic.readBoolean("facingVelocity", false);
        faceNormal = (Vector3f)ic.readSavable("faceNormal", new Vector3f(Vector3f.NAN));
        selectRandomImage = ic.readBoolean("selectRandomImage", false);
        randomAngle = ic.readBoolean("randomAngle", false);
        rotateSpeed = ic.readFloat("rotateSpeed", 0);

        switch (meshType) {
            case Point:
                particleMesh = new ParticlePointMesh();
                this.setMesh(particleMesh);
                break;
            case Triangle:
                particleMesh = new ParticleTriMesh();
                this.setMesh(particleMesh);
                break;
            default:
                throw new IllegalStateException("Unrecognized particle type: " + meshType);
        }
        this.setNumParticles(numParticles);
//        particleMesh.initParticleData(this, particles.length);
//        particleMesh.setImagesXY(imagesX, imagesY);

        particleInfluencer = (ParticleInfluencer) ic.readSavable("influencer", DEFAULT_INFLUENCER);
        if (particleInfluencer == DEFAULT_INFLUENCER) {
            // Prevent mutation of the shared static default influencer.
            particleInfluencer = particleInfluencer.clone();
        }

        if (im.getFormatVersion() == 0) {
            // compatibility before the control inside particle emitter
            // was changed:
// find it in the controls and take it out, then add the proper one in for (int i = 0; i < controls.size(); i++) { Object obj = controls.get(i); if (obj instanceof ParticleEmitter) { controls.remove(i); // now add the proper one in controls.add(new ParticleEmitterControl(this)); break; } } // compatability before gravity was not a vector but a float if (gravity == null) { gravity = new Vector3f(); gravity.y = ic.readFloat("gravity", 0); } } else { // since the parentEmitter is not loaded, it must be // loaded separately control = getControl(ParticleEmitterControl.class); control.parentEmitter = this; } } }