gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright (c) 2012 Jan Kotek * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mapdb; import java.io.DataInput; import java.io.IOException; import java.io.Serializable; import java.util.*; /** * Functional stuff. Tuples, function, callback methods etc.. * * @author Jan Kotek */ public final class Fun { /** place holder for some stuff in future */ public static final Object PLACEHOLDER = new Object(){ @Override public String toString() { return "Fun.PLACEHOLDER"; } }; /** * A utility method for getting a type-safe Comparator, it provides type-inference help. * Use this method instead of {@link Fun#COMPARATOR} in order to insure type-safety * ex: {@code Comparator<Integer> comparator = getComparator();} * @return comparator */ public static <T> Comparator<T> comparator(){ return Fun.COMPARATOR; } /** * A utility method for getting a type-safe reversed Comparator (the negation of {@link Fun#comparator()}). 
* Use this method instead of {@link Fun#REVERSE_COMPARATOR} in order to insure type-safety * ex: {@code Comparator<Integer> comparator = getReversedComparator();} * @return comparator */ public static <T> Comparator<T> reverseComparator(){ return Fun.REVERSE_COMPARATOR; } @SuppressWarnings("rawtypes") public static final Comparator COMPARATOR = new Comparator<Comparable>() { @Override public int compare(Comparable o1, Comparable o2) { return o1.compareTo(o2); } }; @SuppressWarnings("rawtypes") public static final Comparator REVERSE_COMPARATOR = new Comparator<Comparable>() { @Override public int compare(Comparable o1, Comparable o2) { return -COMPARATOR.compare(o1,o2); } }; public static final Iterator EMPTY_ITERATOR = new ArrayList(0).iterator(); public static <T> Iterator<T> emptyIterator(){ return EMPTY_ITERATOR; } private Fun(){} /** returns true if all elements are equal, works with nulls*/ static public boolean eq(Object a, Object b) { return a==b || (a!=null && a.equals(b)); } public static long roundUp(long number, long roundUpToMultipleOf) { return ((number+roundUpToMultipleOf-1)/(roundUpToMultipleOf))*roundUpToMultipleOf; } /** Convert object to string, even if it is primitive array */ static String toString(Object keys) { if(keys instanceof long[]) return Arrays.toString((long[]) keys); else if(keys instanceof int[]) return Arrays.toString((int[]) keys); else if(keys instanceof byte[]) return Arrays.toString((byte[]) keys); else if(keys instanceof char[]) return Arrays.toString((char[]) keys); else if(keys instanceof float[]) return Arrays.toString((float[]) keys); else if(keys instanceof double[]) return Arrays.toString((double[]) keys); else if(keys instanceof boolean[]) return Arrays.toString((boolean[]) keys); else if(keys instanceof Object[]) return Arrays.toString((Object[]) keys); else return keys.toString(); } static public final class Pair<A,B> implements Comparable<Pair<A,B>>, Serializable { private static final long serialVersionUID = 
-8816277286657643283L; final public A a; final public B b; public Pair(A a, B b) { this.a = a; this.b = b; } /** constructor used for deserialization*/ protected Pair(SerializerBase serializer, DataInput in, SerializerBase.FastArrayList<Object> objectStack) throws IOException { objectStack.add(this); this.a = (A) serializer.deserialize(in, objectStack); this.b = (B) serializer.deserialize(in, objectStack); } @Override public int compareTo(Pair<A,B> o) { int i = ((Comparable<A>)a).compareTo(o.a); if(i!=0) return i; return ((Comparable<B>)b).compareTo(o.b); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final Pair<?, ?> t = (Pair<?,?>) o; return eq(a,t.a) && eq(b,t.b); } @Override public int hashCode() { int result = a != null ? a.hashCode() : 0; result = 31 * result + (b != null ? b.hashCode() : 0); return result; } @Override public String toString() { return "Pair[" + a +", "+b+"]"; } } /** function which takes no argument and returns one value*/ public interface Function0<R>{ R run(); } /** function which takes one argument and returns one value*/ public interface Function1<R,A>{ R run(A a); } /** function which takes one int argument and returns one value*/ public interface Function1Int<R>{ R run(int a); } /** function which takes two argument and returns one value*/ public interface Function2<R,A,B>{ R run(A a, B b); } public static <K,V> Fun.Function1<K,Pair<K,V>> extractKey(){ return new Fun.Function1<K, Pair<K, V>>() { @Override public K run(Pair<K, V> t) { return t.a; } }; } public static <K,V> Fun.Function1<V,Pair<K,V>> extractValue(){ return new Fun.Function1<V, Pair<K, V>>() { @Override public V run(Pair<K, V> t) { return t.b; } }; } public static <K,V> Fun.Function1<K,Map.Entry<K,V>> extractMapEntryKey(){ return new Fun.Function1<K, Map.Entry<K, V>>() { @Override public K run(Map.Entry<K, V> t) { return t.getKey(); } }; } public static <K,V> 
Fun.Function1<V,Map.Entry<K,V>> extractMapEntryValue(){ return new Fun.Function1<V, Map.Entry<K, V>>() { @Override public V run(Map.Entry<K, V> t) { return t.getValue(); } }; } /** returns function which always returns the value itself without transformation */ public static <K> Function1<K,K> extractNoTransform() { return new Function1<K, K>() { @Override public K run(K k) { return k; } }; } public static final Comparator<byte[]> BYTE_ARRAY_COMPARATOR = new Comparator<byte[]>() { @Override public int compare(byte[] o1, byte[] o2) { if(o1==o2) return 0; final int len = Math.min(o1.length,o2.length); for(int i=0;i<len;i++){ if(o1[i]==o2[i]) continue; if(o1[i]>o2[i]) return 1; return -1; } return compareInt(o1.length, o2.length); } }; public static final Comparator<char[]> CHAR_ARRAY_COMPARATOR = new Comparator<char[]>() { @Override public int compare(char[] o1, char[] o2) { if(o1==o2) return 0; final int len = Math.min(o1.length,o2.length); for(int i=0;i<len;i++){ if(o1[i]==o2[i]) continue; if(o1[i]>o2[i]) return 1; return -1; } return compareInt(o1.length, o2.length); } }; public static final Comparator<int[]> INT_ARRAY_COMPARATOR = new Comparator<int[]>() { @Override public int compare(int[] o1, int[] o2) { if(o1==o2) return 0; final int len = Math.min(o1.length,o2.length); for(int i=0;i<len;i++){ if(o1[i]==o2[i]) continue; if(o1[i]>o2[i]) return 1; return -1; } return compareInt(o1.length, o2.length); } }; public static final Comparator<long[]> LONG_ARRAY_COMPARATOR = new Comparator<long[]>() { @Override public int compare(long[] o1, long[] o2) { if(o1==o2) return 0; final int len = Math.min(o1.length,o2.length); for(int i=0;i<len;i++){ if(o1[i]==o2[i]) continue; if(o1[i]>o2[i]) return 1; return -1; } return compareInt(o1.length, o2.length); } }; public static final Comparator<double[]> DOUBLE_ARRAY_COMPARATOR = new Comparator<double[]>() { @Override public int compare(double[] o1, double[] o2) { if(o1==o2) return 0; final int len = Math.min(o1.length,o2.length); 
for(int i=0;i<len;i++){ if(o1[i]==o2[i]) continue; if(o1[i]>o2[i]) return 1; return -1; } return compareInt(o1.length, o2.length); } }; /** Compares two arrays which contains comparable elements */ public static final Comparator<Object[]> COMPARABLE_ARRAY_COMPARATOR = new Comparator<Object[]>() { @Override public int compare(Object[] o1, Object[] o2) { if(o1==o2) return 0; final int len = Math.min(o1.length,o2.length); for(int i=0;i<len;i++){ int r = Fun.COMPARATOR.compare(o1[i],o2[i]); if(r!=0) return r; } return compareInt(o1.length, o2.length); } }; /** compares two arrays using given comparators*/ public static final class ArrayComparator implements Comparator<Object[]>{ protected final Comparator[] comparators; public ArrayComparator(Comparator<?>[] comparators2) { this.comparators = comparators2.clone(); for(int i=0;i<this.comparators.length;i++){ if(this.comparators[i]==null) this.comparators[i] = Fun.COMPARATOR; } } /** constructor used for deserialization*/ protected ArrayComparator(SerializerBase serializer, DataInput in, SerializerBase.FastArrayList<Object> objectStack) throws IOException { objectStack.add(this); this.comparators = (Comparator[]) serializer.deserialize(in, objectStack); } @Override public int compare(Object[] o1, Object[] o2) { if(o1==o2) return 0; int len = Math.min(o1.length,o2.length); for(int i=0;i<len;i++){ int r = comparators[i].compare(o1[i],o2[i]); if(r!=0) return r; } return compareInt(o1.length, o2.length); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ArrayComparator that = (ArrayComparator) o; return Arrays.equals(comparators, that.comparators); } @Override public int hashCode() { return Arrays.hashCode(comparators); } } public static int compareInt(int x, int y) { return (x < y) ? -1 : ((x == y) ? 0 : 1); } public static int compareLong(long x, long y) { return (x < y) ? -1 : ((x == y) ? 
0 : 1); } /** * TODO document tuples * * Find all Primary Keys associated with Secondary Key. * This is useful companion to {@link Bind#mapInverse(org.mapdb.Bind.MapWithModificationListener, java.util.Set)} * and {@link Bind#secondaryKey(org.mapdb.Bind.MapWithModificationListener, java.util.Set, org.mapdb.Fun.Function2)} * It can by also used to find values from 'MultiMap'. * * @param set Set or 'MultiMap' to find values in * @param keys key to look from * @return all keys where primary value equals to {@code secondaryKey} */ public static Iterable<Object[]> filter(final NavigableSet<Object[]> set, final Object... keys) { return new Iterable<Object[]>() { @Override public Iterator<Object[]> iterator() { final Iterator<Object[]> iter = set.tailSet(keys).iterator(); if(!iter.hasNext()) return Fun.EMPTY_ITERATOR; return new Iterator<Object[]>() { Object[] next = moveToNext(); Object[] moveToNext() { if(!iter.hasNext()) return null; Object[] next = iter.next(); if(next==null) return null; //check all elements are equal //TODO this does not work if byte[] etc is used in array. Document or fail! //TODO add special check for Fun.ARRAY comparator and use its sub-comparators for(int i=0;i<keys.length;i++){ if(!keys[i].equals(next[i])) return null; } return next; } @Override public boolean hasNext() { return next!=null; } @Override public Object[] next() { Object[] ret = next; if(ret == null) throw new NoSuchElementException(); next = moveToNext(); return ret; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }; } /** decides if some action should be executed on an record*/ public interface RecordCondition<A>{ boolean run(final long recid, final A value, final Serializer<A> serializer); } /** record condition which always returns true*/ public static final RecordCondition RECORD_ALWAYS_TRUE = new RecordCondition() { @Override public boolean run(long recid, Object value, Serializer serializer) { return true; } }; }
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.contextmenu; import static org.chromium.chrome.browser.contextmenu.ContextMenuItemProperties.MENU_ID; import static org.chromium.chrome.browser.contextmenu.ContextMenuItemProperties.TEXT; import static org.chromium.chrome.browser.contextmenu.ContextMenuItemWithIconButtonProperties.BUTTON_CONTENT_DESC; import static org.chromium.chrome.browser.contextmenu.ContextMenuItemWithIconButtonProperties.BUTTON_IMAGE; import static org.chromium.chrome.browser.contextmenu.ContextMenuItemWithIconButtonProperties.BUTTON_MENU_ID; import android.app.Activity; import android.content.ContentResolver; import android.content.Context; import android.content.Intent; import android.graphics.drawable.Drawable; import android.net.MailTo; import android.net.Uri; import android.text.TextUtils; import android.util.Pair; import android.webkit.MimeTypeMap; import android.webkit.URLUtil; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import org.chromium.base.Callback; import org.chromium.base.ContextUtils; import org.chromium.base.metrics.RecordHistogram; import org.chromium.base.supplier.Supplier; import org.chromium.chrome.R; import org.chromium.chrome.browser.compositor.bottombar.ephemeraltab.EphemeralTabCoordinator; import org.chromium.chrome.browser.contextmenu.ChromeContextMenuItem.Item; import org.chromium.chrome.browser.contextmenu.ContextMenuCoordinator.ListItemType; import org.chromium.chrome.browser.feature_engagement.TrackerFactory; import org.chromium.chrome.browser.firstrun.FirstRunStatus; import org.chromium.chrome.browser.flags.ChromeFeatureList; import org.chromium.chrome.browser.gsa.GSAState; import org.chromium.chrome.browser.lens.LensController; import org.chromium.chrome.browser.lens.LensEntryPoint; import 
org.chromium.chrome.browser.lens.LensIntentParams; import org.chromium.chrome.browser.lens.LensMetrics; import org.chromium.chrome.browser.locale.LocaleManager; import org.chromium.chrome.browser.performance_hints.PerformanceHintsObserver; import org.chromium.chrome.browser.performance_hints.PerformanceHintsObserver.PerformanceClass; import org.chromium.chrome.browser.preferences.ChromePreferenceKeys; import org.chromium.chrome.browser.preferences.SharedPreferencesManager; import org.chromium.chrome.browser.profiles.Profile; import org.chromium.chrome.browser.read_later.ReadingListUtils; import org.chromium.chrome.browser.search_engines.TemplateUrlServiceFactory; import org.chromium.chrome.browser.share.ChromeShareExtras; import org.chromium.chrome.browser.share.LensUtils; import org.chromium.chrome.browser.share.ShareDelegate; import org.chromium.chrome.browser.share.ShareDelegate.ShareOrigin; import org.chromium.chrome.browser.share.ShareHelper; import org.chromium.chrome.browser.share.link_to_text.LinkToTextHelper; import org.chromium.chrome.browser.tasks.tab_management.TabUiFeatureUtilities; import org.chromium.components.browser_ui.share.ShareParams; import org.chromium.components.embedder_support.contextmenu.ContextMenuParams; import org.chromium.components.embedder_support.util.UrlUtilities; import org.chromium.components.externalauth.ExternalAuthUtils; import org.chromium.components.feature_engagement.FeatureConstants; import org.chromium.components.feature_engagement.Tracker; import org.chromium.components.search_engines.TemplateUrlService; import org.chromium.components.ukm.UkmRecorder; import org.chromium.components.url_formatter.UrlFormatter; import org.chromium.content_public.browser.BrowserStartupController; import org.chromium.content_public.browser.WebContents; import org.chromium.content_public.common.ContentUrlConstants; import org.chromium.ui.base.DeviceFormFactor; import org.chromium.ui.base.WindowAndroid; import 
org.chromium.ui.modelutil.MVCListAdapter.ListItem; import org.chromium.ui.modelutil.MVCListAdapter.ModelList; import org.chromium.ui.modelutil.PropertyModel; import org.chromium.url.GURL; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; // Vivaldi import org.chromium.build.BuildConfig; import org.chromium.chrome.browser.ChromeApplicationImpl; /** * A {@link ContextMenuPopulator} used for showing the default Chrome context menu. */ public class ChromeContextMenuPopulator implements ContextMenuPopulator { private final Context mContext; private final ContextMenuItemDelegate mItemDelegate; private final @ContextMenuMode int mMode; private final Supplier<ShareDelegate> mShareDelegateSupplier; private final ExternalAuthUtils mExternalAuthUtils; private final ContextMenuParams mParams; private @Nullable UkmRecorder.Bridge mUkmRecorderBridge; private ContextMenuNativeDelegate mNativeDelegate; private static final String LENS_SEARCH_MENU_ITEM_KEY = "searchWithGoogleLensMenuItem"; private static final String LENS_SHOP_MENU_ITEM_KEY = "shopWithGoogleLensMenuItem"; private static final String SEARCH_BY_IMAGE_MENU_ITEM_KEY = "searchByImageMenuItem"; private static final String LENS_SUPPORT_STATUS_HISTOGRAM_NAME = "ContextMenu.LensSupportStatus"; // True when the tracker indicates IPH in the form of "new" label needs to be shown. 
private Boolean mShowEphemeralTabNewLabel; /** * Defines the Groups of each Context Menu Item */ @IntDef({ContextMenuGroup.LINK, ContextMenuGroup.IMAGE, ContextMenuGroup.VIDEO}) @Retention(RetentionPolicy.SOURCE) public @interface ContextMenuGroup { int LINK = 0; int IMAGE = 1; int VIDEO = 2; } /** * Defines the context menu modes */ @IntDef({ContextMenuMode.NORMAL, ContextMenuMode.CUSTOM_TAB, ContextMenuMode.WEB_APP}) @Retention(RetentionPolicy.SOURCE) public @interface ContextMenuMode { int NORMAL = 0; /* Default mode*/ int CUSTOM_TAB = 1; /* Custom tab mode */ int WEB_APP = 2; /* Full screen mode */ } static class ContextMenuUma { // Note: these values must match the ContextMenuOptionAndroid enum in enums.xml. // Only add values to the end, right before NUM_ENTRIES! @IntDef({Action.OPEN_IN_NEW_TAB, Action.OPEN_IN_INCOGNITO_TAB, Action.COPY_LINK_ADDRESS, Action.COPY_EMAIL_ADDRESS, Action.COPY_LINK_TEXT, Action.SAVE_LINK, Action.SAVE_IMAGE, Action.OPEN_IMAGE, Action.OPEN_IMAGE_IN_NEW_TAB, Action.SEARCH_BY_IMAGE, Action.LOAD_ORIGINAL_IMAGE, Action.SAVE_VIDEO, Action.SHARE_IMAGE, Action.OPEN_IN_OTHER_WINDOW, Action.OPEN_IN_NEW_WINDOW, Action.SEND_EMAIL, Action.ADD_TO_CONTACTS, Action.CALL, Action.SEND_TEXT_MESSAGE, Action.COPY_PHONE_NUMBER, Action.OPEN_IN_NEW_CHROME_TAB, Action.OPEN_IN_CHROME_INCOGNITO_TAB, Action.OPEN_IN_BROWSER, Action.OPEN_IN_CHROME, Action.SHARE_LINK, Action.OPEN_IN_EPHEMERAL_TAB, Action.OPEN_IMAGE_IN_EPHEMERAL_TAB, Action.DIRECT_SHARE_LINK, Action.DIRECT_SHARE_IMAGE, Action.SEARCH_WITH_GOOGLE_LENS, Action.COPY_IMAGE, Action.SHOP_IMAGE_WITH_GOOGLE_LENS, Action.READ_LATER, Action.SHOP_WITH_GOOGLE_LENS_CHIP, Action.TRANSLATE_WITH_GOOGLE_LENS_CHIP, Action.SHARE_HIGHLIGHT, Action.REMOVE_HIGHLIGHT, Action.LEARN_MORE, Action.OPEN_IN_NEW_TAB_IN_GROUP}) @Retention(RetentionPolicy.SOURCE) public @interface Action { int OPEN_IN_NEW_TAB = 0; int OPEN_IN_INCOGNITO_TAB = 1; int COPY_LINK_ADDRESS = 2; int COPY_EMAIL_ADDRESS = 3; int COPY_LINK_TEXT = 4; int 
SAVE_LINK = 5; int SAVE_IMAGE = 6; int OPEN_IMAGE = 7; int OPEN_IMAGE_IN_NEW_TAB = 8; int SEARCH_BY_IMAGE = 9; int LOAD_ORIGINAL_IMAGE = 10; int SAVE_VIDEO = 11; int SHARE_IMAGE = 12; int OPEN_IN_OTHER_WINDOW = 13; int SEND_EMAIL = 14; int ADD_TO_CONTACTS = 15; int CALL = 16; int SEND_TEXT_MESSAGE = 17; int COPY_PHONE_NUMBER = 18; int OPEN_IN_NEW_CHROME_TAB = 19; int OPEN_IN_CHROME_INCOGNITO_TAB = 20; int OPEN_IN_BROWSER = 21; int OPEN_IN_CHROME = 22; int SHARE_LINK = 23; int OPEN_IN_EPHEMERAL_TAB = 24; int OPEN_IMAGE_IN_EPHEMERAL_TAB = 25; int DIRECT_SHARE_LINK = 26; int DIRECT_SHARE_IMAGE = 27; int SEARCH_WITH_GOOGLE_LENS = 28; int COPY_IMAGE = 29; // int SHOP_SIMILAR_PRODUCTS = 30; Deprecated since 06/2021. int SHOP_IMAGE_WITH_GOOGLE_LENS = 31; // int SEARCH_SIMILAR_PRODUCTS = 32; // Deprecated since 06/2021. int READ_LATER = 33; int SHOP_WITH_GOOGLE_LENS_CHIP = 34; int TRANSLATE_WITH_GOOGLE_LENS_CHIP = 35; int SHARE_HIGHLIGHT = 36; int REMOVE_HIGHLIGHT = 37; int LEARN_MORE = 38; int OPEN_IN_NEW_TAB_IN_GROUP = 39; int OPEN_IN_NEW_WINDOW = 40; int NUM_ENTRIES = 41; } // Note: these values must match the ContextMenuSaveLinkType enum in enums.xml. // Only add new values at the end, right before NUM_TYPES. We depend on these specific // values in UMA histograms. @IntDef({Type.UNKNOWN, Type.TEXT, Type.IMAGE, Type.AUDIO, Type.VIDEO, Type.PDF}) @Retention(RetentionPolicy.SOURCE) public @interface Type { int UNKNOWN = 0; int TEXT = 1; int IMAGE = 2; int AUDIO = 3; int VIDEO = 4; int PDF = 5; int NUM_ENTRIES = 6; } // Note: these values must match the ContextMenuSaveImage enum in enums.xml. // Only add new values at the end, right before NUM_ENTRIES. 
/** Buckets for the MobileDownload.ContextMenu.SaveImage histogram (recorded via recordSaveImageUma). */
@IntDef({TypeSaveImage.LOADED, TypeSaveImage.NOT_DOWNLOADABLE,
        TypeSaveImage.DISABLED_AND_IS_NOT_IMAGE_PARAM,
        TypeSaveImage.DISABLED_AND_IS_IMAGE_PARAM, TypeSaveImage.SHOWN})
@Retention(RetentionPolicy.SOURCE)
public @interface TypeSaveImage {
    int LOADED = 0;
    // int FETCHED_LOFI = 1; deprecated
    int NOT_DOWNLOADABLE = 2;
    int DISABLED_AND_IS_NOT_IMAGE_PARAM = 3;
    int DISABLED_AND_IS_IMAGE_PARAM = 4;
    int SHOWN = 5;
    int NUM_ENTRIES = 6;
}
// This is used for recording the enum histogram:
// * ContextMenu.SelectedOptionAndroid.ImageLink.NewTabOption
// * ContextMenu.SelectedOptionAndroid.Link.NewTabOption
// OPEN_IN_NEW_TAB_FIRST_SELECTED_OPEN_IN_NEW_TAB means the context menu shows the
// 'open in new tab' item before the 'open in new tab in group' item and the
// 'open in new tab' item is selected.
// OPEN_IN_NEW_TAB_FIRST_SELECTED_OPEN_IN_NEW_TAB_IN_GROUP means the context menu shows the
// 'open in new tab' item before the 'open in new tab in group' item and the
// 'open in new tab in group' item is selected.
// OPEN_IN_NEW_TAB_IN_GROUP_FIRST_SELECTED_OPEN_IN_NEW_TAB means the context menu shows the
// 'open in new tab in group' item before the 'open in new tab' item and the
// 'open in new tab' item is selected.
// OPEN_IN_NEW_TAB_IN_GROUP_FIRST_SELECTED_OPEN_IN_NEW_TAB_IN_GROUP means the context menu
// shows the
// 'open in new tab in group' item before the 'open in new tab' item and the
// 'open in new tab in group' item is selected.
@IntDef({SelectedNewTabCreationEnum.OPEN_IN_NEW_TAB_FIRST_SELECTED_OPEN_IN_NEW_TAB, SelectedNewTabCreationEnum.OPEN_IN_NEW_TAB_FIRST_SELECTED_OPEN_IN_NEW_TAB_IN_GROUP, SelectedNewTabCreationEnum.OPEN_IN_NEW_TAB_IN_GROUP_FIRST_SELECTED_OPEN_IN_NEW_TAB, SelectedNewTabCreationEnum .OPEN_IN_NEW_TAB_IN_GROUP_FIRST_SELECTED_OPEN_IN_NEW_TAB_IN_GROUP}) @Retention(RetentionPolicy.SOURCE) private @interface SelectedNewTabCreationEnum { int OPEN_IN_NEW_TAB_FIRST_SELECTED_OPEN_IN_NEW_TAB = 0; int OPEN_IN_NEW_TAB_FIRST_SELECTED_OPEN_IN_NEW_TAB_IN_GROUP = 1; int OPEN_IN_NEW_TAB_IN_GROUP_FIRST_SELECTED_OPEN_IN_NEW_TAB = 2; int OPEN_IN_NEW_TAB_IN_GROUP_FIRST_SELECTED_OPEN_IN_NEW_TAB_IN_GROUP = 3; int NUM_ENTRIES = 4; } /** * Records a histogram entry when the user selects an item from a context menu. * @param params The ContextMenuParams describing the current context menu. * @param action The action that the user selected (e.g. ACTION_SAVE_IMAGE). */ static void record(WebContents webContents, ContextMenuParams params, @Action int action) { String histogramName = String.format("ContextMenu.SelectedOptionAndroid.%s", ContextMenuUtils.getContextMenuTypeForHistogram(params)); RecordHistogram.recordEnumeratedHistogram(histogramName, action, Action.NUM_ENTRIES); if (!params.isVideo() && params.isImage() && LensUtils.isInShoppingAllowlist(params.getPageUrl())) { String shoppingHistogramName = params.isAnchor() ? 
"ContextMenu.SelectedOptionAndroid.ImageLink.ShoppingDomain" : "ContextMenu.SelectedOptionAndroid.Image.ShoppingDomain"; RecordHistogram.recordEnumeratedHistogram( shoppingHistogramName, action, Action.NUM_ENTRIES); } if (params.isAnchor() && !params.isVideo() && !params.getOpenedFromHighlight()) { if (params.isImage()) { assert histogramName.equals("ContextMenu.SelectedOptionAndroid.ImageLink"); } else { assert histogramName.equals("ContextMenu.SelectedOptionAndroid.Link"); } tryToRecordGroupRelatedHistogram(histogramName, action); } if (params.isAnchor() && PerformanceHintsObserver.getPerformanceClassForURL( webContents, params.getLinkUrl()) == PerformanceClass.PERFORMANCE_FAST) { RecordHistogram.recordEnumeratedHistogram( histogramName + ".PerformanceClassFast", action, Action.NUM_ENTRIES); } } private static void tryToRecordGroupRelatedHistogram( String histogramName, @Action int action) { if (TabUiFeatureUtilities.ENABLE_TAB_GROUP_AUTO_CREATION.getValue()) return; boolean openInGroupShownFirst = TabUiFeatureUtilities.showContextMenuOpenNewTabInGroupItemFirst(); @SelectedNewTabCreationEnum int selectedNewTabCreationEnum = SelectedNewTabCreationEnum.OPEN_IN_NEW_TAB_FIRST_SELECTED_OPEN_IN_NEW_TAB; if (action == Action.OPEN_IN_NEW_TAB) { if (openInGroupShownFirst) { selectedNewTabCreationEnum = SelectedNewTabCreationEnum .OPEN_IN_NEW_TAB_IN_GROUP_FIRST_SELECTED_OPEN_IN_NEW_TAB; } } else if (action == Action.OPEN_IN_NEW_TAB_IN_GROUP) { selectedNewTabCreationEnum = openInGroupShownFirst ? SelectedNewTabCreationEnum .OPEN_IN_NEW_TAB_IN_GROUP_FIRST_SELECTED_OPEN_IN_NEW_TAB_IN_GROUP : SelectedNewTabCreationEnum .OPEN_IN_NEW_TAB_FIRST_SELECTED_OPEN_IN_NEW_TAB_IN_GROUP; } RecordHistogram.recordEnumeratedHistogram(histogramName + ".NewTabOption", selectedNewTabCreationEnum, SelectedNewTabCreationEnum.NUM_ENTRIES); } /** * Records the content types when user downloads the file by long pressing the * save link context menu option. 
*/ static void recordSaveLinkTypes(GURL url) { String extension = MimeTypeMap.getFileExtensionFromUrl(url.getSpec()); @Type int mimeType = Type.UNKNOWN; if (extension != null) { String type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension); if (type != null) { if (type.startsWith("text")) { mimeType = Type.TEXT; } else if (type.startsWith("image")) { mimeType = Type.IMAGE; } else if (type.startsWith("audio")) { mimeType = Type.AUDIO; } else if (type.startsWith("video")) { mimeType = Type.VIDEO; } else if (type.equals("application/pdf")) { mimeType = Type.PDF; } } } RecordHistogram.recordEnumeratedHistogram( "ContextMenu.SaveLinkType", mimeType, Type.NUM_ENTRIES); } /** * Helper method to record MobileDownload.ContextMenu.SaveImage UMA * @param type Type to record */ static void recordSaveImageUma(int type) { RecordHistogram.recordEnumeratedHistogram( "MobileDownload.ContextMenu.SaveImage", type, TypeSaveImage.NUM_ENTRIES); } } /** * Builds a {@link ChromeContextMenuPopulator}. * @param itemDelegate The {@link ContextMenuItemDelegate} that will be notified with actions * to perform when menu items are selected. * @param shareDelegate The Supplier of {@link ShareDelegate} that will be notified when a share * action is performed. * @param mode Defines the context menu mode * @param externalAuthUtils {@link ExternalAuthUtils} instance. * @param context The {@link Context} used to retrieve the strings. * @param params The {@link ContextMenuParams} to populate the menu items. * @param nativeDelegate The {@link ContextMenuNativeDelegate} used to interact with native. 
*/ public ChromeContextMenuPopulator(ContextMenuItemDelegate itemDelegate, Supplier<ShareDelegate> shareDelegate, @ContextMenuMode int mode, ExternalAuthUtils externalAuthUtils, Context context, ContextMenuParams params, ContextMenuNativeDelegate nativeDelegate) { mItemDelegate = itemDelegate; mShareDelegateSupplier = shareDelegate; mMode = mode; mExternalAuthUtils = externalAuthUtils; mContext = context; mParams = params; mNativeDelegate = nativeDelegate; } /** * Gets the link of the item or empty text if the Url is empty. * @return A string with the link or an empty string. */ public static String createUrlText(ContextMenuParams params) { if (!isEmptyUrl(params.getLinkUrl())) { return getUrlText(params); } return ""; } private static String getUrlText(ContextMenuParams params) { // ContextMenuParams can only be created after the browser has started. assert BrowserStartupController.getInstance().isFullBrowserStarted(); return UrlFormatter.formatUrlForDisplayOmitSchemeOmitTrivialSubdomains( params.getLinkUrl().getSpec()); } @VisibleForTesting boolean isTabletScreen() { return DeviceFormFactor.isNonMultiDisplayContextOnTablet(mContext); } @Override public List<Pair<Integer, ModelList>> buildContextMenu() { boolean hasSaveImage = false; mShowEphemeralTabNewLabel = null; List<Pair<Integer, ModelList>> groupedItems = new ArrayList<>(); if (mParams.isAnchor()) { ModelList linkGroup = new ModelList(); if (FirstRunStatus.getFirstRunFlowComplete() && !isEmptyUrl(mParams.getUrl()) && UrlUtilities.isAcceptedScheme(mParams.getUrl())) { if (mMode == ContextMenuMode.NORMAL) { if (TabUiFeatureUtilities.ENABLE_TAB_GROUP_AUTO_CREATION.getValue()) { linkGroup.add(createListItem(Item.OPEN_IN_NEW_TAB)); // Note(david@vivaldi.com): Only add item when stacking is enabled. if (TabUiFeatureUtilities.isTabGroupsAndroidEnabled(mContext)) linkGroup.add(createListItem(Item.OPEN_IN_NEW_TAB_IN_GROUP)); } else { // Note(david@vivaldi.com): Only add item when stacking is enabled. 
if (TabUiFeatureUtilities.isTabGroupsAndroidEnabled(mContext)) if (TabUiFeatureUtilities.showContextMenuOpenNewTabInGroupItemFirst()) { linkGroup.add(createListItem(Item.OPEN_IN_NEW_TAB_IN_GROUP)); linkGroup.add(createListItem(Item.OPEN_IN_NEW_TAB)); } else { linkGroup.add(createListItem(Item.OPEN_IN_NEW_TAB)); linkGroup.add(createListItem(Item.OPEN_IN_NEW_TAB_IN_GROUP)); } } // Vivaldi linkGroup.add(createListItem(Item.OPEN_IN_NEW_TAB_BACKGROUND)); if (!mItemDelegate.isIncognito() && mItemDelegate.isIncognitoSupported()) { linkGroup.add(createListItem(Item.OPEN_IN_INCOGNITO_TAB)); } if (mItemDelegate.isOpenInOtherWindowSupported()) { linkGroup.add(createListItem(Item.OPEN_IN_OTHER_WINDOW)); } else if (isTabletScreen() && mItemDelegate.canEnterMultiWindowMode()) { linkGroup.add(createListItem(Item.OPEN_IN_NEW_WINDOW)); } } if ((mMode == ContextMenuMode.NORMAL || mMode == ContextMenuMode.CUSTOM_TAB) && EphemeralTabCoordinator.isSupported()) { mShowEphemeralTabNewLabel = shouldTriggerEphemeralTabHelpUi(); linkGroup.add( createListItem(Item.OPEN_IN_EPHEMERAL_TAB, mShowEphemeralTabNewLabel)); } } if (!MailTo.isMailTo(mParams.getLinkUrl().getSpec()) && !UrlUtilities.isTelScheme(mParams.getLinkUrl())) { linkGroup.add(createListItem(Item.COPY_LINK_ADDRESS)); if (!mParams.getLinkText().trim().isEmpty() && !mParams.isImage()) { linkGroup.add(createListItem(Item.COPY_LINK_TEXT)); } } if (FirstRunStatus.getFirstRunFlowComplete()) { if (!mItemDelegate.isIncognito() && UrlUtilities.isDownloadableScheme(mParams.getLinkUrl()) && !BuildConfig.IS_OEM_AUTOMOTIVE_BUILD) { // Vivaldi [POLE-10] linkGroup.add(createListItem(Item.SAVE_LINK_AS)); } if (!mParams.isImage() && ChromeFeatureList.isEnabled(ChromeFeatureList.READ_LATER) && ReadingListUtils.isReadingListSupported(mParams.getLinkUrl())) { linkGroup.add(createListItem(Item.READ_LATER, shouldTriggerReadLaterHelpUi())); } // Vivaldi if (!BuildConfig.IS_OEM_AUTOMOTIVE_BUILD) linkGroup.add(createShareListItem(Item.SHARE_LINK, 
Item.DIRECT_SHARE_LINK)); if (UrlUtilities.isTelScheme(mParams.getLinkUrl())) { if (mItemDelegate.supportsCall()) { linkGroup.add(createListItem(Item.CALL)); } if (mItemDelegate.supportsSendTextMessage()) { linkGroup.add(createListItem(Item.SEND_MESSAGE)); } if (mItemDelegate.supportsAddToContacts()) { linkGroup.add(createListItem(Item.ADD_TO_CONTACTS)); } } if (MailTo.isMailTo(mParams.getLinkUrl().getSpec())) { if (mItemDelegate.supportsSendEmailMessage()) { linkGroup.add(createListItem(Item.SEND_MESSAGE)); } if (!TextUtils.isEmpty(MailTo.parse(mParams.getLinkUrl().getSpec()).getTo()) && mItemDelegate.supportsAddToContacts()) { linkGroup.add(createListItem(Item.ADD_TO_CONTACTS)); } } } if (UrlUtilities.isTelScheme(mParams.getLinkUrl()) || MailTo.isMailTo(mParams.getLinkUrl().getSpec())) { linkGroup.add(createListItem(Item.COPY)); } if (linkGroup.size() > 0) { groupedItems.add(new Pair<>(R.string.contextmenu_link_title, linkGroup)); } } if (mParams.isImage() && FirstRunStatus.getFirstRunFlowComplete()) { ModelList imageGroup = new ModelList(); boolean isSrcDownloadableScheme = UrlUtilities.isDownloadableScheme(mParams.getSrcUrl()); boolean showLensShoppingMenuItem = false; // Avoid showing open image option for same image which is already opened. 
if (mMode == ContextMenuMode.CUSTOM_TAB && !mItemDelegate.getPageUrl().equals(mParams.getSrcUrl())) { imageGroup.add(createListItem(Item.OPEN_IMAGE)); } if (mMode == ContextMenuMode.NORMAL) { imageGroup.add(createListItem(Item.OPEN_IMAGE_IN_NEW_TAB)); } if ((mMode == ContextMenuMode.NORMAL || mMode == ContextMenuMode.CUSTOM_TAB) && EphemeralTabCoordinator.isSupported()) { if (mShowEphemeralTabNewLabel == null) { mShowEphemeralTabNewLabel = shouldTriggerEphemeralTabHelpUi(); } imageGroup.add(createListItem( Item.OPEN_IMAGE_IN_EPHEMERAL_TAB, mShowEphemeralTabNewLabel)); } imageGroup.add(createListItem(Item.COPY_IMAGE)); if (isSrcDownloadableScheme && !BuildConfig.IS_OEM_AUTOMOTIVE_BUILD) { // Vivaldi [POLE-10] imageGroup.add(createListItem(Item.SAVE_IMAGE)); hasSaveImage = true; } // If set, show 'Share Image' before 'Search with Google Lens'. // IMPORTANT: Must stay consistent with logic after the below Lens block. boolean addedShareImageAboveLens = false; if (LensUtils.orderShareImageBeforeLens()) { // Vivaldi if (!BuildConfig.IS_OEM_AUTOMOTIVE_BUILD) { addedShareImageAboveLens = true; imageGroup.add(createShareListItem(Item.SHARE_IMAGE, Item.DIRECT_SHARE_IMAGE)); } // Vivaldi } if (mMode == ContextMenuMode.CUSTOM_TAB || mMode == ContextMenuMode.NORMAL) { if (checkSupportsGoogleSearchByImage(isSrcDownloadableScheme)) { // All behavior relating to Lens integration is gated by Feature Flag. // A map to indicate which image search menu item would be shown. 
Map<String, Boolean> imageSearchMenuItemsToShow = getSearchByImageMenuItemsToShowAndRecordMetrics( mParams.getPageUrl(), mItemDelegate.isIncognito()); if (imageSearchMenuItemsToShow.get(LENS_SEARCH_MENU_ITEM_KEY)) { imageGroup.add(createListItem(Item.SEARCH_WITH_GOOGLE_LENS, true)); maybeRecordUkmLensShown(); } else if (imageSearchMenuItemsToShow.get(SEARCH_BY_IMAGE_MENU_ITEM_KEY)) { imageGroup.add(createListItem(Item.SEARCH_BY_IMAGE)); maybeRecordUkmSearchByImageShown(); } // Check whether we should show Lens Shopping menu item. if (imageSearchMenuItemsToShow.get(LENS_SHOP_MENU_ITEM_KEY)) { showLensShoppingMenuItem = true; } } else if (ChromeFeatureList.isEnabled( ChromeFeatureList.CONTEXT_MENU_SEARCH_WITH_GOOGLE_LENS)) { LensMetrics.recordLensSupportStatus(LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.SEARCH_BY_IMAGE_UNAVAILABLE); } } // By default show 'Share Image' after 'Search with Google Lens'. // IMPORTANT: Must stay consistent with logic before the above Lens block. // Vivaldi if (!BuildConfig.IS_OEM_AUTOMOTIVE_BUILD) if (!addedShareImageAboveLens) { imageGroup.add(createShareListItem(Item.SHARE_IMAGE, Item.DIRECT_SHARE_IMAGE)); } // Show Lens Shopping Menu Item when the Lens Shopping feature is supported. 
if (showLensShoppingMenuItem) { imageGroup.add(createListItem(Item.SHOP_IMAGE_WITH_GOOGLE_LENS, true)); maybeRecordUkmLensShoppingShown(); } recordSaveImageContextMenuResult(isSrcDownloadableScheme); groupedItems.add(new Pair<>(R.string.contextmenu_image_title, imageGroup)); } if (mParams.isVideo() && FirstRunStatus.getFirstRunFlowComplete() && mParams.canSaveMedia() && UrlUtilities.isDownloadableScheme(mParams.getSrcUrl()) && !BuildConfig.IS_OEM_AUTOMOTIVE_BUILD) { // Vivaldi [POLE-10] ModelList videoGroup = new ModelList(); videoGroup.add(createListItem(Item.SAVE_VIDEO)); groupedItems.add(new Pair<>(R.string.contextmenu_video_title, videoGroup)); } if (mParams.getOpenedFromHighlight()) { ModelList sharedHighlightingGroup = new ModelList(); if (mMode == ContextMenuMode.NORMAL) { sharedHighlightingGroup.add(createListItem(Item.SHARE_HIGHLIGHT)); } sharedHighlightingGroup.add(createListItem(Item.REMOVE_HIGHLIGHT)); if (mMode == ContextMenuMode.NORMAL) { sharedHighlightingGroup.add(createListItem(Item.LEARN_MORE)); } groupedItems.add(new Pair<>(null, sharedHighlightingGroup)); } if (mMode != ContextMenuMode.NORMAL && FirstRunStatus.getFirstRunFlowComplete()) { ModelList items = groupedItems.isEmpty() ? new ModelList() : groupedItems .get(mMode == ContextMenuMode.CUSTOM_TAB ? 
0 : groupedItems.size() - 1) .second; if (UrlUtilities.isAcceptedScheme(mParams.getUrl())) { if (mMode == ContextMenuMode.WEB_APP) { items.add(createListItem(Item.OPEN_IN_CHROME)); } else if (mMode == ContextMenuMode.CUSTOM_TAB && !mItemDelegate.isIncognito()) { boolean addNewEntries = !UrlUtilities.isInternalScheme(mParams.getUrl()) && !isEmptyUrl(mParams.getUrl()); if (SharedPreferencesManager.getInstance().readBoolean( ChromePreferenceKeys.CHROME_DEFAULT_BROWSER, false) && addNewEntries) { if (mItemDelegate.isIncognitoSupported()) { items.add(0, createListItem(Item.OPEN_IN_CHROME_INCOGNITO_TAB)); } items.add(0, createListItem(Item.OPEN_IN_NEW_CHROME_TAB)); } else if (addNewEntries && UrlUtilities.isAcceptedScheme(mParams.getUrl())) { items.add(0, createListItem(Item.OPEN_IN_BROWSER_ID)); } } } if (groupedItems.isEmpty() && items.size() > 0) { groupedItems.add(new Pair<>(R.string.contextmenu_link_title, items)); } } if (!groupedItems.isEmpty() && BrowserStartupController.getInstance().isFullBrowserStarted()) { if (!hasSaveImage) { ContextMenuUma.recordSaveImageUma(mParams.isImage() ? 
ContextMenuUma.TypeSaveImage.DISABLED_AND_IS_IMAGE_PARAM
                    : ContextMenuUma.TypeSaveImage.DISABLED_AND_IS_NOT_IMAGE_PARAM);
        } else {
            ContextMenuUma.recordSaveImageUma(ContextMenuUma.TypeSaveImage.SHOWN);
        }
    }
    return groupedItems;
}

/**
 * Returns whether the in-product-help "new" label should be shown for the ephemeral tab
 * item, based on the feature-engagement tracker state.
 */
@VisibleForTesting
boolean shouldTriggerEphemeralTabHelpUi() {
    Tracker tracker = TrackerFactory.getTrackerForProfile(getProfile());
    return tracker.isInitialized()
            && tracker.shouldTriggerHelpUI(FeatureConstants.EPHEMERAL_TAB_FEATURE);
}

/**
 * Returns whether the in-product-help label should be shown for the "Read later" item,
 * based on the feature-engagement tracker state.
 */
@VisibleForTesting
boolean shouldTriggerReadLaterHelpUi() {
    Tracker tracker = TrackerFactory.getTrackerForProfile(getProfile());
    return tracker.isInitialized()
            && tracker.shouldTriggerHelpUI(FeatureConstants.READ_LATER_CONTEXT_MENU_FEATURE);
}

/** Delegates the incognito check to the item delegate. */
@Override
public boolean isIncognito() {
    return mItemDelegate.isIncognito();
}

/** Delegates the page-title lookup to the item delegate. */
@Override
public String getPageTitle() {
    return mItemDelegate.getPageTitle();
}

/**
 * Handles a click on a context menu item: records the corresponding UMA action and
 * dispatches to the item delegate / native delegate.
 *
 * @param itemId The {@code R.id.contextmenu_*} resource id of the selected item.
 * @return Always {@code true}; unknown ids only trip an assert in debug builds.
 */
@Override
public boolean onItemSelected(int itemId) {
    if (itemId == R.id.contextmenu_open_in_new_tab) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_NEW_TAB);
        // Vivaldi fork: "open in new tab" opens in the foreground.
        if (ChromeApplicationImpl.isVivaldi())
            mItemDelegate.onOpenInNewTabForeground(
                    mParams.getUrl().getSpec(), mParams.getReferrer());
        else
            mItemDelegate.onOpenInNewTab(mParams.getUrl(), mParams.getReferrer());
    } else if (itemId == R.id.contextmenu_open_in_new_tab_in_group) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_NEW_TAB_IN_GROUP);
        mItemDelegate.onOpenInNewTabInGroup(mParams.getUrl(), mParams.getReferrer());
    } else if (itemId == R.id.contextmenu_open_in_incognito_tab) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_INCOGNITO_TAB);
        mItemDelegate.onOpenInNewIncognitoTab(mParams.getUrl());
    } else if (itemId == R.id.contextmenu_open_in_other_window) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_OTHER_WINDOW);
        mItemDelegate.onOpenInOtherWindow(mParams.getUrl(), mParams.getReferrer());
    } else if (itemId == R.id.contextmenu_open_in_new_window) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_NEW_WINDOW);
        // |openInOtherWindow| can handle opening in a new window as well.
        mItemDelegate.onOpenInOtherWindow(mParams.getUrl(), mParams.getReferrer());
    } else if (itemId == R.id.contextmenu_open_in_ephemeral_tab) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_EPHEMERAL_TAB);
        mItemDelegate.onOpenInEphemeralTab(mParams.getUrl(), mParams.getLinkText());
    } else if (itemId == R.id.contextmenu_open_image) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IMAGE);
        mItemDelegate.onOpenImageUrl(mParams.getSrcUrl(), mParams.getReferrer());
    } else if (itemId == R.id.contextmenu_open_image_in_new_tab) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IMAGE_IN_NEW_TAB);
        mItemDelegate.onOpenImageInNewTab(mParams.getSrcUrl(), mParams.getReferrer());
    } else if (itemId == R.id.contextmenu_open_image_in_ephemeral_tab) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IMAGE_IN_EPHEMERAL_TAB);
        String title = mParams.getTitleText();
        // Fall back to a file name guessed from the image URL when no title is set.
        if (TextUtils.isEmpty(title)) {
            title = URLUtil.guessFileName(mParams.getSrcUrl().getSpec(), null, null);
        }
        mItemDelegate.onOpenInEphemeralTab(mParams.getSrcUrl(), title);
    } else if (itemId == R.id.contextmenu_copy_image) {
        recordContextMenuSelection(ContextMenuUma.Action.COPY_IMAGE);
        copyImageToClipboard();
    } else if (itemId == R.id.contextmenu_copy_link_address) {
        recordContextMenuSelection(ContextMenuUma.Action.COPY_LINK_ADDRESS);
        mItemDelegate.onSaveToClipboard(mParams.getUnfilteredLinkUrl().getSpec(),
                ContextMenuItemDelegate.ClipboardType.LINK_URL);
    } else if (itemId == R.id.contextmenu_call) {
        recordContextMenuSelection(ContextMenuUma.Action.CALL);
        mItemDelegate.onCall(mParams.getLinkUrl());
    } else if (itemId == R.id.contextmenu_send_message) {
        // Shared id: "send message" means email for mailto: links, SMS for tel: links.
        if (MailTo.isMailTo(mParams.getLinkUrl().getSpec())) {
            recordContextMenuSelection(ContextMenuUma.Action.SEND_EMAIL);
            mItemDelegate.onSendEmailMessage(mParams.getLinkUrl());
        } else if (UrlUtilities.isTelScheme(mParams.getLinkUrl())) {
            recordContextMenuSelection(ContextMenuUma.Action.SEND_TEXT_MESSAGE);
            mItemDelegate.onSendTextMessage(mParams.getLinkUrl());
        }
    } else if (itemId == R.id.contextmenu_add_to_contacts) {
        recordContextMenuSelection(ContextMenuUma.Action.ADD_TO_CONTACTS);
        mItemDelegate.onAddToContacts(mParams.getLinkUrl());
    } else if (itemId == R.id.contextmenu_copy) {
        // Shared id: "copy" copies the email address for mailto:, the number for tel:.
        if (MailTo.isMailTo(mParams.getLinkUrl().getSpec())) {
            recordContextMenuSelection(ContextMenuUma.Action.COPY_EMAIL_ADDRESS);
            mItemDelegate.onSaveToClipboard(
                    MailTo.parse(mParams.getLinkUrl().getSpec()).getTo(),
                    ContextMenuItemDelegate.ClipboardType.LINK_URL);
        } else if (UrlUtilities.isTelScheme(mParams.getLinkUrl())) {
            recordContextMenuSelection(ContextMenuUma.Action.COPY_PHONE_NUMBER);
            mItemDelegate.onSaveToClipboard(UrlUtilities.getTelNumber(mParams.getLinkUrl()),
                    ContextMenuItemDelegate.ClipboardType.LINK_URL);
        }
    } else if (itemId == R.id.contextmenu_copy_link_text) {
        recordContextMenuSelection(ContextMenuUma.Action.COPY_LINK_TEXT);
        mItemDelegate.onSaveToClipboard(
                mParams.getLinkText(), ContextMenuItemDelegate.ClipboardType.LINK_TEXT);
    } else if (itemId == R.id.contextmenu_save_image) {
        recordContextMenuSelection(ContextMenuUma.Action.SAVE_IMAGE);
        // Delegate may veto the download (e.g. missing permission); only then notify native.
        if (mItemDelegate.startDownload(mParams.getSrcUrl(), false)) {
            mNativeDelegate.startDownload(false);
        }
    } else if (itemId == R.id.contextmenu_save_video) {
        recordContextMenuSelection(ContextMenuUma.Action.SAVE_VIDEO);
        if (mItemDelegate.startDownload(mParams.getSrcUrl(), false)) {
            mNativeDelegate.startDownload(false);
        }
    } else if (itemId == R.id.contextmenu_save_link_as) {
        recordContextMenuSelection(ContextMenuUma.Action.SAVE_LINK);
        GURL url = mParams.getUnfilteredLinkUrl();
        if (mItemDelegate.startDownload(url, true)) {
            ContextMenuUma.recordSaveLinkTypes(url);
            mNativeDelegate.startDownload(true);
        }
    } else if (itemId == R.id.contextmenu_share_link) {
        recordContextMenuSelection(ContextMenuUma.Action.SHARE_LINK);
        // TODO(https://crbug.com/783819): Migrate ShareParams to GURL.
        ShareParams linkShareParams =
                new ShareParams
                        .Builder(getWindow(), ContextMenuUtils.getTitle(mParams),
                                mParams.getUrl().getSpec())
                        .build();
        mShareDelegateSupplier.get().share(linkShareParams,
                new ChromeShareExtras.Builder().setSaveLastUsed(true).build(),
                ShareOrigin.CONTEXT_MENU);
    } else if (itemId == R.id.contextmenu_read_later) {
        recordContextMenuSelection(ContextMenuUma.Action.READ_LATER);
        // TODO(crbug.com/1147475): Download the page to offline page backend.
        String title = mParams.getTitleText();
        if (TextUtils.isEmpty(title)) {
            title = mParams.getLinkText();
        }
        mItemDelegate.onReadLater(mParams.getUrl(), title);
    } else if (itemId == R.id.contextmenu_direct_share_link) {
        recordContextMenuSelection(ContextMenuUma.Action.DIRECT_SHARE_LINK);
        // Title is deliberately the URL spec here (no page title for direct share).
        final ShareParams shareParams =
                new ShareParams
                        .Builder(getWindow(), mParams.getUrl().getSpec(),
                                mParams.getUrl().getSpec())
                        .build();
        ShareHelper.shareWithLastUsedComponent(shareParams);
    } else if (itemId == R.id.contextmenu_search_with_google_lens) {
        recordContextMenuSelection(ContextMenuUma.Action.SEARCH_WITH_GOOGLE_LENS);
        searchWithGoogleLens(LensEntryPoint.CONTEXT_MENU_SEARCH_MENU_ITEM);
        // Remember the click so the in-product-help "new" treatment can be retired.
        SharedPreferencesManager prefManager = SharedPreferencesManager.getInstance();
        prefManager.writeBoolean(
                ChromePreferenceKeys.CONTEXT_MENU_SEARCH_WITH_GOOGLE_LENS_CLICKED, true);
    } else if (itemId == R.id.contextmenu_search_by_image) {
        recordContextMenuSelection(ContextMenuUma.Action.SEARCH_BY_IMAGE);
        mNativeDelegate.searchForImage();
    } else if (itemId == R.id.contextmenu_shop_image_with_google_lens) {
        recordContextMenuSelection(ContextMenuUma.Action.SHOP_IMAGE_WITH_GOOGLE_LENS);
        searchWithGoogleLens(LensEntryPoint.CONTEXT_MENU_SHOP_MENU_ITEM);
        SharedPreferencesManager prefManager = SharedPreferencesManager.getInstance();
        prefManager.writeBoolean(
                ChromePreferenceKeys.CONTEXT_MENU_SHOP_IMAGE_WITH_GOOGLE_LENS_CLICKED, true);
    } else if (itemId == R.id.contextmenu_share_image) {
        recordContextMenuSelection(ContextMenuUma.Action.SHARE_IMAGE);
        shareImage();
    } else if (itemId == R.id.contextmenu_direct_share_image) {
        recordContextMenuSelection(ContextMenuUma.Action.DIRECT_SHARE_IMAGE);
        mNativeDelegate.retrieveImageForShare(ContextMenuImageFormat.ORIGINAL, (Uri uri) -> {
            ShareHelper.shareImage(
                    getWindow(), getProfile(), ShareHelper.getLastShareComponentName(), uri);
        });
    } else if (itemId == R.id.contextmenu_open_in_chrome) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_CHROME);
        mItemDelegate.onOpenInChrome(mParams.getUrl(), mParams.getPageUrl());
    } else if (itemId == R.id.contextmenu_open_in_new_chrome_tab) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_NEW_CHROME_TAB);
        mItemDelegate.onOpenInNewChromeTabFromCCT(mParams.getUrl(), false);
    } else if (itemId == R.id.contextmenu_open_in_chrome_incognito_tab) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_CHROME_INCOGNITO_TAB);
        mItemDelegate.onOpenInNewChromeTabFromCCT(mParams.getUrl(), true);
    } else if (itemId == R.id.contextmenu_open_in_browser_id) {
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IN_BROWSER);
        mItemDelegate.onOpenInDefaultBrowser(mParams.getUrl());
    } else if (itemId == R.id.contextmenu_share_highlight) {
        recordContextMenuSelection(ContextMenuUma.Action.SHARE_HIGHLIGHT);
        shareHighlighting();
    } else if (itemId == R.id.contextmenu_remove_highlight) {
        recordContextMenuSelection(ContextMenuUma.Action.REMOVE_HIGHLIGHT);
        LinkToTextHelper.removeHighlightsAllFrames(mItemDelegate.getWebContents());
    } else if (itemId == R.id.contextmenu_learn_more) {
        recordContextMenuSelection(ContextMenuUma.Action.LEARN_MORE);
        mItemDelegate.onOpenInNewTab(new GURL(LinkToTextHelper.SHARED_HIGHLIGHTING_SUPPORT_URL),
                mParams.getReferrer());
    } else if (itemId == R.id.contextmenu_open_in_new_tab_background) { // Vivaldi
        // NOTE(review): this records OPEN_IMAGE_IN_NEW_TAB for a *link* opened in a
        // background tab — looks like the wrong UMA action; confirm with Vivaldi authors.
        recordContextMenuSelection(ContextMenuUma.Action.OPEN_IMAGE_IN_NEW_TAB);
        mItemDelegate.onOpenInNewTab(mParams.getUrl(), mParams.getReferrer());
    } else {
        assert false;
    }
    return true;
}
@Override
public void
onMenuClosed() { if (mShowEphemeralTabNewLabel != null && mShowEphemeralTabNewLabel) { Tracker tracker = TrackerFactory.getTrackerForProfile(getProfile()); if (tracker.isInitialized()) tracker.dismissed(FeatureConstants.EPHEMERAL_TAB_FEATURE); } } private WindowAndroid getWindow() { return mItemDelegate.getWebContents().getTopLevelNativeWindow(); } private Activity getActivity() { return getWindow().getActivity().get(); } private void shareHighlighting() { ShareParams linkShareParams = new ShareParams .Builder(getWindow(), /*title=*/"", /*url=*/mParams.getUrl().getSpec()) .build(); mShareDelegateSupplier.get().share(linkShareParams, new ChromeShareExtras.Builder() .setSaveLastUsed(true) .setIsReshareHighlightedText(true) .setRenderFrameHost(mNativeDelegate.getRenderFrameHost()) .setDetailedContentType( ChromeShareExtras.DetailedContentType.HIGHLIGHTED_TEXT) .build(), ShareOrigin.MOBILE_ACTION_MODE); } /** * Copy the image, that triggered the current context menu, to system clipboard. */ private void copyImageToClipboard() { mNativeDelegate.retrieveImageForShare( ContextMenuImageFormat.ORIGINAL, mItemDelegate::onSaveImageToClipboard); } /** * Share the image that triggered the current context menu. * Package-private, allowing access only from the context menu item to ensure that * it will use the right activity set when the menu was displayed. 
*/ private void shareImage() { mNativeDelegate.retrieveImageForShare(ContextMenuImageFormat.ORIGINAL, (Uri imageUri) -> { if (!mShareDelegateSupplier.get().isSharingHubEnabled()) { ShareHelper.shareImage(getWindow(), Profile.fromWebContents(mItemDelegate.getWebContents()), null, imageUri); return; } ContentResolver contentResolver = ContextUtils.getApplicationContext().getContentResolver(); String mimeType = contentResolver.getType(imageUri); ShareParams imageShareParams = new ShareParams .Builder(getWindow(), ContextMenuUtils.getTitle(mParams), /*url=*/"") .setFileUris(new ArrayList<>(Collections.singletonList(imageUri))) .setFileContentType(mimeType) .build(); int detailedContentType; if (mimeType.equals("image/gif")) { detailedContentType = ChromeShareExtras.DetailedContentType.GIF; } else { detailedContentType = ChromeShareExtras.DetailedContentType.IMAGE; } mShareDelegateSupplier.get().share(imageShareParams, new ChromeShareExtras.Builder() .setSaveLastUsed(true) .setImageSrcUrl(mParams.getSrcUrl()) .setContentUrl(mParams.getPageUrl()) .setDetailedContentType(detailedContentType) .build(), ShareOrigin.CONTEXT_MENU); }); } /** * @return The service that handles TemplateUrls. */ protected TemplateUrlService getTemplateUrlService() { return TemplateUrlServiceFactory.get(); } /** * Search for the image by intenting to the lens app with the image data attached. * @param lensEntryPoint The entry point that launches the Lens app. */ protected void searchWithGoogleLens(@LensEntryPoint int lensEntryPoint) { mNativeDelegate.retrieveImageForShare(ContextMenuImageFormat.PNG, (Uri imageUri) -> { LensIntentParams intentParams = getLensIntentParams(lensEntryPoint, imageUri); LensController.getInstance().startLens(getWindow(), intentParams); }); } /** * Build the intent params for Lens Context Menu features. * @param lensEntryPoint The entry point that launches the Lens app. * @param imageUri The image url that the context menu was triggered on. * @return A LensIntentParams. 
Will be used to launch the Lens app. */
@VisibleForTesting
protected LensIntentParams getLensIntentParams(
        @LensEntryPoint int lensEntryPoint, Uri imageUri) {
    // getValidSpecOrEmpty() avoids passing invalid-URL specs to the Lens intent.
    return new LensIntentParams.Builder(lensEntryPoint, isIncognito())
            .withImageUri(imageUri)
            .withImageTitleOrAltText(mParams.getTitleText())
            .withSrcUrl(mParams.getSrcUrl().getValidSpecOrEmpty())
            .withPageUrl(mParams.getPageUrl().getValidSpecOrEmpty())
            .build();
}

/**
 * Returns a Lens chip delegate when the Lens chip is enabled for this context;
 * otherwise null (no chip is shown).
 */
@Override
public @Nullable ChipDelegate getChipDelegate() {
    if (LensChipDelegate.isEnabled(isIncognito(), isTabletScreen())) {
        // TODO(crbug.com/783819): Migrate LensChipDelegate to GURL.
        return new LensChipDelegate(mParams.getPageUrl().getSpec(), mParams.getTitleText(),
                mParams.getSrcUrl().getSpec(), getPageTitle(), isIncognito(), isTabletScreen(),
                mItemDelegate.getWebContents(), mNativeDelegate, getOnChipClickedCallback(),
                getOnChipShownCallback());
    }
    return null;
}

/** Callback invoked when a Lens chip is shown; forwards the chip type to UKM logging. */
private Callback<Integer> getOnChipShownCallback() {
    return (Integer result) -> {
        int chipType = result.intValue();
        maybeRecordUkmLensChipShown(chipType);
    };
}

/** Callback invoked when a Lens chip is clicked; records the matching UMA action. */
private Callback<Integer> getOnChipClickedCallback() {
    return (Integer result) -> {
        int chipType = result.intValue();
        switch (chipType) {
            case ChipRenderParams.ChipType.LENS_SHOPPING_CHIP:
                recordContextMenuSelection(ContextMenuUma.Action.SHOP_WITH_GOOGLE_LENS_CHIP);
                return;
            case ChipRenderParams.ChipType.LENS_TRANSLATE_CHIP:
                recordContextMenuSelection(
                        ContextMenuUma.Action.TRANSLATE_WITH_GOOGLE_LENS_CHIP);
                return;
            default:
                // Unreachable value.
                throw new IllegalArgumentException("Invalid chip type provided to callback.");
        }
    };
}

/**
 * Checks whether a url is empty or blank.
 * @param url The url need to be checked.
 * @return True if the url is empty or "about:blank".
 */
private static boolean isEmptyUrl(GURL url) {
    return url == null || url.isEmpty()
            || url.getSpec().equals(ContentUrlConstants.ABOUT_BLANK_DISPLAY_URL);
}

/**
 * Record the UMA related to save image context menu option.
* @param isDownloadableScheme The image is downloadable. */ private void recordSaveImageContextMenuResult(boolean isDownloadableScheme) { if (!BrowserStartupController.getInstance().isFullBrowserStarted()) { return; } ContextMenuUma.recordSaveImageUma(ContextMenuUma.TypeSaveImage.LOADED); if (!isDownloadableScheme) { ContextMenuUma.recordSaveImageUma(ContextMenuUma.TypeSaveImage.NOT_DOWNLOADABLE); } } /** * Record a UMA ping and a UKM ping if enabled. */ private void recordContextMenuSelection(int actionId) { ContextMenuUma.record(mItemDelegate.getWebContents(), mParams, actionId); if (LensUtils.shouldLogUkmForLensContextMenuFeatures()) { maybeRecordActionUkm("ContextMenuAndroid.Selected", actionId); } } /** * Whether the lens menu items should be shown based on a set of application * compatibility checks. * * @param pageUrl The Url associated with the main frame of the page that triggered the context * menu. * @param isIncognito Whether the user is incognito. * @return An immutable map. Can be used to check whether a specific Lens menu item is enabled. */ private Map<String, Boolean> getSearchByImageMenuItemsToShowAndRecordMetrics( GURL pageUrl, boolean isIncognito) { // If Google Lens feature is not supported, show search by image menu item. if (!LensUtils.isGoogleLensFeatureEnabled(isIncognito)) { // TODO(yusuyoutube): Cleanup. Remove repetition. 
return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, false); put(LENS_SHOP_MENU_ITEM_KEY, false); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, true); } }); } if (isTabletScreen() && !LensUtils.isGoogleLensFeatureEnabledOnTablet()) { LensMetrics.recordLensSupportStatus(LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.DISABLED_ON_TABLET); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, false); put(LENS_SHOP_MENU_ITEM_KEY, false); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, true); } }); } final TemplateUrlService templateUrlServiceInstance = getTemplateUrlService(); String versionName = LensUtils.getLensActivityVersionNameIfAvailable(mContext); if (!templateUrlServiceInstance.isDefaultSearchEngineGoogle()) { LensMetrics.recordLensSupportStatus(LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.NON_GOOGLE_SEARCH_ENGINE); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, false); put(LENS_SHOP_MENU_ITEM_KEY, false); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, true); } }); } if (TextUtils.isEmpty(versionName)) { LensMetrics.recordLensSupportStatus(LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.ACTIVITY_NOT_ACCESSIBLE); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, false); put(LENS_SHOP_MENU_ITEM_KEY, false); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, true); } }); } if (GSAState.getInstance(mContext).isAgsaVersionBelowMinimum( versionName, LensUtils.getMinimumAgsaVersionForLensSupport())) { LensMetrics.recordLensSupportStatus( LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.OUT_OF_DATE); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, false); put(LENS_SHOP_MENU_ITEM_KEY, false); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, true); } }); } if (LensUtils.isDeviceOsBelowMinimum()) { 
LensMetrics.recordLensSupportStatus( LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.LEGACY_OS); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, false); put(LENS_SHOP_MENU_ITEM_KEY, false); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, true); } }); } if (!LensUtils.isValidAgsaPackage(mExternalAuthUtils)) { LensMetrics.recordLensSupportStatus(LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.INVALID_PACKAGE); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, false); put(LENS_SHOP_MENU_ITEM_KEY, false); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, true); } }); } // In Lens Shopping Menu Item experiment, fallback to Search image with Google Lens // When the url is not in domain allowlist and AGSA version is equal to or greater than the // minimum shopping supported version. if (LensUtils.isGoogleLensShoppingFeatureEnabled(isIncognito) && !GSAState.getInstance(mContext).isAgsaVersionBelowMinimum( versionName, LensUtils.getMinimumAgsaVersionForLensShoppingSupport()) && LensUtils.isInShoppingAllowlist(pageUrl)) { // Show both search and shop menu items when experiment with both Lens searching and // shopping. if (ChromeFeatureList.isEnabled( ChromeFeatureList.CONTEXT_MENU_SEARCH_AND_SHOP_WITH_GOOGLE_LENS)) { LensMetrics.recordLensSupportStatus(LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.LENS_SHOP_AND_SEARCH_SUPPORTED); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, true); put(LENS_SHOP_MENU_ITEM_KEY, true); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, false); } }); } // Hide Search With Google Lens menu item when experiment only with Lens Shopping // menu items. 
LensMetrics.recordLensSupportStatus(LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.LENS_SHOP_SUPPORTED); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, false); put(LENS_SHOP_MENU_ITEM_KEY, true); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, false); } }); } LensMetrics.recordLensSupportStatus(LENS_SUPPORT_STATUS_HISTOGRAM_NAME, LensMetrics.LensSupportStatus.LENS_SEARCH_SUPPORTED); return Collections.unmodifiableMap(new HashMap<String, Boolean>() { { put(LENS_SEARCH_MENU_ITEM_KEY, true); put(LENS_SHOP_MENU_ITEM_KEY, false); put(SEARCH_BY_IMAGE_MENU_ITEM_KEY, false); } }); } private ListItem createListItem(@Item int item) { return createListItem(item, false); } private ListItem createListItem(@Item int item, boolean showInProductHelp) { final PropertyModel model = new PropertyModel.Builder(ContextMenuItemProperties.ALL_KEYS) .with(MENU_ID, ChromeContextMenuItem.getMenuId(item)) .with(TEXT, ChromeContextMenuItem.getTitle(mContext, item, showInProductHelp)) .build(); return new ListItem(ListItemType.CONTEXT_MENU_ITEM, model); } private ListItem createShareListItem(@Item int item, @Item int iconButtonItem) { final boolean isLink = item == Item.SHARE_LINK; final Pair<Drawable, CharSequence> shareInfo = createRecentShareAppInfo(isLink); final PropertyModel model = new PropertyModel.Builder(ContextMenuItemWithIconButtonProperties.ALL_KEYS) .with(MENU_ID, ChromeContextMenuItem.getMenuId(item)) .with(TEXT, ChromeContextMenuItem.getTitle(mContext, item, false)) .with(BUTTON_IMAGE, shareInfo.first) .with(BUTTON_CONTENT_DESC, shareInfo.second) .with(BUTTON_MENU_ID, ChromeContextMenuItem.getMenuId(iconButtonItem)) .build(); return new ListItem(ListItemType.CONTEXT_MENU_ITEM_WITH_ICON_BUTTON, model); } /** * Return the icon and name of the most recently shared app by certain app. * @param isLink Whether the item is SHARE_LINK. 
*/
private static Pair<Drawable, CharSequence> createRecentShareAppInfo(boolean isLink) {
    // Resolve the most-recently-used share target for either link or image sharing.
    Intent shareIntent = isLink ? ShareHelper.getShareLinkAppCompatibilityIntent()
                                : ShareHelper.getShareImageIntent(null);
    return ShareHelper.getShareableIconAndName(shareIntent);
}

/**
 * If not disabled record a UKM for opening the context menu with the search by image option.
 */
private void maybeRecordUkmSearchByImageShown() {
    if (LensUtils.shouldLogUkmForLensContextMenuFeatures()) {
        maybeRecordBooleanUkm("ContextMenuAndroid.Shown", "SearchByImage");
    }
}

/**
 * If not disabled record a UKM for opening the context menu with the lens item.
 */
private void maybeRecordUkmLensShown() {
    if (LensUtils.shouldLogUkmByFeature(
                ChromeFeatureList.CONTEXT_MENU_SEARCH_WITH_GOOGLE_LENS)) {
        maybeRecordBooleanUkm("ContextMenuAndroid.Shown", "SearchWithGoogleLens");
    }
}

/**
 * If not disabled record a UKM for opening the context menu with the lens shopping
 * item.
 */
private void maybeRecordUkmLensShoppingShown() {
    if (LensUtils.shouldLogUkmByFeature(ChromeFeatureList.CONTEXT_MENU_SHOP_WITH_GOOGLE_LENS)) {
        maybeRecordBooleanUkm("ContextMenuAndroid.Shown", "ShopWithGoogleLens");
    }
}

/**
 * Records a "chip shown" UKM for the given Lens chip type, gated per-feature.
 * NOTE(review): if asserts are disabled and an unknown chipType reaches the default
 * branch, {@code actionName} stays null when passed on — confirm callers can never
 * hit that path in release builds.
 */
private void maybeRecordUkmLensChipShown(int chipType) {
    String actionName = null;
    switch (chipType) {
        case ChipRenderParams.ChipType.LENS_SHOPPING_CHIP:
            if (!LensUtils.shouldLogUkmByFeature(
                        ChromeFeatureList.CONTEXT_MENU_GOOGLE_LENS_CHIP)) {
                return;
            }
            actionName = "ShopWithGoogleLensChip";
            break;
        case ChipRenderParams.ChipType.LENS_TRANSLATE_CHIP:
            if (!LensUtils.shouldLogUkmByFeature(
                        ChromeFeatureList.CONTEXT_MENU_TRANSLATE_WITH_GOOGLE_LENS)) {
                return;
            }
            actionName = "TranslateWithGoogleLensChip";
            break;
        default:
            // Unreachable value.
            assert false : "Invalid chip type provided to callback.";
    }
    maybeRecordBooleanUkm("ContextMenuAndroid.Shown", actionName);
}

/**
 * Initialize the bridge if not yet created.
 */
private void initializeUkmRecorderBridge() {
    if (mUkmRecorderBridge == null) {
        mUkmRecorderBridge = new UkmRecorder.Bridge();
    }
}

/**
 * Record a boolean UKM if the lens feature is enabled.
 * @param eventName The name of the UKM event to record.
 * @param metricName The name of the UKM metric to record.
 */
private void maybeRecordBooleanUkm(String eventName, String metricName) {
    // Disable UKM reporting when incognito.
    if (mItemDelegate.isIncognito()) return;
    initializeUkmRecorderBridge();
    WebContents webContents = mItemDelegate.getWebContents();
    if (webContents != null) {
        mUkmRecorderBridge.recordEventWithBooleanMetric(webContents, eventName, metricName);
    }
}

/**
 * Record a UKM for a menu action if the lens feature is enabled.
 * @param eventName The name of the boolean UKM event to record.
 * @param actionId The id of the action corresponding the ContextMenuUma.Action enum.
 */
private void maybeRecordActionUkm(String eventName, int actionId) {
    // Disable UKM reporting when incognito.
    if (mItemDelegate.isIncognito()) return;
    initializeUkmRecorderBridge();
    WebContents webContents = mItemDelegate.getWebContents();
    if (webContents != null) {
        mUkmRecorderBridge.recordEventWithIntegerMetric(
                webContents, eventName, "Action", actionId);
    }
}

/**
 * Check if the search by image is supported.
 * @param isSrcDownloadableScheme Whether the source url has a downloadable scheme.
 * @return True if search by image is supported.
 */
private boolean checkSupportsGoogleSearchByImage(boolean isSrcDownloadableScheme) {
    final TemplateUrlService templateUrlServiceInstance = getTemplateUrlService();
    return isSrcDownloadableScheme && templateUrlServiceInstance.isLoaded()
            && templateUrlServiceInstance.isSearchByImageAvailable()
            && templateUrlServiceInstance.getDefaultSearchEngineTemplateUrl() != null
            && !LocaleManager.getInstance().needToCheckForSearchEnginePromo();
}

/** Returns the profile of the current tab via the item delegate.
*/
private Profile getProfile() {
    // Resolve the profile from the web contents owned by the item delegate.
    WebContents webContents = mItemDelegate.getWebContents();
    return Profile.fromWebContents(webContents);
}
}
/*
	BASSMIDI 2.4 Java class
	Copyright (c) 2006-2013 Un4seen Developments Ltd.

	See the BASSMIDI.CHM file for more detailed documentation
*/

package com.un4seen.bass;

import java.nio.ByteBuffer;

/**
 * JNI binding for the BASSMIDI add-on of the BASS audio library.
 *
 * <p>This class only declares integer constants mirroring the native
 * BASSMIDI.H header, plain data-holder classes that the native side fills
 * in, and {@code native} entry points implemented by {@code libbassmidi}.
 * The native library is loaded once in the static initializer; constant
 * values must stay byte-identical to the C header or the JNI calls will
 * misbehave.
 */
public class BASSMIDI
{
	// Additional BASS_SetConfig options
	public static final int BASS_CONFIG_MIDI_COMPACT = 0x10400;
	public static final int BASS_CONFIG_MIDI_VOICES = 0x10401;
	public static final int BASS_CONFIG_MIDI_AUTOFONT = 0x10402;

	// Additional BASS_SetConfigPtr options
	public static final int BASS_CONFIG_MIDI_DEFFONT = 0x10403;

	// Additional sync types (note: MARK and MARKER are intentionally the same value)
	public static final int BASS_SYNC_MIDI_MARK = 0x10000;
	public static final int BASS_SYNC_MIDI_MARKER = 0x10000;
	public static final int BASS_SYNC_MIDI_CUE = 0x10001;
	public static final int BASS_SYNC_MIDI_LYRIC = 0x10002;
	public static final int BASS_SYNC_MIDI_TEXT = 0x10003;
	public static final int BASS_SYNC_MIDI_EVENT = 0x10004;
	public static final int BASS_SYNC_MIDI_TICK = 0x10005;
	public static final int BASS_SYNC_MIDI_TIMESIG = 0x10006;
	public static final int BASS_SYNC_MIDI_KEYSIG = 0x10007;

	// Additional BASS_MIDI_StreamCreateFile/etc flags
	public static final int BASS_MIDI_DECAYEND = 0x1000;
	public static final int BASS_MIDI_NOFX = 0x2000;
	public static final int BASS_MIDI_DECAYSEEK = 0x4000;
	public static final int BASS_MIDI_NOCROP = 0x8000;
	public static final int BASS_MIDI_SINCINTER = 0x800000;

	// BASS_MIDI_FontInit flags
	public static final int BASS_MIDI_FONT_MEM = 0x10000;
	public static final int BASS_MIDI_FONT_MMAP = 0x20000;

	// BASS_MIDI_StreamSet/GetFonts flag
	public static final int BASS_MIDI_FONT_EX = 0x1000000; // BASS_MIDI_FONTEX (auto-detected)

	// Marker types
	public static final int BASS_MIDI_MARK_MARKER = 0;	// marker
	public static final int BASS_MIDI_MARK_CUE = 1;		// cue point
	public static final int BASS_MIDI_MARK_LYRIC = 2;	// lyric
	public static final int BASS_MIDI_MARK_TEXT = 3;	// text
	public static final int BASS_MIDI_MARK_TIMESIG = 4;	// time signature
	public static final int BASS_MIDI_MARK_KEYSIG = 5;	// key signature
	public static final int BASS_MIDI_MARK_COPY = 6;	// copyright notice
	public static final int BASS_MIDI_MARK_TRACK = 7;	// track name
	public static final int BASS_MIDI_MARK_INST = 8;	// instrument name
	public static final int BASS_MIDI_MARK_TICK = 0x10000; // FLAG: get position in ticks (otherwise bytes)

	// MIDI events
	public static final int MIDI_EVENT_NOTE = 1;
	public static final int MIDI_EVENT_PROGRAM = 2;
	public static final int MIDI_EVENT_CHANPRES = 3;
	public static final int MIDI_EVENT_PITCH = 4;
	public static final int MIDI_EVENT_PITCHRANGE = 5;
	public static final int MIDI_EVENT_DRUMS = 6;
	public static final int MIDI_EVENT_FINETUNE = 7;
	public static final int MIDI_EVENT_COARSETUNE = 8;
	public static final int MIDI_EVENT_MASTERVOL = 9;
	public static final int MIDI_EVENT_BANK = 10;
	public static final int MIDI_EVENT_MODULATION = 11;
	public static final int MIDI_EVENT_VOLUME = 12;
	public static final int MIDI_EVENT_PAN = 13;
	public static final int MIDI_EVENT_EXPRESSION = 14;
	public static final int MIDI_EVENT_SUSTAIN = 15;
	public static final int MIDI_EVENT_SOUNDOFF = 16;
	public static final int MIDI_EVENT_RESET = 17;
	public static final int MIDI_EVENT_NOTESOFF = 18;
	public static final int MIDI_EVENT_PORTAMENTO = 19;
	public static final int MIDI_EVENT_PORTATIME = 20;
	public static final int MIDI_EVENT_PORTANOTE = 21;
	public static final int MIDI_EVENT_MODE = 22;
	public static final int MIDI_EVENT_REVERB = 23;
	public static final int MIDI_EVENT_CHORUS = 24;
	public static final int MIDI_EVENT_CUTOFF = 25;
	public static final int MIDI_EVENT_RESONANCE = 26;
	public static final int MIDI_EVENT_RELEASE = 27;
	public static final int MIDI_EVENT_ATTACK = 28;
	public static final int MIDI_EVENT_REVERB_MACRO = 30;
	public static final int MIDI_EVENT_CHORUS_MACRO = 31;
	public static final int MIDI_EVENT_REVERB_TIME = 32;
	public static final int MIDI_EVENT_REVERB_DELAY = 33;
	public static final int MIDI_EVENT_REVERB_LOCUTOFF = 34;
	public static final int MIDI_EVENT_REVERB_HICUTOFF = 35;
	public static final int MIDI_EVENT_REVERB_LEVEL = 36;
	public static final int MIDI_EVENT_CHORUS_DELAY = 37;
	public static final int MIDI_EVENT_CHORUS_DEPTH = 38;
	public static final int MIDI_EVENT_CHORUS_RATE = 39;
	public static final int MIDI_EVENT_CHORUS_FEEDBACK = 40;
	public static final int MIDI_EVENT_CHORUS_LEVEL = 41;
	public static final int MIDI_EVENT_CHORUS_REVERB = 42;
	public static final int MIDI_EVENT_DRUM_FINETUNE = 50;
	public static final int MIDI_EVENT_DRUM_COARSETUNE = 51;
	public static final int MIDI_EVENT_DRUM_PAN = 52;
	public static final int MIDI_EVENT_DRUM_REVERB = 53;
	public static final int MIDI_EVENT_DRUM_CHORUS = 54;
	public static final int MIDI_EVENT_DRUM_CUTOFF = 55;
	public static final int MIDI_EVENT_DRUM_RESONANCE = 56;
	public static final int MIDI_EVENT_DRUM_LEVEL = 57;
	public static final int MIDI_EVENT_SOFT = 60;
	public static final int MIDI_EVENT_SYSTEM = 61;
	public static final int MIDI_EVENT_TEMPO = 62;
	public static final int MIDI_EVENT_SCALETUNING = 63;
	public static final int MIDI_EVENT_CONTROL = 64;
	public static final int MIDI_EVENT_CHANPRES_VIBRATO = 65;
	public static final int MIDI_EVENT_CHANPRES_PITCH = 66;
	public static final int MIDI_EVENT_CHANPRES_FILTER = 67;
	public static final int MIDI_EVENT_CHANPRES_VOLUME = 68;
	public static final int MIDI_EVENT_MODRANGE = 69;
	public static final int MIDI_EVENT_BANK_LSB = 70;
	public static final int MIDI_EVENT_MIXLEVEL = 0x10000;
	public static final int MIDI_EVENT_TRANSPOSE = 0x10001;
	public static final int MIDI_EVENT_SYSTEMEX = 0x10002;
	public static final int MIDI_EVENT_END = 0;
	public static final int MIDI_EVENT_END_TRACK = 0x10003;

	// MIDI_EVENT_SYSTEM values
	public static final int MIDI_SYSTEM_DEFAULT = 0;
	public static final int MIDI_SYSTEM_GM1 = 1;
	public static final int MIDI_SYSTEM_GM2 = 2;
	public static final int MIDI_SYSTEM_XG = 3;
	public static final int MIDI_SYSTEM_GS = 4;

	// BASS_MIDI_StreamEvents modes
	public static final int BASS_MIDI_EVENTS_STRUCT = 0;		// BASS_MIDI_EVENT structures
	public static final int BASS_MIDI_EVENTS_RAW = 0x10000;		// raw MIDI event data
	public static final int BASS_MIDI_EVENTS_SYNC = 0x1000000;	// FLAG: trigger event syncs

	// BASS_CHANNELINFO type
	public static final int BASS_CTYPE_STREAM_MIDI = 0x10d00;

	// Additional attributes
	public static final int BASS_ATTRIB_MIDI_PPQN = 0x12000;
	public static final int BASS_ATTRIB_MIDI_CPU = 0x12001;
	public static final int BASS_ATTRIB_MIDI_CHANS = 0x12002;
	public static final int BASS_ATTRIB_MIDI_VOICES = 0x12003;
	public static final int BASS_ATTRIB_MIDI_VOICES_ACTIVE = 0x12004;
	public static final int BASS_ATTRIB_MIDI_TRACK_VOL = 0x12100; // + track #

	// Additional tag type
	public static final int BASS_TAG_MIDI_TRACK = 0x11000; // + track #, track text : array of null-terminated ANSI strings

	// BASS_ChannelGetLength/GetPosition/SetPosition mode
	public static final int BASS_POS_MIDI_TICK = 2; // tick position

	// Load the native BASSMIDI library once when the class is first used;
	// all native methods below resolve against it.
	static {
		System.loadLibrary("bassmidi");
	}

	// Stream creation
	public static native int BASS_MIDI_StreamCreate(int channels, int flags, int freq);
	public static native int BASS_MIDI_StreamCreateFile(String file, long offset, long length, int flags, int freq);
	public static native int BASS_MIDI_StreamCreateFile(ByteBuffer file, long offset, long length, int flags, int freq);
	public static native int BASS_MIDI_StreamCreateURL(String url, int offset, int flags, BASS.DOWNLOADPROC proc, Object user, int freq);
	public static native int BASS_MIDI_StreamCreateFileUser(int system, int flags, BASS.BASS_FILEPROCS procs, Object user, int freq);
	public static native int BASS_MIDI_StreamCreateEvents(BASS_MIDI_EVENT[] events, int ppqn, int flags, int freq);

	// Markers and soundfont management on a stream
	public static native boolean BASS_MIDI_StreamGetMark(int handle, int type, int index, BASS_MIDI_MARK mark);
	public static native int BASS_MIDI_StreamGetMarks(int handle, int track, int type, BASS_MIDI_MARK[] marks);
	public static native boolean BASS_MIDI_StreamSetFonts(int handle, BASS_MIDI_FONT[] fonts, int count);
	public static native boolean BASS_MIDI_StreamSetFonts(int handle, BASS_MIDI_FONTEX[] fonts, int count);
	public static native int BASS_MIDI_StreamGetFonts(int handle, BASS_MIDI_FONT[] fonts, int count);
	public static native int BASS_MIDI_StreamGetFonts(int handle, BASS_MIDI_FONTEX[] fonts, int count);
	public static native boolean BASS_MIDI_StreamLoadSamples(int handle);

	// Event injection and inspection
	public static native boolean BASS_MIDI_StreamEvent(int handle, int chan, int event, int param);
	public static native int BASS_MIDI_StreamEvents(int handle, int mode, BASS_MIDI_EVENT[] events, int length);
	public static native int BASS_MIDI_StreamEvents(int handle, int mode, ByteBuffer events, int length);
	public static native int BASS_MIDI_StreamGetEvent(int handle, int chan, int event);
	public static native int BASS_MIDI_StreamGetEvents(int handle, int track, int filter, BASS_MIDI_EVENT[] events);

	// Soundfont handling
	public static native int BASS_MIDI_FontInit(String file, int flags);
	public static native int BASS_MIDI_FontInit(ByteBuffer file, int flags);
	public static native int BASS_MIDI_FontInitUser(BASS.BASS_FILEPROCS procs, Object user, int flags);
	public static native boolean BASS_MIDI_FontFree(int handle);
	public static native boolean BASS_MIDI_FontGetInfo(int handle, BASS_MIDI_FONTINFO info);
//	public static native int BASS_MIDI_StreamGetChannel(int handle, int chan);
	public static native boolean BASS_MIDI_FontGetPresets(int handle, int[] presets);
	public static native String BASS_MIDI_FontGetPreset(int handle, int preset, int bank);
	public static native boolean BASS_MIDI_FontLoad(int handle, int preset, int bank);
	public static native boolean BASS_MIDI_FontUnload(int handle, int preset, int bank);
	public static native boolean BASS_MIDI_FontCompact(int handle);
	public static native boolean BASS_MIDI_FontUnpack(int handle, String outfile, int flags);
	public static native boolean BASS_MIDI_FontSetVolume(int handle, float volume);
	public static native float BASS_MIDI_FontGetVolume(int handle);

	/** Soundfont configuration entry (mirrors native BASS_MIDI_FONT). */
	public static class BASS_MIDI_FONT {
		public int font;	// soundfont
		public int preset;	// preset number (-1=all)
		public int bank;
	}

	/** Extended soundfont configuration entry (mirrors native BASS_MIDI_FONTEX). */
	public static class BASS_MIDI_FONTEX {
		public int font;		// soundfont
		public int spreset;		// source preset number
		public int sbank;		// source bank number
		public int dpreset;		// destination preset/program number
		public int dbank;		// destination bank number
		public int dbanklsb;	// destination bank number LSB
	}

	/** Soundfont information, filled in by BASS_MIDI_FontGetInfo. */
	public static class BASS_MIDI_FONTINFO {
		public String name;
		public String copyright;
		public String comment;
		public int presets;		// number of presets/instruments
		public int samsize;		// total size (in bytes) of the sample data
		public int samload;		// amount of sample data currently loaded
		public int samtype;		// sample format (CTYPE) if packed
	}

	/** A MIDI marker/lyric/text entry, filled in by BASS_MIDI_StreamGetMark(s). */
	public static class BASS_MIDI_MARK {
		public int track;	// track containing marker
		public int pos;		// marker position
		public String text;	// marker text
	}

	/** A single MIDI event (mirrors native BASS_MIDI_EVENT). */
	public static class BASS_MIDI_EVENT {
		public int event;	// MIDI_EVENT_xxx
		public int param;
		public int chan;
		public int tick;	// event position (ticks)
		public int pos;		// event position (bytes)
	}

	/** MIDI device description. */
	public static class BASS_MIDI_DEVICEINFO {
		public String name;	// description
		public int id;
		public int flags;
	}
}
/**
 */
package gluemodel.CIM.IEC61970.Informative.InfERPSupport.impl;

import gluemodel.CIM.IEC61968.Assets.Asset;
import gluemodel.CIM.IEC61968.Assets.AssetsPackage;
import gluemodel.CIM.IEC61968.Common.Status;

import gluemodel.CIM.IEC61970.Core.impl.IdentifiedObjectImpl;

import gluemodel.CIM.IEC61970.Informative.InfERPSupport.ErpItemMaster;
import gluemodel.CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Erp Item Master</b></em>'.
 *
 * EMF-generated implementation: holds a unidirectional {@code status}
 * reference and a bidirectional {@code asset} reference (the inverse end is
 * {@code Asset#getErpItemMaster}). Do not hand-edit methods marked
 * {@code @generated} — they are overwritten on regeneration.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link gluemodel.CIM.IEC61970.Informative.InfERPSupport.impl.ErpItemMasterImpl#getStatus <em>Status</em>}</li>
 *   <li>{@link gluemodel.CIM.IEC61970.Informative.InfERPSupport.impl.ErpItemMasterImpl#getAsset <em>Asset</em>}</li>
 * </ul>
 *
 * @generated
 */
public class ErpItemMasterImpl extends IdentifiedObjectImpl implements ErpItemMaster {
	/**
	 * The cached value of the '{@link #getStatus() <em>Status</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStatus()
	 * @generated
	 * @ordered
	 */
	protected Status status;

	/**
	 * The cached value of the '{@link #getAsset() <em>Asset</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getAsset()
	 * @generated
	 * @ordered
	 */
	protected Asset asset;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ErpItemMasterImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return InfERPSupportPackage.Literals.ERP_ITEM_MASTER;
	}

	/**
	 * Returns the status reference, resolving it first if it is still an
	 * unresolved EMF proxy (and firing a RESOLVE notification if it changed).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Status getStatus() {
		if (status != null && status.eIsProxy()) {
			InternalEObject oldStatus = (InternalEObject)status;
			status = (Status)eResolveProxy(oldStatus);
			if (status != oldStatus) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, InfERPSupportPackage.ERP_ITEM_MASTER__STATUS, oldStatus, status));
			}
		}
		return status;
	}

	/**
	 * Returns the status reference without proxy resolution.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Status basicGetStatus() {
		return status;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStatus(Status newStatus) {
		Status oldStatus = status;
		status = newStatus;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, InfERPSupportPackage.ERP_ITEM_MASTER__STATUS, oldStatus, status));
	}

	/**
	 * Returns the asset reference, resolving it first if it is still an
	 * unresolved EMF proxy (and firing a RESOLVE notification if it changed).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Asset getAsset() {
		if (asset != null && asset.eIsProxy()) {
			InternalEObject oldAsset = (InternalEObject)asset;
			asset = (Asset)eResolveProxy(oldAsset);
			if (asset != oldAsset) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, InfERPSupportPackage.ERP_ITEM_MASTER__ASSET, oldAsset, asset));
			}
		}
		return asset;
	}

	/**
	 * Returns the asset reference without proxy resolution.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Asset basicGetAsset() {
		return asset;
	}

	/**
	 * Sets the asset field without touching the inverse end; accumulates the
	 * SET notification onto the given chain instead of firing it immediately.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetAsset(Asset newAsset, NotificationChain msgs) {
		Asset oldAsset = asset;
		asset = newAsset;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, InfERPSupportPackage.ERP_ITEM_MASTER__ASSET, oldAsset, newAsset);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * Sets the asset reference, maintaining the bidirectional inverse on the
	 * old and new Asset before dispatching all accumulated notifications.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setAsset(Asset newAsset) {
		if (newAsset != asset) {
			NotificationChain msgs = null;
			if (asset != null)
				msgs = ((InternalEObject)asset).eInverseRemove(this, AssetsPackage.ASSET__ERP_ITEM_MASTER, Asset.class, msgs);
			if (newAsset != null)
				msgs = ((InternalEObject)newAsset).eInverseAdd(this, AssetsPackage.ASSET__ERP_ITEM_MASTER, Asset.class, msgs);
			msgs = basicSetAsset(newAsset, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			// "touch" notification: value unchanged but a set occurred
			eNotify(new ENotificationImpl(this, Notification.SET, InfERPSupportPackage.ERP_ITEM_MASTER__ASSET, newAsset, newAsset));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case InfERPSupportPackage.ERP_ITEM_MASTER__ASSET:
				if (asset != null)
					msgs = ((InternalEObject)asset).eInverseRemove(this, AssetsPackage.ASSET__ERP_ITEM_MASTER, Asset.class, msgs);
				return basicSetAsset((Asset)otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case InfERPSupportPackage.ERP_ITEM_MASTER__ASSET:
				return basicSetAsset(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * Reflective getter; {@code resolve} chooses proxy-resolving vs basic access.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case InfERPSupportPackage.ERP_ITEM_MASTER__STATUS:
				if (resolve) return getStatus();
				return basicGetStatus();
			case InfERPSupportPackage.ERP_ITEM_MASTER__ASSET:
				if (resolve) return getAsset();
				return basicGetAsset();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case InfERPSupportPackage.ERP_ITEM_MASTER__STATUS:
				setStatus((Status)newValue);
				return;
			case InfERPSupportPackage.ERP_ITEM_MASTER__ASSET:
				setAsset((Asset)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case InfERPSupportPackage.ERP_ITEM_MASTER__STATUS:
				setStatus((Status)null);
				return;
			case InfERPSupportPackage.ERP_ITEM_MASTER__ASSET:
				setAsset((Asset)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case InfERPSupportPackage.ERP_ITEM_MASTER__STATUS:
				return status != null;
			case InfERPSupportPackage.ERP_ITEM_MASTER__ASSET:
				return asset != null;
		}
		return super.eIsSet(featureID);
	}

} //ErpItemMasterImpl
/* * Licensed to the soi-toolkit project under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The soi-toolkit project licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.soitoolkit.tools.generator.cli.util; /** * Based on an article published at: http://www.javaworld.com/javaworld/jw-08-2004/jw-0816-command.html * * This class holds all the data for an option. This includes the prefix, the key, the separator * (for value options), the multiplicity, and all the other settings describing the option. The class * is designed to be only a data container from a user perspective, i. e. the user has read-access to * any data determined by the {@link Options#check()}, but not access to any of the other methods * which are used internally for the operation of the actual check. 
*/ public class OptionData { private final static String CLASS = "OptionData"; private Options.Prefix prefix = null; private String key = null; private boolean detail = false; private Options.Separator separator = null; private boolean value = false; private Options.Multiplicity multiplicity = null; private java.util.regex.Pattern pattern = null; private int counter = 0; private java.util.ArrayList<String> values = null; private java.util.ArrayList<String> details = null; /** * The constructor */ OptionData(Options.Prefix prefix, String key, boolean detail, Options.Separator separator, boolean value, Options.Multiplicity multiplicity) { if (prefix == null) throw new IllegalArgumentException(CLASS + ": prefix may not be null"); if (key == null) throw new IllegalArgumentException(CLASS + ": key may not be null"); if (separator == null) throw new IllegalArgumentException(CLASS + ": separator may not be null"); if (multiplicity == null) throw new IllegalArgumentException(CLASS + ": multiplicity may not be null"); //.... The data describing the option this.prefix = prefix; this.key = key; this.detail = detail; this.separator = separator; this.value = value; this.multiplicity = multiplicity; //.... Create the pattern to match this option if (value) { if (separator == Options.Separator.BLANK) { if (detail) { pattern = java.util.regex.Pattern.compile(prefix.getName() + key + "((\\w|\\.)+)$"); } else { pattern = java.util.regex.Pattern.compile(prefix.getName() + key + "$"); } } else { if (detail) { pattern = java.util.regex.Pattern.compile(prefix.getName() + key + "((\\w|\\.)+)" + separator.getName() + "(.+)$"); } else { pattern = java.util.regex.Pattern.compile(prefix.getName() + key + separator.getName() + "(.+)$"); } } } else { pattern = java.util.regex.Pattern.compile(prefix.getName() + key + "$"); } //.... 
Structures to hold result data if (value) { values = new java.util.ArrayList<String>(); if (detail) details = new java.util.ArrayList<String>(); } } /** * Getter method for <code>prefix</code> property * <p> * @return The value for the <code>prefix</code> property */ Options.Prefix getPrefix() { return prefix; } /** * Getter method for <code>key</code> property * <p> * @return The value for the <code>key</code> property */ String getKey() { return key; } /** * Getter method for <code>detail</code> property * <p> * @return The value for the <code>detail</code> property */ boolean useDetail() { return detail; } /** * Getter method for <code>separator</code> property * <p> * @return The value for the <code>separator</code> property */ Options.Separator getSeparator() { return separator; } /** * Getter method for <code>value</code> property * <p> * @return The value for the <code>value</code> property */ boolean useValue() { return value; } /** * Getter method for <code>multiplicity</code> property * <p> * @return The value for the <code>multiplicity</code> property */ Options.Multiplicity getMultiplicity() { return multiplicity; } /** * Getter method for <code>pattern</code> property * <p> * @return The value for the <code>pattern</code> property */ java.util.regex.Pattern getPattern() { return pattern; } /** * Get the number of results found for this option, which is number of times the key matched * <p> * @return The number of results */ public int getResultCount() { if (value) { return values.size(); } else { return counter; } } /** * Get the value with the given index. The index can range between 0 and {@link #getResultCount()}<code> - 1</code>. * However, only for value options, a non-<code>null</code> value will be returned. Non-value options always * return <code>null</code>. 
* <p> * @param index The index for the desired value * <p> * @return The option value with the given index * <p> * @throws IllegalArgumentException If the value for <code>index</code> is out of bounds */ public String getResultValue(int index) { if (!value) return null; if (index < 0 || index >= getResultCount()) throw new IllegalArgumentException(CLASS + ": illegal value for index"); return values.get(index); } /** * Get the detail with the given index. The index can range between 0 and {@link #getResultCount()}<code> - 1</code>. * However, only for value options which take details, a non-<code>null</code> detail will be returned. Non-value options * and value options which do not take details always return <code>null</code>. * <p> * @param index The index for the desired value * <p> * @return The option detail with the given index * <p> * @throws IllegalArgumentException If the value for <code>index</code> is out of bounds */ public String getResultDetail(int index) { if (!detail) return null; if (index < 0 || index >= getResultCount()) throw new IllegalArgumentException(CLASS + ": illegal value for index"); return details.get(index); } /** * Store the data for a match found */ void addResult(String valueData, String detailData) { if (value) { if (valueData == null) throw new IllegalArgumentException(CLASS + ": valueData may not be null"); values.add(valueData); if (detail) { if (detailData == null) throw new IllegalArgumentException(CLASS + ": detailData may not be null"); details.add(detailData); } } counter++; } /** * This is the overloaded {@link Object#toString()} method, and it is provided mainly for debugging * purposes. 
* <p> * @return A string representing the instance */ public String toString() { StringBuffer sb = new StringBuffer(); sb.append("Prefix : "); sb.append(prefix); sb.append('\n'); sb.append("Key : "); sb.append(key); sb.append('\n'); sb.append("Detail : "); sb.append(detail); sb.append('\n'); sb.append("Separator : "); sb.append(separator); sb.append('\n'); sb.append("Value : "); sb.append(value); sb.append('\n'); sb.append("Multiplicity: "); sb.append(multiplicity); sb.append('\n'); sb.append("Pattern : "); sb.append(pattern); sb.append('\n'); sb.append("Results : "); sb.append(counter); sb.append('\n'); if (value) { if (detail) { for (int i = 0; i < values.size(); i++) { sb.append(details.get(i)); sb.append(" / "); sb.append(values.get(i)); sb.append('\n'); } } else { for (int i = 0; i < values.size(); i++) { sb.append(values.get(i)); sb.append('\n'); } } } return sb.toString(); } }
/* * * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.wso2.carbon.apimgt.rest.api.publisher.impl; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.APIProvider; import org.wso2.carbon.apimgt.api.dto.UserApplicationAPIUsage; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.SubscribedAPI; import org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.rest.api.publisher.SubscriptionsApiService; import org.wso2.carbon.apimgt.rest.api.publisher.dto.ExtendedSubscriptionDTO; import org.wso2.carbon.apimgt.rest.api.publisher.dto.SubscriptionDTO; import org.wso2.carbon.apimgt.rest.api.publisher.dto.SubscriptionListDTO; import org.wso2.carbon.apimgt.rest.api.publisher.utils.mappings.APIMappingUtil; import org.wso2.carbon.apimgt.rest.api.publisher.utils.mappings.SubscriptionMappingUtil; import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants; import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil; import java.io.UnsupportedEncodingException; import java.util.List; import javax.ws.rs.core.Response; /** This is the service implementation class for Publisher subscriptions related operations * */ public class SubscriptionsApiServiceImpl extends SubscriptionsApiService { private static final Log log = 
LogFactory.getLog(SubscriptionsApiService.class); /** Retieves all subscriptions or retrieves subscriptions for a given API Id * * @param apiId API identifier * @param limit max number of objects returns * @param offset starting index * @param accept accepted media type of the client * @param ifNoneMatch If-None-Match header value * @return Response object containing resulted subscriptions */ @Override public Response subscriptionsGet(String apiId, Integer limit, Integer offset, String accept, String ifNoneMatch) { //pre-processing //setting default limit and offset if they are null limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT; offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT; String username = RestApiUtil.getLoggedInUsername(); String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain(); try { APIProvider apiProvider = RestApiUtil.getProvider(username); SubscriptionListDTO subscriptionListDTO; if (apiId != null) { APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromApiIdOrUUID(apiId, tenantDomain); List<SubscribedAPI> apiUsages = apiProvider.getAPIUsageByAPIId(apiIdentifier); subscriptionListDTO = SubscriptionMappingUtil.fromSubscriptionListToDTO(apiUsages, limit, offset); SubscriptionMappingUtil.setPaginationParams(subscriptionListDTO, apiId, "", limit, offset, apiUsages.size()); } else { UserApplicationAPIUsage[] allApiUsage = apiProvider.getAllAPIUsageByProvider(username); subscriptionListDTO = SubscriptionMappingUtil.fromUserApplicationAPIUsageArrayToDTO(allApiUsage, limit, offset); SubscriptionMappingUtil.setPaginationParams(subscriptionListDTO, "", "", limit, offset, allApiUsage.length); } return Response.ok().entity(subscriptionListDTO).build(); } catch (APIManagementException | UnsupportedEncodingException e) { //Auth failure occurs when cross tenant accessing APIs. 
Sends 404, since we don't need to expose the // existence of the resource if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String msg = "Error while retrieving subscriptions of API " + apiId; RestApiUtil.handleInternalServerError(msg, e, log); } } return null; } /** * Blocks a subscription * * @param subscriptionId Subscription identifier * @param blockState block state; either BLOCKED or PROD_ONLY_BLOCKED * @param ifMatch If-Match header value * @param ifUnmodifiedSince If-Unmodified-Since header value * @return 200 response if successfully blocked the subscription */ @Override public Response subscriptionsBlockSubscriptionPost(String subscriptionId, String blockState, String ifMatch, String ifUnmodifiedSince) { String username = RestApiUtil.getLoggedInUsername(); APIProvider apiProvider; try { apiProvider = RestApiUtil.getProvider(username); //validates the subscriptionId if it exists SubscribedAPI currentSubscription = apiProvider.getSubscriptionByUUID(subscriptionId); if (currentSubscription == null) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_SUBSCRIPTION, subscriptionId, log); } SubscribedAPI subscribedAPI = new SubscribedAPI(subscriptionId); subscribedAPI.setSubStatus(blockState); apiProvider.updateSubscription(subscribedAPI); SubscribedAPI updatedSubscription = apiProvider.getSubscriptionByUUID(subscriptionId); SubscriptionDTO subscriptionDTO = SubscriptionMappingUtil.fromSubscriptionToDTO(updatedSubscription); return Response.ok().entity(subscriptionDTO).build(); } catch (APIManagementException | UnsupportedEncodingException e) { String msg = "Error while blocking the subscription " + subscriptionId; RestApiUtil.handleInternalServerError(msg, e, log); } return null; } /** * Unblocks a subscription * * @param subscriptionId subscription identifier * @param ifMatch If-Match header value * @param 
ifUnmodifiedSince If-Unmodified-Since header value * @return 200 response if successfully unblocked the subscription */ @Override public Response subscriptionsUnblockSubscriptionPost(String subscriptionId, String ifMatch, String ifUnmodifiedSince) { String username = RestApiUtil.getLoggedInUsername(); APIProvider apiProvider; try { apiProvider = RestApiUtil.getProvider(username); //validates the subscriptionId if it exists SubscribedAPI currentSubscription = apiProvider.getSubscriptionByUUID(subscriptionId); if (currentSubscription == null) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_SUBSCRIPTION, subscriptionId, log); } SubscribedAPI subscribedAPI = new SubscribedAPI(subscriptionId); subscribedAPI.setSubStatus(APIConstants.SubscriptionStatus.UNBLOCKED); apiProvider.updateSubscription(subscribedAPI); SubscribedAPI updatedSubscribedAPI = apiProvider.getSubscriptionByUUID(subscriptionId); SubscriptionDTO subscriptionDTO = SubscriptionMappingUtil.fromSubscriptionToDTO(updatedSubscribedAPI); return Response.ok().entity(subscriptionDTO).build(); } catch (APIManagementException | UnsupportedEncodingException e) { String msg = "Error while unblocking the subscription " + subscriptionId; RestApiUtil.handleInternalServerError(msg, e, log); } return null; } /** * Gets a subscription by identifier * * @param subscriptionId subscription identifier * @param accept Accept header value * @param ifNoneMatch If-None-Match header value * @param ifModifiedSince If-Modified-Since header value * @return matched subscription as a SubscriptionDTO */ @Override public Response subscriptionsSubscriptionIdGet(String subscriptionId, String accept, String ifNoneMatch, String ifModifiedSince) { String username = RestApiUtil.getLoggedInUsername(); APIProvider apiProvider; try { apiProvider = RestApiUtil.getProvider(username); SubscribedAPI subscribedAPI = apiProvider.getSubscriptionByUUID(subscriptionId); if (subscribedAPI != null) { String externalWorkflowRefId = null; 
try { externalWorkflowRefId = apiProvider.getExternalWorkflowReferenceId(subscribedAPI.getSubscriptionId()); } catch (APIManagementException e) { // need not fail if querying workflow reference id throws and error; log and continue log.error("Error while retrieving external workflow reference for subscription id: " + subscriptionId, e); } ExtendedSubscriptionDTO subscriptionDTO = SubscriptionMappingUtil. fromSubscriptionToExtendedSubscriptionDTO(subscribedAPI, externalWorkflowRefId); return Response.ok().entity(subscriptionDTO).build(); } else { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_SUBSCRIPTION, subscriptionId, log); } } catch (APIManagementException | UnsupportedEncodingException e) { String msg = "Error while getting the subscription " + subscriptionId; RestApiUtil.handleInternalServerError(msg, e, log); } return null; } }
import java.io.*;
import java.util.*;
import javax.comm.*;

/**
 * Serial-port listener that watches a keypad/alarm panel on COM1 via the
 * javax.comm API. {@code main} locates the port, configures it (9600 8E1)
 * and registers this class as the data-available event listener; incoming
 * frames are parsed in {@link #serialEvent}.
 *
 * NOTE(review): class name violates Java naming conventions (should be
 * UpperCamelCase) but cannot be renamed without touching callers/build.
 * NOTE(review): many empty catch blocks silently swallow setup failures —
 * presumably intentional best-effort behavior in this legacy tool.
 */
public class listener implements Runnable, SerialPortEventListener {
//public class listener {
    // Latest 32-character panel display text (two 16-char lines).
    public static String keyPadMsgs;
    // Guards against registering more than one event listener/thread.
    public static boolean listening;
    static CommPortIdentifier portId;
    static Enumeration portList;
/* static OutputStream outputStream; static InputStream inputStream; static SerialPort serialPort; */
    static OutputStream outputStream;
    static InputStream inputStream;
    static SerialPort serialPort;
    Thread readThread=null;
    static PrintWriter outWml;
    static PrintWriter outHtml;
    // NOTE(review): the fields below are unused in the visible code —
    // presumably leftovers from a send/checksum feature.
    static int SndCkSum;
    static int SndKey;
    static int SndKeyOff;
    static int CkSum;
    static int Key;
    static int keyoff;
    static int i;

    /**
     * Entry point: scans all serial ports, opens COM1, configures it for
     * 9600 baud / 8 data bits / 1 stop bit / even parity, waits for CTS,
     * then constructs a listener instance (which starts the reader thread).
     */
    public static void main(String[] args) {
        // listener test = new listener();
        listening=false;
        portList = CommPortIdentifier.getPortIdentifiers();
        while (portList.hasMoreElements()) {
            portId = (CommPortIdentifier) portList.nextElement();
            if (portId.getPortType() == CommPortIdentifier.PORT_SERIAL) {
                if (portId.getName().equals("COM1")) {
                //if (portId.getName().equals("/dev/term/a")) {
                    // listener reader = new listener();
                    try {
                        serialPort = (SerialPort) portId.open("SimpleReadApp", 2000);
                    } catch (PortInUseException e) {}
                    try {
                        outputStream = serialPort.getOutputStream();
                    } catch (IOException e) {}
                    try {
                        inputStream = serialPort.getInputStream();
                    } catch (IOException e) {}
                    try {
                        serialPort.setSerialPortParams(9600,
                                SerialPort.DATABITS_8,
                                SerialPort.STOPBITS_1,
                                SerialPort.PARITY_EVEN);
                    } catch (UnsupportedCommOperationException e) {}
                    // NOTE(review): busy-wait with console spam until the
                    // device raises CTS — no sleep, burns CPU.
                    while (serialPort.isCTS() == false) {
                        System.out.print("CTS false, looping\n");
                    }
                    listener reader = new listener();
                }
            }
        }
    }

    /************ Thread ***********/
    /**
     * Registers this object as the port's event listener and starts the
     * background reader thread; the {@code listening} flag makes this a
     * one-shot operation.
     */
    public listener() {
        if (!listening) {
            System.out.println("Starting thread");
            try {
                serialPort.addEventListener(this);
            } catch (TooManyListenersException e) {}
            serialPort.notifyOnDataAvailable(true);
            readThread = new Thread(this);
            readThread.start();
            listening=true;
        } else
            System.out.println("Not starting thread");
    }

    /** Reader thread body: sleeps so the JVM stays alive for serial events. */
    public void run() {
        try {
            Thread.sleep(20000);
        } catch (InterruptedException e) {}
    }

    /**
     * Serial event callback. Only DATA_AVAILABLE is handled: the stream is
     * scanned for one of two frame headers — 0x03 0x25 (panel display
     * update) or 0x01 0x06 (real-time status) — and the corresponding
     * payload is read. Unrecognized bytes are dumped to stdout in hex.
     */
    public void serialEvent(SerialPortEvent event) {
        switch(event.getEventType()) {
        case SerialPortEvent.BI:
        case SerialPortEvent.OE:
        case SerialPortEvent.FE:
        case SerialPortEvent.PE:
        case SerialPortEvent.CD:
        case SerialPortEvent.CTS:
        case SerialPortEvent.DSR:
        case SerialPortEvent.RI:
        case SerialPortEvent.OUTPUT_BUFFER_EMPTY:
            // Line-state events are ignored.
            break;
        case SerialPortEvent.DATA_AVAILABLE:
            byte[] readByte = new byte[1];
            byte[] readBuffer = new byte[34];
            byte[] keyPad = new byte[32];   // 2 x 16-char panel display
            byte checkSum;
            byte counter=0;                 // shared timeout counter (max ~100 * 10 ms)
            byte i=0;                       // shadows the unused static field 'i'
            boolean notDone=true;
            boolean panel;
            boolean status;
            try {
                panel=false;
                status=false;
                int numBytes = inputStream.read(readByte);
                // Hunt for a frame header; each iteration consumes bytes.
                while (notDone && (inputStream.available() > 0)) {
                    if (readByte[0] == 3) {
                        inputStream.read(readByte);
                        if (readByte[0] == 0x25) {
                            notDone=false;
                            panel=true;
                        }
                    }
                    if (readByte[0] == 1) {
                        inputStream.read(readByte);
                        if (readByte[0] == 6) {
                            notDone=false;
                            status=true;
                        }
                    }
                    if (notDone) {
                        // Dump unrecognized data.
                        System.out.print(" Dumping " + Integer.toHexString((int) readByte[0] & 0xFF) );
                        inputStream.read(readByte);
                    }
                }
                if (notDone) System.out.println("\nRan out of data??");
                if (status) { // Real time status message
                    i=0;
                    // Poll until the 4-byte status payload has arrived or ~1 s elapses.
                    while (inputStream.available() < 4) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException e) {}
                        counter++;
                        if (counter>100) break;
                    }
                    if (counter<100) {
                        while (i < 4) {
                            inputStream.read(readByte);
                            readBuffer[i++]=readByte[0];
                        }
                        System.out.println("\nYeah!! Got a status msg!");
                        System.out.println(Integer.toHexString((int) readBuffer[0] & 0xFF) + " " +
                                Integer.toHexString((int) readBuffer[1] & 0xFF) + " " +
                                Integer.toHexString((int) readBuffer[2] & 0xFF) + " " +
                                Integer.toHexString((int) readBuffer[3] & 0xFF));
                        System.out.println("Yeah!! Finished status msg!\n");
                    } else
                        System.out.println("Timeout in status!");
                }
                if (panel) { // Change in panel display
                    numBytes = inputStream.read(readByte); // Dump these two
                    numBytes = inputStream.read(readByte); // Dump these two
                    i=0;
                    // Poll until the 32-byte display payload has arrived or ~1 s elapses.
                    while (inputStream.available() < keyPad.length) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException e) {}
                        counter++;
                        if (counter>100) break;
                    }
                    // Payload plus trailing checksum byte.
                    if (inputStream.available() >= keyPad.length + 1) {
                        numBytes = inputStream.read(keyPad);
                        inputStream.read(readByte);
                        checkSum=readByte[0];
                    }
                    if (counter<100) {
                        // NOTE(review): new String(byte[]) uses the platform
                        // default charset — presumably ASCII panel text.
                        keyPadMsgs = new String(keyPad);
                        String firstLine = keyPadMsgs.substring(0,16);
                        String secondLine = keyPadMsgs.substring(16);
                        /* outWml = new PrintWriter( new OutputStreamWriter( new FileOutputStream("C:\\Program Files\\Apache Group\\Apache\\htdocs\\a.wml"))); outHtml = new PrintWriter( new OutputStreamWriter( new FileOutputStream("C:\\Program Files\\Apache Group\\Apache\\htdocs\\a.html"))); outHtml.write(firstLine + "<br>" + secondLine); outWml.write("<?xml version=\"1.0\"?>"); outWml.write("<!DOCTYPE wml PUBLIC \"-//PHONE.COM//DTD WML 1.1//EN\""); outWml.write("\"http://www.phone.com/dtd/wml11.dtd\"><wml>"); outWml.write("<head>"); outWml.write("<meta http-equiv=\"Cache_Control\" content=\"max-age=0\"/>"); outWml.write("</head>"); outWml.write("<card ontimer='a.wml'><timer value='30'/><p>"); outWml.write(firstLine + "<br/>" + secondLine); outWml.write("</p></card></wml>"); outWml.close(); outHtml.close(); System.out.print(firstLine + "\n" + secondLine); System.out.println("\n****************");*/
                    }
                }
            } catch (IOException e) {}
            break;
        }
    }
}
// System.out.print (Integer.toHexString((int) readBuffer[i] & 0xFF) + "->" + (char) readBuffer[i] + " ");
package redis.clients.jedis;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;

import redis.clients.jedis.BinaryClient.LIST_POSITION;
import redis.clients.util.Hashing;
import redis.clients.util.Sharded;

/**
 * Sharded client for the binary-safe Redis command set. Every key-based
 * command is routed to the shard owning that key (via
 * {@code Sharded.getShard(byte[])}) and delegated unchanged to that shard's
 * {@link Jedis} connection. Cross-shard operations are intentionally absent.
 */
public class BinaryShardedJedis extends Sharded<Jedis, JedisShardInfo> implements BinaryJedisCommands {

    public BinaryShardedJedis(List<JedisShardInfo> shards) {
        super(shards);
    }

    public BinaryShardedJedis(List<JedisShardInfo> shards, Hashing algo) {
        super(shards, algo);
    }

    public BinaryShardedJedis(List<JedisShardInfo> shards, Pattern keyTagPattern) {
        super(shards, keyTagPattern);
    }

    public BinaryShardedJedis(List<JedisShardInfo> shards, Hashing algo, Pattern keyTagPattern) {
        super(shards, algo, keyTagPattern);
    }

    /** Disconnects every underlying shard connection. */
    public void disconnect() throws IOException {
        for (Jedis shard : getAllShards()) {
            shard.disconnect();
        }
    }

    /** Factory hook used by {@code Sharded} to open a connection per shard. */
    protected Jedis create(JedisShardInfo shard) {
        return new Jedis(shard);
    }

    // ---- string commands --------------------------------------------------

    public String set(byte[] key, byte[] value) {
        return getShard(key).set(key, value);
    }

    public byte[] get(byte[] key) {
        return getShard(key).get(key);
    }

    public Boolean exists(byte[] key) {
        return getShard(key).exists(key);
    }

    public String type(byte[] key) {
        return getShard(key).type(key);
    }

    public Long expire(byte[] key, int seconds) {
        return getShard(key).expire(key, seconds);
    }

    public Long expireAt(byte[] key, long unixTime) {
        return getShard(key).expireAt(key, unixTime);
    }

    public Long ttl(byte[] key) {
        return getShard(key).ttl(key);
    }

    public byte[] getSet(byte[] key, byte[] value) {
        return getShard(key).getSet(key, value);
    }

    public Long setnx(byte[] key, byte[] value) {
        return getShard(key).setnx(key, value);
    }

    public String setex(byte[] key, int seconds, byte[] value) {
        return getShard(key).setex(key, seconds, value);
    }

    public Long decrBy(byte[] key, long integer) {
        return getShard(key).decrBy(key, integer);
    }

    public Long decr(byte[] key) {
        return getShard(key).decr(key);
    }

    public Long incrBy(byte[] key, long integer) {
        return getShard(key).incrBy(key, integer);
    }

    public Long incr(byte[] key) {
        return getShard(key).incr(key);
    }

    public Long append(byte[] key, byte[] value) {
        return getShard(key).append(key, value);
    }

    public byte[] substr(byte[] key, int start, int end) {
        return getShard(key).substr(key, start, end);
    }

    // ---- hash commands ----------------------------------------------------

    public Long hset(byte[] key, byte[] field, byte[] value) {
        return getShard(key).hset(key, field, value);
    }

    public byte[] hget(byte[] key, byte[] field) {
        return getShard(key).hget(key, field);
    }

    public Long hsetnx(byte[] key, byte[] field, byte[] value) {
        return getShard(key).hsetnx(key, field, value);
    }

    public String hmset(byte[] key, Map<byte[], byte[]> hash) {
        return getShard(key).hmset(key, hash);
    }

    public List<byte[]> hmget(byte[] key, byte[]... fields) {
        return getShard(key).hmget(key, fields);
    }

    public Long hincrBy(byte[] key, byte[] field, long value) {
        return getShard(key).hincrBy(key, field, value);
    }

    public Boolean hexists(byte[] key, byte[] field) {
        return getShard(key).hexists(key, field);
    }

    public Long hdel(byte[] key, byte[]... fields) {
        return getShard(key).hdel(key, fields);
    }

    public Long hlen(byte[] key) {
        return getShard(key).hlen(key);
    }

    public Set<byte[]> hkeys(byte[] key) {
        return getShard(key).hkeys(key);
    }

    public Collection<byte[]> hvals(byte[] key) {
        return getShard(key).hvals(key);
    }

    public Map<byte[], byte[]> hgetAll(byte[] key) {
        return getShard(key).hgetAll(key);
    }

    // ---- list commands ----------------------------------------------------

    public Long rpush(byte[] key, byte[]... strings) {
        return getShard(key).rpush(key, strings);
    }

    public Long lpush(byte[] key, byte[]... strings) {
        return getShard(key).lpush(key, strings);
    }

    public Long lpushx(byte[] key, byte[] string) {
        return getShard(key).lpushx(key, string);
    }

    public Long rpushx(byte[] key, byte[] string) {
        return getShard(key).rpushx(key, string);
    }

    public Long llen(byte[] key) {
        return getShard(key).llen(key);
    }

    public List<byte[]> lrange(byte[] key, int start, int end) {
        return getShard(key).lrange(key, start, end);
    }

    public String ltrim(byte[] key, int start, int end) {
        return getShard(key).ltrim(key, start, end);
    }

    public byte[] lindex(byte[] key, int index) {
        return getShard(key).lindex(key, index);
    }

    public String lset(byte[] key, int index, byte[] value) {
        return getShard(key).lset(key, index, value);
    }

    public Long lrem(byte[] key, int count, byte[] value) {
        return getShard(key).lrem(key, count, value);
    }

    public byte[] lpop(byte[] key) {
        return getShard(key).lpop(key);
    }

    public byte[] rpop(byte[] key) {
        return getShard(key).rpop(key);
    }

    // ---- set commands -----------------------------------------------------

    public Long sadd(byte[] key, byte[]... members) {
        return getShard(key).sadd(key, members);
    }

    public Set<byte[]> smembers(byte[] key) {
        return getShard(key).smembers(key);
    }

    public Long srem(byte[] key, byte[]... members) {
        return getShard(key).srem(key, members);
    }

    public byte[] spop(byte[] key) {
        return getShard(key).spop(key);
    }

    public Long scard(byte[] key) {
        return getShard(key).scard(key);
    }

    public Boolean sismember(byte[] key, byte[] member) {
        return getShard(key).sismember(key, member);
    }

    public byte[] srandmember(byte[] key) {
        return getShard(key).srandmember(key);
    }

    // ---- sorted-set commands ----------------------------------------------

    public Long zadd(byte[] key, double score, byte[] member) {
        return getShard(key).zadd(key, score, member);
    }

    public Long zadd(byte[] key, Map<Double, byte[]> scoreMembers) {
        return getShard(key).zadd(key, scoreMembers);
    }

    public Set<byte[]> zrange(byte[] key, int start, int end) {
        return getShard(key).zrange(key, start, end);
    }

    public Long zrem(byte[] key, byte[]... members) {
        return getShard(key).zrem(key, members);
    }

    public Double zincrby(byte[] key, double score, byte[] member) {
        return getShard(key).zincrby(key, score, member);
    }

    public Long zrank(byte[] key, byte[] member) {
        return getShard(key).zrank(key, member);
    }

    public Long zrevrank(byte[] key, byte[] member) {
        return getShard(key).zrevrank(key, member);
    }

    public Set<byte[]> zrevrange(byte[] key, int start, int end) {
        return getShard(key).zrevrange(key, start, end);
    }

    public Set<Tuple> zrangeWithScores(byte[] key, int start, int end) {
        return getShard(key).zrangeWithScores(key, start, end);
    }

    public Set<Tuple> zrevrangeWithScores(byte[] key, int start, int end) {
        return getShard(key).zrevrangeWithScores(key, start, end);
    }

    public Long zcard(byte[] key) {
        return getShard(key).zcard(key);
    }

    public Double zscore(byte[] key, byte[] member) {
        return getShard(key).zscore(key, member);
    }

    public List<byte[]> sort(byte[] key) {
        return getShard(key).sort(key);
    }

    public List<byte[]> sort(byte[] key, SortingParams sortingParameters) {
        return getShard(key).sort(key, sortingParameters);
    }

    public Long zcount(byte[] key, double min, double max) {
        return getShard(key).zcount(key, min, max);
    }

    public Long zcount(byte[] key, byte[] min, byte[] max) {
        return getShard(key).zcount(key, min, max);
    }

    public Set<byte[]> zrangeByScore(byte[] key, double min, double max) {
        return getShard(key).zrangeByScore(key, min, max);
    }

    public Set<byte[]> zrangeByScore(byte[] key, double min, double max, int offset, int count) {
        return getShard(key).zrangeByScore(key, min, max, offset, count);
    }

    public Set<Tuple> zrangeByScoreWithScores(byte[] key, double min, double max) {
        return getShard(key).zrangeByScoreWithScores(key, min, max);
    }

    public Set<Tuple> zrangeByScoreWithScores(byte[] key, double min, double max, int offset, int count) {
        return getShard(key).zrangeByScoreWithScores(key, min, max, offset, count);
    }

    public Set<Tuple> zrangeByScoreWithScores(byte[] key, byte[] min, byte[] max) {
        return getShard(key).zrangeByScoreWithScores(key, min, max);
    }

    public Set<Tuple> zrangeByScoreWithScores(byte[] key, byte[] min, byte[] max, int offset, int count) {
        return getShard(key).zrangeByScoreWithScores(key, min, max, offset, count);
    }

    public Set<byte[]> zrevrangeByScore(byte[] key, double max, double min) {
        return getShard(key).zrevrangeByScore(key, max, min);
    }

    public Set<byte[]> zrevrangeByScore(byte[] key, double max, double min, int offset, int count) {
        return getShard(key).zrevrangeByScore(key, max, min, offset, count);
    }

    public Set<Tuple> zrevrangeByScoreWithScores(byte[] key, double max, double min) {
        return getShard(key).zrevrangeByScoreWithScores(key, max, min);
    }

    public Set<Tuple> zrevrangeByScoreWithScores(byte[] key, double max, double min, int offset, int count) {
        return getShard(key).zrevrangeByScoreWithScores(key, max, min, offset, count);
    }

    public Set<byte[]> zrevrangeByScore(byte[] key, byte[] max, byte[] min) {
        return getShard(key).zrevrangeByScore(key, max, min);
    }

    public Set<byte[]> zrevrangeByScore(byte[] key, byte[] max, byte[] min, int offset, int count) {
        return getShard(key).zrevrangeByScore(key, max, min, offset, count);
    }

    public Set<Tuple> zrevrangeByScoreWithScores(byte[] key, byte[] max, byte[] min) {
        return getShard(key).zrevrangeByScoreWithScores(key, max, min);
    }

    public Set<Tuple> zrevrangeByScoreWithScores(byte[] key, byte[] max, byte[] min, int offset, int count) {
        return getShard(key).zrevrangeByScoreWithScores(key, max, min, offset, count);
    }

    public Long zremrangeByRank(byte[] key, int start, int end) {
        return getShard(key).zremrangeByRank(key, start, end);
    }

    public Long zremrangeByScore(byte[] key, double start, double end) {
        return getShard(key).zremrangeByScore(key, start, end);
    }

    public Long zremrangeByScore(byte[] key, byte[] start, byte[] end) {
        return getShard(key).zremrangeByScore(key, start, end);
    }

    public Long linsert(byte[] key, LIST_POSITION where, byte[] pivot, byte[] value) {
        return getShard(key).linsert(key, where, pivot, value);
    }

    // ---- pipelining and introspection --------------------------------------

    @Deprecated
    public List<Object> pipelined(ShardedJedisPipeline shardedJedisPipeline) {
        shardedJedisPipeline.setShardedJedis(this);
        shardedJedisPipeline.execute();
        return shardedJedisPipeline.getResults();
    }

    public ShardedJedisPipeline pipelined() {
        ShardedJedisPipeline pipeline = new ShardedJedisPipeline();
        pipeline.setShardedJedis(this);
        return pipeline;
    }

    public Long objectRefcount(byte[] key) {
        return getShard(key).objectRefcount(key);
    }

    public byte[] objectEncoding(byte[] key) {
        return getShard(key).objectEncoding(key);
    }

    public Long objectIdletime(byte[] key) {
        return getShard(key).objectIdletime(key);
    }
}
/**
 * Copyright 2015 Steffen Kremp
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package io.pictura.servlet;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import javax.servlet.ServletOutputStream;
import javax.servlet.WriteListener;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link CSSColorPaletteRequestProcessor}: request-path
 * parameter parsing (prefix, gradient, ignore-white, color count, scale,
 * format) and the CSS palette generation of {@code doProcess}.
 *
 * @author Steffen Kremp
 */
public class CSSColorPaletteRequestProcessorTest {

    /**
     * Builds a mock request against the default test deployment
     * (context path "/pictura-web", servlet path "/images").
     *
     * @param requestUri the full request URI including the processor parameters
     * @return a stubbed request with no query string and no parameters
     */
    private static HttpServletRequest mockRequest(String requestUri) {
        return mockRequest("/pictura-web", "/images", requestUri);
    }

    /**
     * Builds a mock request with the given context path, servlet path and URI;
     * query string is {@code null} and the parameter enumeration is empty.
     */
    private static HttpServletRequest mockRequest(String contextPath, String servletPath, String requestUri) {
        HttpServletRequest req = mock(HttpServletRequest.class);
        when(req.getContextPath()).thenReturn(contextPath);
        when(req.getServletPath()).thenReturn(servletPath);
        when(req.getRequestURI()).thenReturn(requestUri);
        when(req.getQueryString()).thenReturn(null);
        when(req.getParameterNames()).thenReturn(Collections.enumeration(new ArrayList<String>(0)));
        return req;
    }

    @Test
    public void testIsPreferred() throws Exception {
        System.out.println("isPreferred");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // "f=pcss" marks the request as a palette request.
        assertTrue(rp.isPreferred(mockRequest("/pictura-web/f=pcss/images/lenna.jpg")));
    }

    @Test
    public void testCreateRequestProcessor() throws Exception {
        System.out.println("createRequestProcessor");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        assertTrue(rp.createRequestProcessor() instanceof CSSColorPaletteRequestProcessor);
    }

    @Test
    public void testGetRequestedPrefix() throws Exception {
        System.out.println("getRequestedPrefix");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // "pf=foo" yields the CSS class prefix "foo-".
        assertEquals("foo-", rp.getRequestedPrefix(
                mockRequest("/pictura-web/f=pcss/pf=foo/images/lenna.jpg")));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetRequestedPrefix_IllegalArgumentException() throws Exception {
        System.out.println("getRequestedPrefix_IllegalArgumentException");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // '@' is not a legal prefix character.
        rp.getRequestedPrefix(mockRequest("/pictura-web/f=pcss/pf=foo@/images/lenna.jpg"));
    }

    @Test
    public void testGetRequestedLinearGradient() throws Exception {
        System.out.println("getRequestedLinearGradient");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        assertTrue(rp.getRequestedLinearGradient(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/images/lenna.jpg")));
    }

    @Test
    public void testGetRequestedLinearGradient_Default() throws Exception {
        System.out.println("getRequestedLinearGradient_Default");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // Without "lg=..." the gradient defaults to off.
        assertFalse(rp.getRequestedLinearGradient(
                mockRequest("/pictura-web/f=pcss/pf=foo/images/lenna.jpg")));
    }

    @Test
    public void testGetRequestedIgnoreWhite() throws Exception {
        System.out.println("getRequestedIgnoreWhite");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        assertFalse(rp.getRequestedIgnoreWhite(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/iw=0/images/lenna.jpg")));
    }

    @Test
    public void testGetRequestedIgnoreWhite_Default() throws Exception {
        // Fixed copy-paste: trace message previously lacked the "_Default" suffix.
        System.out.println("getRequestedIgnoreWhite_Default");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // Without "iw=..." white pixels are ignored by default.
        assertTrue(rp.getRequestedIgnoreWhite(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/images/lenna.jpg")));
    }

    @Test
    public void testGetRequestedColorCount() throws Exception {
        System.out.println("getRequestedColorCount");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // assertEquals instead of assertSame: assertSame(5, ...) only passed via
        // the Integer autobox cache and compared references, not values.
        assertEquals(5, (int) rp.getRequestedColorCount(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/iw=1/cc=5/images/lenna.jpg")));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetRequestedColorCount_IllegalArgument1() throws Exception {
        System.out.println("getRequestedColorCount_IllegalArgument1");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // cc=1 is below the minimum color count.
        rp.getRequestedColorCount(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/iw=1/cc=1/images/lenna.jpg"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetRequestedColorCount_IllegalArgument2() throws Exception {
        System.out.println("getRequestedColorCount_IllegalArgument2");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // cc=33 exceeds the maximum color count.
        rp.getRequestedColorCount(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/iw=1/cc=33/images/lenna.jpg"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetRequestedScaleForceUpscale_IllegalArgument() throws Exception {
        System.out.println("getRequestedScaleForceUpscale_IllegalArgument");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // Forced upscale ("u") is not allowed for palette requests.
        rp.getRequestedScaleForceUpscale(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/iw=1/cc=33/s=w222,u/images/lenna.jpg"));
    }

    @Test
    public void testGetRequestedScaleForceUpscale() throws Exception {
        System.out.println("getRequestedScaleForceUpscale");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // Verifies only that a plain scale parameter does not throw.
        rp.getRequestedScaleForceUpscale(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/iw=1/cc=33/s=w222/images/lenna.jpg"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetRequestedFormatOption_IllegalArgument() throws Exception {
        System.out.println("getRequestedFormatOption_IllegalArgument");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // Format options (",p") are not valid in combination with "pcss".
        rp.getRequestedFormatOption(
                mockRequest("/pictura-web/f=pcss,p/pf=foo/lg=1/iw=1/cc=33/images/lenna.jpg"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetRequestedFormatEncoding_IllegalArgument() throws Exception {
        System.out.println("getRequestedFormatEncoding_IllegalArgument");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // Base64 output encoding is not supported for palette requests.
        rp.getRequestedFormatEncoding(
                mockRequest("/pictura-web/f=pcss/pf=foo/lg=1/iw=1/cc=33/s=w222,u/fe=b64/images/lenna.jpg"));
    }

    @Test
    public void testGetRequestedFormatEncoding() throws Exception {
        System.out.println("getRequestedFormatEncoding");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        assertNull(rp.getRequestedFormatEncoding(
                mockRequest("/pictura-web/f=pcss/lg=1/iw=1/cc=33/s=w222,u/images/lenna.jpg")));
    }

    @Test
    public void testGetRequestedFormatName() throws Exception {
        System.out.println("getRequestedFormatName");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        // "pcss" is an output transformation, not an image format name.
        assertNull(rp.getRequestedFormatName(
                mockRequest("/pictura-web/f=pcss/images/lenna.jpg")));
    }

    @Test
    public void testIsProxyRequest() throws Exception {
        System.out.println("isProxyRequest");
        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();
        assertFalse(rp.isProxyRequest(mockRequest("/pictura-web/images/lenna.jpg")));
    }

    /**
     * End-to-end check: processes the bundled test image and asserts that the
     * generated CSS contains the expected foreground, background and
     * linear-gradient rules.
     */
    @Test
    public void testDoProcess() throws Exception {
        System.out.println("doProcess");

        PicturaImageIO.scanForPlugins();

        // Capture the servlet output in-memory.
        final ByteArrayOutputStream bos = new ByteArrayOutputStream();
        final ServletOutputStream sos = new ServletOutputStream() {
            @Override
            public void write(int b) throws IOException {
                bos.write(b);
            }

            public boolean isReady() {
                return true;
            }

            public void setWriteListener(WriteListener arg0) {
            }
        };

        // Resolve test resources relative to the bundled lenna.jpg.
        FileResourceLocator frl = new FileResourceLocator() {
            @Override
            protected String getRootPath() {
                try {
                    URL url = CSSColorPaletteRequestProcessorTest.class.getResource("/lenna.jpg");
                    File f = new File(url.toURI());
                    return f.getParentFile().getAbsolutePath();
                } catch (Throwable t) {
                    fail();
                }
                return null;
            }
        };

        CSSColorPaletteRequestProcessor rp = new CSSColorPaletteRequestProcessor();

        HttpServletRequest req = mockRequest("/", "", "/f=pcss/lg=1/lenna.jpg");
        when(req.getAttribute("io.pictura.servlet.MAX_IMAGE_FILE_SIZE")).thenReturn(1024L * 1024L);
        when(req.getAttribute("io.pictura.servlet.MAX_IMAGE_RESOLUTION")).thenReturn(1000L * 1000L);
        when(req.getAttribute("io.pictura.servlet.DEFLATER_COMPRESSION_MIN_SIZE")).thenReturn(1024 * 100);

        HttpServletResponse resp = mock(HttpServletResponse.class);
        when(resp.getOutputStream()).thenReturn(sos);

        rp.setRequest(req);
        rp.setResponse(resp);
        rp.setResourceLocators(new ResourceLocator[]{frl});
        rp.doProcess(req, resp);

        String css = bos.toString();
        assertTrue(css.contains(".fg-0{color:#e8b499;}"));
        assertTrue(css.contains(".bg-0{background-color:#e8b499;}"));
        assertTrue(css.contains(".bg-lg{background:linear-gradient(#e8b499,#d68d6f,#ca9f9f,#bb907f,#985643,#957179,#78241a,#523030,#311f21);background:-webkit-linear-gradient(#e8b499,#d68d6f,#ca9f9f,#bb907f,#985643,#957179,#78241a,#523030,#311f21);background:-moz-linear-gradient(#e8b499,#d68d6f,#ca9f9f,#bb907f,#985643,#957179,#78241a,#523030,#311f21);background:-o-linear-gradient(#e8b499,#d68d6f,#ca9f9f,#bb907f,#985643,#957179,#78241a,#523030,#311f21);}"));
    }
}
package com.ctrip.xpipe.redis.keeper.impl;

import com.ctrip.xpipe.api.command.Command;
import com.ctrip.xpipe.api.server.PARTIAL_STATE;
import com.ctrip.xpipe.concurrent.AbstractExceptionLogTask;
import com.ctrip.xpipe.endpoint.DefaultEndPoint;
import com.ctrip.xpipe.redis.core.protocal.MASTER_STATE;
import com.ctrip.xpipe.redis.core.protocal.Psync;
import com.ctrip.xpipe.redis.core.protocal.cmd.DefaultPsync;
import com.ctrip.xpipe.redis.core.protocal.cmd.Replconf;
import com.ctrip.xpipe.redis.core.protocal.cmd.Replconf.ReplConfType;
import com.ctrip.xpipe.redis.core.protocal.protocal.EofType;
import com.ctrip.xpipe.redis.keeper.RdbDumper;
import com.ctrip.xpipe.redis.keeper.RedisKeeperServer;
import com.ctrip.xpipe.redis.keeper.RedisMaster;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.nio.NioEventLoopGroup;

import java.io.IOException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

/**
 * Drives replication from a keeper to its upstream Redis master: connects,
 * runs PSYNC, tracks partial/full sync state, and periodically sends
 * REPLCONF ACK with the current replication-store offset. Reconnects with a
 * configurable delay on connect failure or disconnect.
 *
 * @author wenchao.meng
 *
 * Aug 24, 2016
 */
public class DefaultRedisMasterReplication extends AbstractRedisMasterReplication{

	// Whether the last PSYNC resulted in a partial or full resync;
	// volatile because it is written from netty/psync callbacks and read via partialState().
	private volatile PARTIAL_STATE partialState = PARTIAL_STATE.UNKNOWN;

	// Handle of the periodic REPLCONF ACK task, null when not scheduled.
	private ScheduledFuture<?> replConfFuture;

	// Reconnect delay, overridable via system property (default 2 seconds).
	protected int masterConnectRetryDelaySeconds = Integer.parseInt(System.getProperty(KEY_MASTER_CONNECT_RETRY_DELAY_SECONDS, "2"));

	public DefaultRedisMasterReplication(RedisMaster redisMaster, RedisKeeperServer redisKeeperServer,
			NioEventLoopGroup nioEventLoopGroup, ScheduledExecutorService scheduled, int replTimeoutMilli) {
		super(redisKeeperServer, redisMaster, nioEventLoopGroup, scheduled, replTimeoutMilli);
	}

	/** Convenience constructor using the default replication timeout. */
	public DefaultRedisMasterReplication(RedisMaster redisMaster, RedisKeeperServer redisKeeperServer,
			NioEventLoopGroup nioEventLoopGroup, ScheduledExecutorService scheduled) {
		this(redisMaster, redisKeeperServer, nioEventLoopGroup, scheduled, DEFAULT_REPLICATION_TIMEOUT_MILLI);
	}

	/**
	 * Initiates the connection to the master; on failure schedules a retry
	 * after {@link #masterConnectRetryDelaySeconds}.
	 */
	@Override
	protected void doConnect(Bootstrap b) {

		redisMaster.setMasterState(MASTER_STATE.REDIS_REPL_CONNECTING);

		tryConnect(b).addListener(new ChannelFutureListener() {

			@Override
			public void operationComplete(ChannelFuture future) throws Exception {

				if(!future.isSuccess()){
					logger.error("[operationComplete][fail connect with master]" + redisMaster, future.cause());
					// Retry from the scheduler, not the netty IO thread.
					scheduled.schedule(new Runnable() {
						@Override
						public void run() {
							try{
								connectWithMaster();
							}catch(Throwable th){
								logger.error("[run][connectUntilConnected]" + DefaultRedisMasterReplication.this, th);
							}
						}
					}, masterConnectRetryDelaySeconds, TimeUnit.SECONDS);
				}
			}
		});
	}

	@Override
	public void masterConnected(Channel channel) {

		redisMaster.setMasterState(MASTER_STATE.REDIS_REPL_HANDSHAKE);
		super.masterConnected(channel);
		// ACKs are only meaningful once replication is established again;
		// stop any task left over from a previous connection.
		cancelReplConf();
	}

	// NOTE(review): "Disconntected" is a typo inherited from the superclass
	// override contract; renaming would break the @Override.
	@Override
	public void masterDisconntected(Channel channel) {
		super.masterDisconntected(channel);

		// Space reconnect attempts at least masterConnectRetryDelaySeconds apart,
		// measured from the time the previous connection was established.
		long interval = System.currentTimeMillis() - connectedTime;
		long scheduleTime = masterConnectRetryDelaySeconds * 1000 - interval;
		if (scheduleTime < 0) {
			scheduleTime = 0;
		}
		logger.info("[masterDisconntected][reconnect after {} ms]", scheduleTime);
		scheduled.schedule(new Runnable() {
			@Override
			public void run() {
				connectWithMaster();
			}
		}, scheduleTime, TimeUnit.MILLISECONDS);
	}

	public void setMasterConnectRetryDelaySeconds(int masterConnectRetryDelaySeconds) {
		this.masterConnectRetryDelaySeconds = masterConnectRetryDelaySeconds;
	}

	@Override
	public void stopReplication() {
		super.stopReplication();
		cancelReplConf();
	}

	/**
	 * (Re)starts the periodic REPLCONF ACK task; any previously scheduled
	 * task is cancelled first so at most one is active.
	 */
	private void scheduleReplconf() {

		if (logger.isInfoEnabled()) {
			logger.info("[scheduleReplconf]" + this);
		}

		cancelReplConf();

		replConfFuture = scheduled.scheduleWithFixedDelay(new AbstractExceptionLogTask() {

			@Override
			protected void doRun() throws Exception {
				logger.debug("[run][send ack]{}", masterChannel);
				Command<Object> command = createReplConf();
				command.execute();
			}
		}, 0, REPLCONF_INTERVAL_MILLI, TimeUnit.MILLISECONDS);
	}

	protected void cancelReplConf() {

		if (replConfFuture != null) {
			replConfFuture.cancel(true);
			replConfFuture = null;
		}
	}

	/** Builds a REPLCONF ACK carrying the replication store's current end offset. */
	protected Command<Object> createReplConf() {
		return new Replconf(clientPool, ReplConfType.ACK, scheduled,
				String.valueOf(redisMaster.getCurrentReplicationStore().getEndOffset()));
	}

	/** On PSYNC failure, close the channel; reconnect is driven by masterDisconntected. */
	@Override
	protected void psyncFail(Throwable cause) {

		logger.info("[psyncFail][close channel, wait for reconnect]" + this, cause);
		masterChannel.close();
	}

	@Override
	protected Psync createPsync() {

		Psync psync = new DefaultPsync(clientPool, redisMaster.masterEndPoint(), redisMaster.getReplicationStoreManager(), scheduled);
		psync.addPsyncObserver(this);
		psync.addPsyncObserver(redisKeeperServer);
		return psync;
	}

	@Override
	public PARTIAL_STATE partialState() {
		return partialState;
	}

	/** Called when the master starts streaming an RDB: this is a full resync. */
	@Override
	protected void doBeginWriteRdb(EofType eofType, long masterRdbOffset) throws IOException {

		redisMaster.setMasterState(MASTER_STATE.REDIS_REPL_TRANSFER);
		partialState = PARTIAL_STATE.FULL;
		redisMaster.getCurrentReplicationStore().getMetaStore().setMasterAddress((DefaultEndPoint) redisMaster.masterEndPoint());
	}

	@Override
	protected void doEndWriteRdb() {

		logger.info("[doEndWriteRdb]{}", this);
		redisMaster.setMasterState(MASTER_STATE.REDIS_REPL_CONNECTED);
		scheduleReplconf();
	}

	/** Called on "+CONTINUE": the master accepted a partial resync. */
	@Override
	protected void doOnContinue(){

		logger.info("[doOnContinue]{}", this);
		redisMaster.setMasterState(MASTER_STATE.REDIS_REPL_CONNECTED);
		try {
			redisMaster.getCurrentReplicationStore().getMetaStore().setMasterAddress((DefaultEndPoint) redisMaster.masterEndPoint());
		} catch (IOException e) {
			// Best-effort metadata update; replication itself continues.
			logger.error("[doOnContinue]" + this, e);
		}
		scheduleReplconf();
		partialState = PARTIAL_STATE.PARTIAL;
		redisKeeperServer.getRedisKeeperServerState().initPromotionState();
	}

	@Override
	protected void doReFullSync() {
		redisKeeperServer.getRedisKeeperServerState().initPromotionState();
	}

	@Override
	protected void doOnFullSync() {

		try {
			logger.info("[doOnFullSync]{}", this);
			RdbDumper rdbDumper = new RedisMasterReplicationRdbDumper(this, redisKeeperServer);
			setRdbDumper(rdbDumper);
			redisKeeperServer.setRdbDumper(rdbDumper, true);
		} catch (SetRdbDumperException e) {
			//impossible to happen
			logger.error("[doOnFullSync][impossible to happen]", e);
		}
	}

	@Override
	protected String getSimpleName() {
		return "DefRep";
	}
}
// ========================================================================
//
//File: $RCSfile: PlaceHolderManager.java,v $
//Version: $Revision: 1.22 $
//Modified: $Date: 2013/05/10 13:25:23 $
//
//(c) Copyright 2005-2014 by Mentor Graphics Corp. All rights reserved.
//
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
//
package com.mentor.nucleus.bp.ui.text.placeholder;

import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;

import org.eclipse.core.internal.resources.Marker;
import org.eclipse.core.internal.resources.ProjectPreferences;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IMarker;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceDelta;
import org.eclipse.core.resources.IResourceRuleFactory;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.IWorkspaceRunnable;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.resources.WorkspaceJob;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.ISchedulingRule;
import org.eclipse.core.runtime.jobs.MultiRule;
import org.eclipse.core.runtime.preferences.IEclipsePreferences.IPreferenceChangeListener;
import org.eclipse.core.runtime.preferences.IEclipsePreferences.PreferenceChangeEvent;
import org.eclipse.swt.custom.BusyIndicator;
import org.eclipse.ui.PlatformUI;

import com.mentor.nucleus.bp.core.Bridge_c;
import com.mentor.nucleus.bp.core.Component_c;
import com.mentor.nucleus.bp.core.CorePlugin;
import com.mentor.nucleus.bp.core.ExternalEntity_c;
import com.mentor.nucleus.bp.core.Function_c;
import com.mentor.nucleus.bp.core.Ooaofooa;
import com.mentor.nucleus.bp.core.Operation_c;
import com.mentor.nucleus.bp.core.ProvidedOperation_c;
import com.mentor.nucleus.bp.core.RequiredOperation_c;
import com.mentor.nucleus.bp.core.SystemModel_c;
import com.mentor.nucleus.bp.core.common.AttributeChangeModelDelta;
import com.mentor.nucleus.bp.core.common.IAllActivityModifier;
import com.mentor.nucleus.bp.core.common.IModelDelta;
import com.mentor.nucleus.bp.core.common.IdAssigner;
import com.mentor.nucleus.bp.core.common.ModelChangedEvent;
import com.mentor.nucleus.bp.core.common.ModelElement;
import com.mentor.nucleus.bp.core.common.ModelRoot;
import com.mentor.nucleus.bp.core.common.NonRootModelElement;
import com.mentor.nucleus.bp.core.common.PersistableModelComponent;
import com.mentor.nucleus.bp.core.common.PersistenceManager;
import com.mentor.nucleus.bp.core.ui.preferences.BridgePointProjectActionLanguagePreferences;
import com.mentor.nucleus.bp.ui.text.AbstractModelElementListener;
import com.mentor.nucleus.bp.ui.text.ModelElementID;
import com.mentor.nucleus.bp.ui.text.TextPlugin;
import com.mentor.nucleus.bp.ui.text.activity.ActivityEditorInputFactory;
import com.mentor.nucleus.bp.ui.text.activity.AllActivityModifier;
import com.mentor.nucleus.bp.ui.text.description.DescriptionEditorInputFactory;
import com.mentor.nucleus.bp.ui.text.placeholder.PlaceHolderEntry.PlaceHolderFileProxy;

/**
 * Singleton that tracks the on-disk "placeholder" files backing activity
 * (.oal) and description (.dsc) text editors. Maintains a map from
 * {@link ModelElementID} to {@link PlaceHolderEntry} and keeps it in sync
 * with model-element/resource/preference changes via an inner
 * {@link PlaceHolderStateSynchronizer} listener.
 */
public class PlaceHolderManager {
	/**
	 * The WorkspaceRunnable's thread name used to the reWrite the placeholder files
	 * This string used by the renaming SystemModel and Domain TestCases to wait until
	 * this thread completes.
	 * @see PlaceHolderManager#reWritePlaceHolders(ArrayList list)
	 * @see PlaceHolderFileReWriter
	 */
	public static String PLACEHOLDER_REWRITER_THREAD_NAME = "__PlaceHolderFileReWriter_Thread"; //$NON-NLS-1$

	// File extensions for which placeholder files exist (activity + description).
	static String[] supportedExtensions = new String[]{ActivityEditorInputFactory.PLACEHOLDER_EXTENSION,
			DescriptionEditorInputFactory.PLACEHOLDER_EXTENSION};

	// Keyed map of all known placeholder entries; guarded by synchronized(placeholderMap).
	Map<ModelElementID, PlaceHolderEntry> placeholderMap = new TreeMap<ModelElementID, PlaceHolderEntry>();

	// Flag consulted by clients during a parse pass (see parseInProgress()).
	private boolean parseInProgress = false;

	protected PlaceHolderManager() {
		PlaceHolderStateSynchronizer stateSynchronizer = new PlaceHolderStateSynchronizer();
		// Pick up components that were already loaded before this manager existed.
		initializeFor(PersistenceManager.findRootComponentInstances());
		Ooaofooa.getDefaultInstance().addModelChangeListener(stateSynchronizer);
		ResourcesPlugin.getWorkspace().addResourceChangeListener(stateSynchronizer);
		CorePlugin.addProjectPreferenceListener(stateSynchronizer);
	}

	private static PlaceHolderManager defaultInstance = null;

	// NOTE(review): lazy init is not thread-safe; presumably only called from
	// the UI thread — confirm before relying on it elsewhere.
	public static PlaceHolderManager getDefaultInstance() {
		if (defaultInstance == null) {
			defaultInstance = new PlaceHolderManager();
		}
		return defaultInstance;
	}

	public boolean parseInProgress() {
		return parseInProgress;
	}

	public void setParseInProgress(boolean flag) {
		parseInProgress = flag;
	}

	/**
	 * This method always returns a valid IFile instance. It creates a new PlaceHolderEntry if none already
	 * exists.
	 *
	 * @param modelElementID Must not be null, used as the key for the PlaceHolderMap
	 * @param extension Currently can only be "oal" or "dsc"
	 * @param requester Must not be null
	 * @return An already existing IFile or a newly created instance
	 *
	 * @throws IllegalArgumentException if the extension is not known
	 */
	public IFile getPlaceholderFile(ModelElementID modelElementID, String extension, Object requester) {
		return getPlaceholderFile(modelElementID, extension, requester, true);
	}

	/**
	 * Same as {@link #getPlaceholderFile(ModelElementID, String, Object)} but may
	 * return null instead of creating a new entry when toCreateNew is false.
	 */
	public IFile getPlaceholderFile(ModelElementID modelElementID, String extension, Object requester, boolean toCreateNew) {
		if(!isSupportedExtension(extension)){
			throw new IllegalArgumentException("extension " + extension + " is not supported"); //$NON-NLS-1$//$NON-NLS-2$
		}
		if (modelElementID != null) {
			synchronized(placeholderMap){
				PlaceHolderEntry entry = placeholderMap.get(modelElementID);
				if(entry == null){
					/**
					 * Creating a new entry only when it is requested, otherwise this function works as
					 * a pure lookup and returns null below.
					 */
					if(toCreateNew){
						entry = new PlaceHolderEntry(this);
						placeholderMap.put(modelElementID, entry);
					}else{
						return null;
					}
				}
				return entry.getPlaceHolderFile(modelElementID, extension, requester, toCreateNew);
			}
		}
		return null;
	}

	/**
	 * Removes a PlaceHolderProxyFile from the map. Also removes the PlaceHolderEntry if it doesn't have any
	 * more instances left.
	 *
	 * @param modelElementID Must not be null, used as the key for the PlaceHolderMap
	 * @param extension Currently can only be "oal" or "dsc"
	 * @param requester Must not be null
	 * @throws IllegalArgumentException if the extension is not known
	 */
	public void releasePlaceholderFile(ModelElementID modelElementID, String extension, Object requester) {
		if(!isSupportedExtension(extension)){
			throw new IllegalArgumentException("extension " + extension + " is not supported"); //$NON-NLS-1$ //$NON-NLS-2$
		}
		// NOTE(review): this initial get() happens outside the synchronized block,
		// unlike the other map accesses — confirm intentional.
		PlaceHolderEntry entry = placeholderMap.get(modelElementID);
		if (entry != null) {
			synchronized(placeholderMap){
				entry.releasePlaceHolderFile(extension, requester);
				if(entry.isSafeToRemove()){
					placeholderMap.remove(modelElementID);
				}
			}
		}
	}

	/*
	 * This method is only called on construction of the manager. It initializes
	 * the place holders for all components that are already loaded. Later it is
	 * job of state synchronizer to maintain the state on loading and unloading
	 * of the components.
	 */
	private void initializeFor(PersistableModelComponent[] components){
		for (int i = 0; i < components.length; i++) {
			initializeFor(components[i]);
			// Recurse into the component tree.
			if(components[i].getChildrenCount() > 0){
				Collection children = components[i].getChildren();
				initializeFor((PersistableModelComponent[])children.toArray(new PersistableModelComponent[children.size()]));
			}
		}
	}

	/**
	 * Scans the component's containing directory for supported placeholder
	 * files and registers those belonging to this component's model root.
	 */
	private void initializeFor(PersistableModelComponent component){
		if(!component.isLoaded()){
			return;
		}
		IPath folderPath = component.getContainingDirectoryPath();
		IResource members[] = null;
		//Getting all placeholder files
		IContainer container = ResourcesPlugin.getWorkspace().getRoot()
				.getFolder(folderPath);
		if(!container.exists()) {
			// this occurs when models are being converted from
			// single-file repository to multi-file repository
			return;
		}
		try {
			members = container.members();
		} catch (CoreException e) {
			TextPlugin.logError("Exception while getting children of models folder",e); //$NON-NLS-1$
			return;
		}
		//Loading only those whose modelRootId meets with id of this component.
		for (int j = 0 ; j < members.length; j++){
			if (members[j] instanceof IFile){
				IFile file = (IFile) members[j];
				if(isSupportedExtension(file.getFileExtension()) && !isLoaded(file)){
					ModelElementID modelElementID = null;
					try {
						modelElementID = ModelElementID.createInstance(file);
					} catch (CoreException e) {
						TextPlugin.logError("Can't load some required project files", e); //$NON-NLS-1$
					}
					synchronized(placeholderMap){
						if (modelElementID == null) {
							TextPlugin.logError("The memento data could not be loaded", null); //$NON-NLS-1$
						}
						else if (modelElementID.getModelRootID().equals(component.getUniqueID())){
							PlaceHolderEntry entry = placeholderMap.get(modelElementID);
							if(entry == null){
								entry = new PlaceHolderEntry(PlaceHolderManager.this);
								placeholderMap.put(modelElementID, entry);
							}
							entry.init(file, modelElementID);
						}
					}
				}
			}
		}
	}

	/** True when the given file is already tracked by some entry in the map. */
	private boolean isLoaded(IFile file){
		return (findPlaceHolder(file) == null)?false:true;
	}

	/** Linear search of all entries for the proxy tracking the given file, or null. */
	private synchronized PlaceHolderFileProxy findPlaceHolder(IFile file){
		Collection placeholders = placeholderMap.values();
		synchronized(placeholders){
			for (Iterator iterator = placeholders.iterator(); iterator.hasNext();) {
				PlaceHolderEntry entry = (PlaceHolderEntry) iterator.next();
				PlaceHolderFileProxy[] placeHolders= entry.getPlaceHolderFiles();
				for (int i = 0; i < placeHolders.length; i++) {
					if(placeHolders[i].equals(file)){
						return placeHolders[i];
					}
				}
			}
		}
		return null;
	}

	// NOTE(review): despite its name this returns TRUE when the proposed name is
	// NOT reserved by any entry (the caller assigns the result to "isUnique").
	private boolean isReservedByMap(String proposedName, String ext){
		String proposedFileName = proposedName + "." + ext; //$NON-NLS-1$
		synchronized(placeholderMap){
			for (Iterator iter = placeholderMap.values().iterator(); iter.hasNext();) {
				PlaceHolderEntry entry = (PlaceHolderEntry) iter.next();
				IFile file = entry.getPlaceHolderFileFor(ext);
				if(file != null && file.getName().equals(proposedFileName)){
					return false;
				}
			}
		}
		return true;
	}

	/**
	 * Produces a workspace-relative IFile whose name does not collide with any
	 * mapped placeholder nor any unrelated file already on disk; numeric
	 * suffixes (".1", ".2", ...) are appended until unique.
	 */
	IFile getFileWithUniqueName(ModelElementID modelElementID, IPath path, String fileNameWithoutExt, String extension) {
		fileNameWithoutExt = getLegalFileName(fileNameWithoutExt);
		String proposedName = fileNameWithoutExt;
		int suffix = 1;
		// check if the given name is already reserved by some model element id
		boolean isUnique = false;
		while(!isUnique){
			isUnique = isReservedByMap(proposedName, extension);
			if(!isUnique){
				proposedName = fileNameWithoutExt + "." + suffix ; //$NON-NLS-1$
				suffix++;
			}
		}
		String fullName = proposedName + "." + extension; //$NON-NLS-1$
		// check if there is no file present on the given path with same name
		// this is possible as there might be place holder files, which are not
		// loaded because associated model is not loaded.
		// It is also possible that we require to create editor input(which requires
		// place holder file), whose model element is not currently avaiable (like if
		// project is close). We want to provide it with valid placeholderfile, which
		// may not be initialized, that is why we are not using resource api here.
		IPath filePath = null;
		while(true){
			filePath = path.append(fullName);
			File file = filePath.toFile();
			if(file.exists() && !modelElementID.isRepresentedBy(file)){
				fullName = fileNameWithoutExt + "." + suffix + "." + extension; //$NON-NLS-1$ //$NON-NLS-2$
				suffix++;
			}else{
				break;
			}
		}
		IWorkspaceRoot workspaceRoot = ResourcesPlugin.getWorkspace().getRoot();
		IPath rootPath = workspaceRoot.getLocation();
		// Convert the absolute filesystem path back to a workspace-relative one.
		filePath = filePath.removeFirstSegments(filePath.matchingFirstSegments(rootPath));
		return workspaceRoot.getFile(filePath);
	}

	// Characters not allowed in generated placeholder file names (replaced by '_').
	static String illegalCharacters = "\\/:*?\"<>| "; //$NON-NLS-1$

	/** Replaces every illegal character in the proposed file name with '_'. */
	static String getLegalFileName(String invalidName){
		char[] nameChars = invalidName.toCharArray();
		for (int i = 0; i < nameChars.length; i++) {
			if(illegalCharacters.indexOf(nameChars[i]) >= 0){
				nameChars[i] = '_';
			}
		}
		return new String(nameChars);
	}

	/** True when the extension is one of {@link #supportedExtensions}. */
	static boolean isSupportedExtension(String extension){
		for (int i = 0; i < supportedExtensions.length; i++) {
			if(supportedExtensions[i].equals(extension)){
				return true;
			}
		}
		return false;
	}

	/**
	 * Keeps the placeholder map consistent with model, resource and
	 * project-preference changes.
	 */
	class PlaceHolderStateSynchronizer extends AbstractModelElementListener
			implements IPreferenceChangeListener {

		/** Forwards marker deltas to the proxy tracking the changed file, if any. */
		protected void handleResourceMarkersChanged(IResourceDelta delta) {
			PlaceHolderFileProxy placeHolderFile = findPlaceHolder((IFile)delta.getResource());
			if(placeHolderFile != null){
				placeHolderFile.handleMarkersUpdated(delta);
			}
		}

		protected void handleComponentRenamed(PersistableModelComponent component, String oldName, String newName) {
			updatePlaceHolders(component);
		}

		/** Recursively refreshes entry names for the component and its children. */
		private void updatePlaceHolders(PersistableModelComponent component){
			// Snapshot under the lock, iterate outside it.
			Object[] entries = null;
			synchronized(placeholderMap){
				entries = placeholderMap.entrySet().toArray();
			}
			for (int i = 0; i < entries.length; i++) {
				Map.Entry entry = (Map.Entry)entries[i];
				PlaceHolderEntry placeHolderEntry = (PlaceHolderEntry) entry.getValue();
				if(placeHolderEntry != null && placeHolderEntry.getComponent() == component){
					placeHolderEntry.updateName();
				}
			}
			for (Iterator iterator = component.getChildren().iterator(); iterator.hasNext();) {
				PersistableModelComponent child = (PersistableModelComponent) iterator.next();
				updatePlaceHolders(child);
			}
		}

		protected void handleModelElementAttributeChanged(ModelChangedEvent event, AttributeChangeModelDelta delta,
				ModelElementID changedModelElementID) {
			Object[] entries = null;
			synchronized(placeholderMap){
				entries = placeholderMap.entrySet().toArray();
			}
			/*
			 * We still need to iterate the PlaceHolderMap, to update names of the any other model
			 * element. All entries are iterated so that a interdependent names are updated for all
			 * effected entries.
			 */
			for (int i = 0; i < entries.length; i++) {
				Map.Entry entry = (Map.Entry)entries[i];
				PlaceHolderEntry placeHolderEntry = (PlaceHolderEntry) entry.getValue();
				if(placeHolderEntry != null && placeHolderEntry.getModelRoot() == event.getSourceModelRoot()){
					placeHolderEntry.updateName();
				}
			}
		}

		/**
		 * Disposes the entry keyed by the deleted element, or — when the deleted
		 * element is only referenced by some proxies — disposes those proxies and
		 * removes the entry if it becomes empty.
		 */
		protected void handleModelElementDeleted(ModelChangedEvent event, IModelDelta delta, ModelElementID deletedModelElementID) {
			Object[] placeHolderEntries = null;
			synchronized(placeholderMap){
				placeHolderEntries = placeholderMap.values().toArray();
			}
			for (int index = 0; index < placeHolderEntries.length; index++) {
				PlaceHolderEntry placeHolderEntry = (PlaceHolderEntry) placeHolderEntries[index];
				if (placeHolderEntry.getModelElementID().equals(deletedModelElementID)){
					try {
						synchronized(placeholderMap){
							placeholderMap.remove(placeHolderEntry.getModelElementID());
							placeHolderEntry.dispose();
						}
					} catch (CoreException e) {
						TextPlugin.logError(e.getMessage(), e);
					}
					break;
				}
				else{
					//Finding against the required model element id
					PlaceHolderFileProxy[] files = placeHolderEntry.getPlaceHolderFiles();
					for (int i = 0; i < files.length; i++) {
						ModelElementID requireModelElement = files[i].getRequiredModelElementID();
						if(requireModelElement != null && requireModelElement.equals(deletedModelElementID)){
							files[i].dispose();
						}
					}
					synchronized(placeholderMap){
						if(placeHolderEntry.isSafeToRemove()){
							placeholderMap.remove(placeHolderEntry.getModelElementID());
						}
					}
				}
			}
		}

		/**
		 * Initializes the PlaceHolderMap and loads any existing placeholder files.
		 */
		protected void handleComponentLoaded(ModelChangedEvent event) {
			PersistableModelComponent component = PersistenceManager
					.getDefaultInstance().getComponent((NonRootModelElement)event.getModelElement());
			if(component == null){
				return;
			}
			initializeFor(component);
		}

		/** Disposes and removes every entry living under the unloaded component's path. */
		protected void handleComponentUnloaded(ModelChangedEvent event) {
			ModelElement me = event.getModelElement();
			if(!(me instanceof NonRootModelElement)){
				return;
			}
			PersistableModelComponent component = PersistenceManager.getComponent((NonRootModelElement)me);
			if(component == null){
				return;
			}
			Object[] placeHolderEntries = null;
			synchronized(placeholderMap){
				placeHolderEntries = placeholderMap.values().toArray();
			}
			String compDirPath = component.getFullPath().toString();
			for (int index = 0; index < placeHolderEntries.length; index++) {
				PlaceHolderEntry placeHolderEntry = (PlaceHolderEntry) placeHolderEntries[index];
				if(compDirPath.equals(placeHolderEntry.getModelElementID().getComponentPath())){
					try {
						synchronized(placeholderMap){
							placeHolderEntry.dispose();
							placeholderMap.remove(placeHolderEntry.getModelElementID());
						}
					} catch (CoreException e) {
						TextPlugin.logError("Error while disposing place holder entry", e); //$NON-NLS-1$
					}
				}
			}
		}

		@Override
		public void systemAboutToBeDisabled(SystemModel_c system) {
			Object[] placeHolderEntries = null;
			synchronized(placeholderMap){
				placeHolderEntries = placeholderMap.values().toArray();
			}
			for (int index = 0; index < placeHolderEntries.length; index++) {
				PlaceHolderEntry placeHolderEntry = (PlaceHolderEntry) placeHolderEntries[index];
				if(placeHolderEntry.getModelElementID().getModelRoot().getRoot() == system) {
					try {
						synchronized(placeholderMap){
							placeHolderEntry.dispose();
							placeholderMap.remove(placeHolderEntry.getModelElementID());
						}
					} catch (CoreException e) {
						TextPlugin.logError("Error while disposing place holder entry", e); //$NON-NLS-1$
					}
				}
			}
		}

		protected void handleModelReloaded(ModelChangedEvent event) {}

		/**
		 * When the "empty synchronous message" error preference is turned OFF for
		 * a project, reparses the affected synchronous messages so their stale
		 * error markers are cleared.
		 */
		// NOTE(review): the keys are compared with "==", which relies on both
		// sides being the same interned String constant — confirm the event key
		// is always the constant itself, otherwise use equals().
		@Override
		public void preferenceChange(PreferenceChangeEvent event) {
			if (event.getKey() == BridgePointProjectActionLanguagePreferences.ENABLE_ERROR_FOR_EMPTY_SYNCHRONOUS_MESSAGE) {
				Object newValue = event.getNewValue();
				if (newValue instanceof String) {
					boolean enabled = Boolean.valueOf(newValue.toString());
					if (!enabled) {
						if (event.getSource() instanceof ProjectPreferences) {
							// Preference node path looks like "/project/<projectName>/...";
							// segment 1 is the project name.
							String absolutePath = ((ProjectPreferences) event
									.getSource()).absolutePath();
							Path path = new Path(absolutePath);
							if(path.segmentCount() > 1) {
								String projectName = path.segments()[1];
								IProject project = ResourcesPlugin.getWorkspace()
										.getRoot().getProject(projectName);
								reparseSynchronousMessages(project);
							}
						}
					}
				}
			}
			if (event.getKey() == BridgePointProjectActionLanguagePreferences.ENABLE_ERROR_FOR_EMPTY_SYNCHRONOUS_MESSAGE_REALIZED) {
				Object newValue = event.getNewValue();
				if (newValue instanceof String) {
					boolean enabled = Boolean.valueOf(newValue.toString());
					if (!enabled) {
						if (event.getSource() instanceof ProjectPreferences) {
							String absolutePath = ((ProjectPreferences) event
									.getSource()).absolutePath();
							Path path = new Path(absolutePath);
							if(path.segmentCount() > 1) {
								String projectName = path.segments()[1];
								IProject project = ResourcesPlugin.getWorkspace()
										.getRoot().getProject(projectName);
								reparseRealizedSynchronousMessages(project);
							}
						}
					}
				}
			}
		}

		private void reparseSynchronousMessages(IProject project) {
			reparseRealizedAndNonRealizedSynchronousMessages(project, false);
		}

		private void reparseRealizedSynchronousMessages(IProject project) {
			reparseRealizedAndNonRealizedSynchronousMessages(project, true);
		}

		/**
		 * Collects problem-markered synchronous-message elements in the project,
		 * schedules deletion of their markers, then reparses them per model root.
		 */
		private void reparseRealizedAndNonRealizedSynchronousMessages(IProject project, boolean checkRealized) {
			IMarker[] markers = new IMarker[0];
			try {
				markers = project.findMarkers(Marker.PROBLEM, true, IResource.DEPTH_INFINITE);
			} catch (CoreException e) {
				CorePlugin
						.logError(
								"Unable to locate problem markers to clear parse errors.", e);
			}
			// note that we must do this from the markers rather than use the
			// local place holder map as on restart the map is not populated
			// until a place holder file is requested
			//
			// we need a map as we parse at one time rather than multiple times
			// and the parse process requires a model root which there can be
			// many
			final HashMap<ModelRoot, List<NonRootModelElement>> elementsToParse = new HashMap<ModelRoot, List<NonRootModelElement>>();
			final List<IMarker> markersToDelete = new ArrayList<IMarker>();
			for(IMarker marker : markers) {
				if(marker.getResource() instanceof IFile) {
					IFile resource = (IFile) marker.getResource();
					// if the resource does not exist skip
					if(!resource.exists()) {
						continue;
					}
					ModelElementID key = null;
					try {
						key = ModelElementID.createInstance(resource);
					} catch (CoreException e) {
						// do not log an error here, this will most likely mean
						// that we are looking at a problem marker that is not
						// related to parse issues
					}
					if(key != null) {
						key.resolve();
						String className = key.getType();
						final Ooaofooa modelRoot = key.getModelRoot();
						Object[] ids = new Object[key.getIdCount()];
						for(int i = 0; i < key.getIdCount(); i++) {
							ids[i] = UUID.fromString(key.getId(i));
						}
						try {
							final NonRootModelElement element = (NonRootModelElement) modelRoot
									.getInstanceList(Class.forName(className)).get(ids);
							if(element != null) {
								boolean synchronousMessage = isSynchronousMessage(element);
								if(synchronousMessage) {
									// NOTE(review): both branches below set reparse = true,
									// so the realized/checkRealized distinction currently has
									// no effect here — confirm whether that is intended.
									boolean reparse = false;
									boolean realized = isRealizedElement(element);
									if(realized && checkRealized) {
										// reparse this element
										reparse = true;
									} else {
										// reparse this element
										reparse = true;
									}
									if(reparse) {
										List<NonRootModelElement> list = elementsToParse.get(modelRoot);
										if(list == null) {
											list = new ArrayList<NonRootModelElement>();
										}
										list.add(element);
										elementsToParse.put(modelRoot, list);
										markersToDelete.add(marker);
									}
								}
							}
						} catch (ClassNotFoundException e) {
							CorePlugin
									.logError(
											"Unable to locate element class from given model element id.", e);
						}
					}
				}
			}
			if (!elementsToParse.isEmpty()) {
				BusyIndicator.showWhile(PlatformUI.getWorkbench().getDisplay(), new Runnable() {
					@Override
					public void run() {
						// clear all markers that are going to be
						// reparsed
						AbstractModelElementListener.setIgnoreResourceChangesMarker(true);
						try {
							WorkspaceJob job = new WorkspaceJob("Remove problem markers") {
								@Override
								public IStatus runInWorkspace(IProgressMonitor monitor) throws CoreException {
									for(IMarker marker : markersToDelete) {
										try {
											if(marker.exists()) {
												marker.delete();
												// also remove the oal file associated
												marker.getResource()
														.delete(true, new NullProgressMonitor());
											}
										} catch (CoreException e) {
											CorePlugin.logError("Unable to delete marker.", e);
										}
									}
									return Status.OK_STATUS;
								}
							};
							job.setRule(ResourcesPlugin.getWorkspace().getRoot());
							job.schedule();
						} finally {
							AbstractModelElementListener.setIgnoreResourceChangesMarker(false);
						}
						// NOTE(review): this second reset duplicates the one in the
						// finally block above.
						AbstractModelElementListener.setIgnoreResourceChangesMarker(false);
						Set<ModelRoot> keySet = elementsToParse
								.keySet();
						for (Iterator<ModelRoot> iterator = keySet
								.iterator(); iterator.hasNext();) {
							ModelRoot modelRoot = iterator.next();
							AllActivityModifier aam = new AllActivityModifier(
									modelRoot, elementsToParse.get(
											modelRoot).toArray(),
									new NullProgressMonitor());
							aam.processAllActivities(IAllActivityModifier.PARSE);
						}
					}
				});
			}
		}

		/**
		 * An element is "realized" when its containing component is realized,
		 * or (for bridges) when its external entity is realized.
		 */
		private boolean isRealizedElement(NonRootModelElement element) {
			// is realized if contained in a component that
			// is realized
			UUID componentId = IdAssigner.NULL_UUID;
			if (element instanceof Operation_c) {
				componentId = ((Operation_c) element)
						.Getcontainingcomponentid();
			}
			if (element instanceof Bridge_c) {
				componentId = ((Bridge_c) element).Getcontainingcomponentid();
			}
			if (element instanceof Function_c) {
				componentId = ((Function_c) element).Getcontainingcomponentid();
			}
			if (element instanceof RequiredOperation_c) {
				componentId = ((RequiredOperation_c) element).Getcomponentid();
			}
			if (element instanceof ProvidedOperation_c) {
				componentId = ((ProvidedOperation_c) element).Getcomponentid();
			}
			if (componentId.equals(IdAssigner.NULL_UUID)) {
				return false;
			}
			Component_c component = (Component_c) element.getModelRoot()
					.getInstanceList(Component_c.class).get(componentId);
			if (component != null && component.getIsrealized()) {
				return true;
			}
			// otherwise it can be realized if a bridge with an EE
			// that has its realized path value set
			// NOTE(review): getOneS_EEOnR19 could presumably return null here —
			// confirm the R19 association always yields an EE for a bridge.
			if (element instanceof Bridge_c) {
				ExternalEntity_c ee = ExternalEntity_c
						.getOneS_EEOnR19((Bridge_c) element);
				return ee.getIsrealized();
			}
			return false;
		}

		/** Synchronous messages are operations, bridges, and functions. */
		private boolean isSynchronousMessage(NonRootModelElement element) {
			if (element instanceof Operation_c) {
				return true;
			}
			if (element instanceof Bridge_c) {
				return true;
			}
			if (element instanceof Function_c) {
				return true;
			}
			if (element instanceof RequiredOperation_c) {
				return true;
			}
			if (element instanceof ProvidedOperation_c) {
				return true;
			}
			return false;
		}
	}

	/**
	 * Re-creates a batch of placeholder files inside a single workspace
	 * operation, scheduled under a MultiRule combining each file's re-create rule.
	 * @see PlaceHolderManager#reWritePlaceHolders(ArrayList list)
	 */
	static class PlaceHolderFileReWriter implements Runnable{
		List filesToReWrite;

		PlaceHolderFileReWriter(ArrayList placeHolderFiles){
			// Defensive copy so later mutation of the caller's list has no effect.
			filesToReWrite = (List)placeHolderFiles.clone();
		}

		public void run(){
			IWorkspace workspace = ResourcesPlugin.getWorkspace();
			IResourceRuleFactory ruleFactory = workspace.getRuleFactory();
			ISchedulingRule[] rules = new ISchedulingRule[filesToReWrite.size()];
			for (int i = 0; i < filesToReWrite.size(); i++) {
				rules[i] = ((PlaceHolderFileProxy)filesToReWrite.get(i)).getReCreateRule(ruleFactory);
			}
			MultiRule batchUpdationRule = new MultiRule(rules);
			try {
				workspace.run(new IWorkspaceRunnable(){
					public void run(IProgressMonitor monitor){
						monitor.setTaskName("Updating Place holders");
						reWriteAll(filesToReWrite);
					}
				}, batchUpdationRule, 0, null);
			} catch (CoreException e) {
				TextPlugin.logError("Error while recreating place holder with new root id", e); //$NON-NLS-1$
			}
		}

		private static void reWriteAll(List finalList){
			for (int i = 0; i < finalList.size(); i++) {
				try{
					((PlaceHolderFileProxy)finalList.get(i)).reCreate();
				}catch (CoreException e) {
					TextPlugin.logError("Error while recreating place holder with new root id", e); //$NON-NLS-1$
				}
			}
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl.engine; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.atomic.AtomicLong; import org.apache.camel.AsyncProcessor; import org.apache.camel.Exchange; import org.apache.camel.ExtendedCamelContext; import org.apache.camel.ExtendedExchange; import org.apache.camel.RuntimeCamelException; import org.apache.camel.StaticService; import org.apache.camel.spi.AsyncProcessorAwaitManager; import org.apache.camel.spi.ExchangeFormatter; import org.apache.camel.spi.ReactiveExecutor; import org.apache.camel.support.ExchangeHelper; import org.apache.camel.support.MessageHelper; import org.apache.camel.support.processor.DefaultExchangeFormatter; import org.apache.camel.support.service.ServiceSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DefaultAsyncProcessorAwaitManager extends ServiceSupport implements AsyncProcessorAwaitManager, StaticService { private static final Logger LOG = LoggerFactory.getLogger(DefaultAsyncProcessorAwaitManager.class); private 
final AsyncProcessorAwaitManager.Statistics statistics = new UtilizationStatistics(); private final AtomicLong blockedCounter = new AtomicLong(); private final AtomicLong interruptedCounter = new AtomicLong(); private final AtomicLong totalDuration = new AtomicLong(); private final AtomicLong minDuration = new AtomicLong(); private final AtomicLong maxDuration = new AtomicLong(); private final AtomicLong meanDuration = new AtomicLong(); private final Map<Exchange, AwaitThread> inflight = new ConcurrentHashMap<>(); private final ExchangeFormatter exchangeFormatter; private boolean interruptThreadsWhileStopping = true; public DefaultAsyncProcessorAwaitManager() { // setup exchange formatter to be used for message history dump DefaultExchangeFormatter formatter = new DefaultExchangeFormatter(); formatter.setShowExchangeId(true); formatter.setMultiline(true); formatter.setShowHeaders(true); formatter.setStyle(DefaultExchangeFormatter.OutputStyle.Fixed); this.exchangeFormatter = formatter; } /** * Calls the async version of the processor's process method and waits * for it to complete before returning. This can be used by {@link AsyncProcessor} * objects to implement their sync version of the process method. * <p/> * <b>Important:</b> This method is discouraged to be used, as its better to invoke the asynchronous * {@link AsyncProcessor#process(org.apache.camel.Exchange, org.apache.camel.AsyncCallback)} method, whenever possible. 
* * @param processor the processor * @param exchange the exchange */ @Override public void process(final AsyncProcessor processor, final Exchange exchange) { CountDownLatch latch = new CountDownLatch(1); processor.process(exchange, doneSync -> countDown(exchange, latch)); if (latch.getCount() > 0) { await(exchange, latch); } } public void await(Exchange exchange, CountDownLatch latch) { ReactiveExecutor reactiveExecutor = exchange.getContext().adapt(ExtendedCamelContext.class).getReactiveExecutor(); // Early exit for pending reactive queued work do { if (latch.getCount() <= 0) { return; } } while (reactiveExecutor.executeFromQueue()); if (LOG.isTraceEnabled()) { LOG.trace("Waiting for asynchronous callback before continuing for exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } try { if (statistics.isStatisticsEnabled()) { blockedCounter.incrementAndGet(); } inflight.put(exchange, new AwaitThreadEntry(Thread.currentThread(), exchange, latch)); latch.await(); if (LOG.isTraceEnabled()) { LOG.trace("Asynchronous callback received, will continue routing exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } } catch (InterruptedException e) { if (LOG.isTraceEnabled()) { LOG.trace("Interrupted while waiting for callback, will continue routing exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } exchange.setException(e); } finally { AwaitThread thread = inflight.remove(exchange); if (statistics.isStatisticsEnabled() && thread != null) { long time = thread.getWaitDuration(); long total = totalDuration.get() + time; totalDuration.set(total); if (time < minDuration.get()) { minDuration.set(time); } else if (time > maxDuration.get()) { maxDuration.set(time); } // update mean long count = blockedCounter.get(); long mean = count > 0 ? 
total / count : 0; meanDuration.set(mean); } } } public void countDown(Exchange exchange, CountDownLatch latch) { if (LOG.isTraceEnabled()) { LOG.trace("Asynchronous callback received for exchangeId: {}", exchange.getExchangeId()); } latch.countDown(); } @Override public int size() { return inflight.size(); } @Override public Collection<AwaitThread> browse() { return Collections.unmodifiableCollection(inflight.values()); } @Override public void interrupt(String exchangeId) { // need to find the exchange with the given exchange id Exchange found = null; for (AsyncProcessorAwaitManager.AwaitThread entry : browse()) { Exchange exchange = entry.getExchange(); if (exchangeId.equals(exchange.getExchangeId())) { found = exchange; break; } } if (found != null) { interrupt(found); } } @Override public void interrupt(Exchange exchange) { AwaitThreadEntry entry = (AwaitThreadEntry) inflight.get(exchange); if (entry != null) { try { StringBuilder sb = new StringBuilder(); sb.append("Interrupted while waiting for asynchronous callback, will release the following blocked thread which was waiting for exchange to finish processing with exchangeId: "); sb.append(exchange.getExchangeId()); sb.append("\n"); sb.append(dumpBlockedThread(entry)); // dump a route stack trace of the exchange String routeStackTrace = MessageHelper.dumpMessageHistoryStacktrace(exchange, exchangeFormatter, false); if (routeStackTrace != null) { sb.append(routeStackTrace); } LOG.warn(sb.toString()); } catch (Exception e) { throw RuntimeCamelException.wrapRuntimeCamelException(e); } finally { if (statistics.isStatisticsEnabled()) { interruptedCounter.incrementAndGet(); } exchange.setException(new RejectedExecutionException("Interrupted while waiting for asynchronous callback for exchangeId: " + exchange.getExchangeId())); exchange.adapt(ExtendedExchange.class).setInterrupted(true); entry.getLatch().countDown(); } } } @Override public boolean isInterruptThreadsWhileStopping() { return 
interruptThreadsWhileStopping; } @Override public void setInterruptThreadsWhileStopping(boolean interruptThreadsWhileStopping) { this.interruptThreadsWhileStopping = interruptThreadsWhileStopping; } @Override public Statistics getStatistics() { return statistics; } @Override protected void doStop() throws Exception { Collection<AwaitThread> threads = browse(); int count = threads.size(); if (count > 0) { LOG.warn("Shutting down while there are still {} inflight threads currently blocked.", count); StringBuilder sb = new StringBuilder(); for (AwaitThread entry : threads) { sb.append(dumpBlockedThread(entry)); } if (isInterruptThreadsWhileStopping()) { LOG.warn("The following threads are blocked and will be interrupted so the threads are released:\n{}", sb); for (AwaitThread entry : threads) { try { interrupt(entry.getExchange()); } catch (Throwable e) { LOG.warn("Error while interrupting thread: " + entry.getBlockedThread().getName() + ". This exception is ignored.", e); } } } else { LOG.warn("The following threads are blocked, and may reside in the JVM:\n{}", sb); } } else { LOG.debug("Shutting down with no inflight threads."); } inflight.clear(); } private static String dumpBlockedThread(AwaitThread entry) { StringBuilder sb = new StringBuilder(); sb.append("\n"); sb.append("Blocked Thread\n"); sb.append("---------------------------------------------------------------------------------------------------------------------------------------\n"); sb.append(style("Id:")).append(entry.getBlockedThread().getId()).append("\n"); sb.append(style("Name:")).append(entry.getBlockedThread().getName()).append("\n"); sb.append(style("RouteId:")).append(safeNull(entry.getRouteId())).append("\n"); sb.append(style("NodeId:")).append(safeNull(entry.getNodeId())).append("\n"); sb.append(style("Duration:")).append(entry.getWaitDuration()).append(" msec.\n"); return sb.toString(); } private static String style(String label) { return String.format("\t%-20s", label); } private static 
String safeNull(Object value) { return value != null ? value.toString() : ""; } private static final class AwaitThreadEntry implements AwaitThread { private final Thread thread; private final Exchange exchange; private final CountDownLatch latch; private final long start; private AwaitThreadEntry(Thread thread, Exchange exchange, CountDownLatch latch) { this.thread = thread; this.exchange = exchange; this.latch = latch; this.start = System.currentTimeMillis(); } @Override public Thread getBlockedThread() { return thread; } @Override public Exchange getExchange() { return exchange; } @Override public long getWaitDuration() { return System.currentTimeMillis() - start; } @Override public String getRouteId() { return ExchangeHelper.getAtRouteId(exchange); } @Override public String getNodeId() { return exchange.adapt(ExtendedExchange.class).getHistoryNodeId(); } public CountDownLatch getLatch() { return latch; } @Override public String toString() { return "AwaitThreadEntry[name=" + thread.getName() + ", exchangeId=" + exchange.getExchangeId() + "]"; } } /** * Represents utilization statistics */ private final class UtilizationStatistics implements AsyncProcessorAwaitManager.Statistics { private boolean statisticsEnabled; @Override public long getThreadsBlocked() { return blockedCounter.get(); } @Override public long getThreadsInterrupted() { return interruptedCounter.get(); } @Override public long getTotalDuration() { return totalDuration.get(); } @Override public long getMinDuration() { return minDuration.get(); } @Override public long getMaxDuration() { return maxDuration.get(); } @Override public long getMeanDuration() { return meanDuration.get(); } @Override public void reset() { blockedCounter.set(0); interruptedCounter.set(0); totalDuration.set(0); minDuration.set(0); maxDuration.set(0); meanDuration.set(0); } @Override public boolean isStatisticsEnabled() { return statisticsEnabled; } @Override public void setStatisticsEnabled(boolean statisticsEnabled) { 
this.statisticsEnabled = statisticsEnabled; } @Override public String toString() { return String.format("AsyncProcessAwaitManager utilization[blocked=%s, interrupted=%s, total=%s min=%s, max=%s, mean=%s]", getThreadsBlocked(), getThreadsInterrupted(), getTotalDuration(), getMinDuration(), getMaxDuration(), getMeanDuration()); } } }
/* * The MIT License (MIT) * * Copyright (c) 1998-2015 Aspose Pty Ltd. * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package com.aspose.words.maven.utils; import com.aspose.words.maven.AsposeMavenProjectWizardIterator; import com.aspose.words.maven.artifacts.Metadata; import com.aspose.words.maven.artifacts.ObjectFactory; import com.aspose.words.maven.examples.AsposeExamplePanel; import com.aspose.words.maven.examples.CustomMutableTreeNode; import org.w3c.dom.Document; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import java.util.List; import javax.swing.*; import javax.xml.bind.JAXBContext; import javax.xml.bind.Unmarshaller; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.stream.StreamSource; import javax.xml.xpath.*; import java.io.*; import java.net.*; import java.util.ArrayList; import java.util.LinkedList; import java.util.Queue; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.TreePath; import javax.xml.bind.JAXBException; import org.openide.WizardDescriptor; import org.openide.awt.StatusDisplayer; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileUtil; import org.openide.util.Exceptions; import org.openide.util.NbBundle; import org.openide.xml.XMLUtil; import org.w3c.dom.Node; /* * @author Adeel Ilyas <adeel.ilyas@aspose.com> * Date: 12/21/2015 * */ /** * * @author Adeel */ public class AsposeMavenProjectManager { private boolean examplesNotAvailable; private File projectDir = null; /** * * @return */ public File getProjectDir() { return projectDir; } private boolean examplesDefinitionAvailable; /** * * @param Url * @return * @throws IOException */ public String readURLContents(String Url) throws IOException { URL url = new URL(Url); URLConnection con = url.openConnection(); InputStream in = con.getInputStream(); String encoding = con.getContentEncoding(); encoding = encoding == null ? 
"UTF-8" : encoding; ByteArrayOutputStream baos = new ByteArrayOutputStream(); byte[] buf = new byte[8192]; int len = 0; while ((len = in.read(buf)) != -1) { baos.write(buf, 0, len); } String body = new String(baos.toByteArray(), encoding); return body; } /** * * @param productMavenRepositoryUrl * @return */ public Metadata getProductMavenDependency(String productMavenRepositoryUrl) { final String mavenMetaDataFileName = "maven-metadata.xml"; Metadata data = null; try { String productMavenInfo; productMavenInfo = readURLContents(productMavenRepositoryUrl + mavenMetaDataFileName); JAXBContext jaxbContext = JAXBContext.newInstance(ObjectFactory.class); Unmarshaller unmarshaller; unmarshaller = jaxbContext.createUnmarshaller(); data = (Metadata) unmarshaller.unmarshal(new StreamSource(new StringReader(productMavenInfo))); String remoteArtifactFile = productMavenRepositoryUrl + data.getVersioning().getLatest() + "/" + data.getArtifactId() + "-" + data.getVersioning().getLatest(); if (!remoteFileExists(remoteArtifactFile + ".jar")) { AsposeConstants.println("Not Exists"); data.setClassifier(getResolveSupportedJDK(remoteArtifactFile)); } else { AsposeConstants.println("Exists"); } } catch (IOException | JAXBException ex) { Exceptions.printStackTrace(ex); data = null; } return data; } /** * * @param ProductURL * @return */ public String getResolveSupportedJDK(String ProductURL) { String supportedJDKs[] = {"jdk17", "jdk16", "jdk15", "jdk14", "jdk18"}; String classifier = null; for (String jdkCheck : supportedJDKs) { if (remoteFileExists(ProductURL + "-" + jdkCheck + ".jar")) { AsposeConstants.println("Exists"); classifier = jdkCheck; break; } else { AsposeConstants.println("Not Exists"); } } return classifier; } /** * * @param URLName * @return */ public boolean remoteFileExists(String URLName) { try { HttpURLConnection.setFollowRedirects(false); // note : you may also need // HttpURLConnection.setInstanceFollowRedirects(false) HttpURLConnection con = (HttpURLConnection) 
new URL(URLName).openConnection(); con.setRequestMethod("HEAD"); return (con.getResponseCode() == HttpURLConnection.HTTP_OK); } catch (Exception e) { Exceptions.printStackTrace(e); return false; } } /** * * @param asposeAPI * @return */ public AbstractTask retrieveAsposeAPIMavenTask(final AsposeJavaAPI asposeAPI) { return new AbstractTask(NbBundle.getMessage(AsposeMavenProjectWizardIterator.class, "AsposeManager.progressTitle")) { @Override public void run() { String progressMsg = NbBundle.getMessage(AsposeMavenProjectWizardIterator.class, "AsposeManager.progressMessage"); p.progress(progressMsg); StatusDisplayer.getDefault().setStatusText(progressMsg); p.start(100); p.progress(50); retrieveAsposeMavenDependencies(); StatusDisplayer.getDefault().setStatusText(progressMsg); p.progress(100); p.finish(); } }; } /** * * @param asposeAPI * @return */ public AbstractTask createDownloadExamplesTask(final AsposeJavaAPI asposeAPI) { return new AbstractTask(NbBundle.getMessage(AsposeMavenProjectWizardIterator.class, "AsposeManager.progressExamplesTitle")) { @Override public void run() { String downloadExamplesMessage = NbBundle.getMessage(AsposeMavenProjectWizardIterator.class, "AsposeManager.downloadExamplesMessage"); p.progress(downloadExamplesMessage); StatusDisplayer.getDefault().setStatusText(downloadExamplesMessage); p.start(100); p.progress(50); asposeAPI.downloadExamples(p); p.progress(downloadExamplesMessage); p.progress(100); p.finish(); } }; } /** * * @param asposeAPI * @param panel * @return */ public Runnable populateExamplesTask(final AsposeJavaAPI asposeAPI, final AsposeExamplePanel panel) { return new Runnable() { @Override public void run() { final CustomMutableTreeNode top = new CustomMutableTreeNode(""); DefaultTreeModel model = (DefaultTreeModel) panel.getExamplesTree().getModel(); model.setRoot(top); model.reload(top); AsposeJavaAPI component = AsposeWordsJavaAPI.getInstance(); if (component.isExamplesDefinitionAvailable()) { 
populateExamplesTree(component, top, panel); } top.setTopTreeNodeText(AsposeConstants.API_NAME); model.setRoot(top); model.reload(top); panel.getExamplesTree().expandPath(new TreePath(top.getPath())); } }; } /** * * @return */ public boolean retrieveAsposeMavenDependencies() { try { getAsposeProjectMavenDependencies().clear(); AsposeJavaAPI component = AsposeWordsJavaAPI.getInstance(); Metadata productMavenDependency = getProductMavenDependency(component.get_mavenRepositoryURL()); if (productMavenDependency != null) { getAsposeProjectMavenDependencies().add(productMavenDependency); } } catch (Exception rex) { Exceptions.printStackTrace(rex); return false; } return !getAsposeProjectMavenDependencies().isEmpty(); } /** * * @return */ public static boolean isInternetConnected() { try { InetAddress address = InetAddress.getByName(AsposeConstants.INTERNET_CONNNECTIVITY_PING_URL); if (address == null) { return false; } } catch (UnknownHostException e) { Exceptions.printStackTrace(e); return false; } return true; } /** * * @param title * @param message * @param buttons * @param icon * @return */ public static int showMessage(String title, String message, int buttons, int icon) { int result = JOptionPane.showConfirmDialog(null, message, title, buttons, icon); return result; } private Document getXmlDocument(String mavenPomXmlfile) throws ParserConfigurationException, SAXException, IOException { DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); Document pomDocument = docBuilder.parse(mavenPomXmlfile); return pomDocument; } /** * * @param dependencyName * @return */ public String getDependencyVersionFromPOM(String dependencyName) { try { String mavenPomXmlfile = projectDir.getPath() + File.separator + AsposeConstants.MAVEN_POM_XML; Document pomDocument = getXmlDocument(mavenPomXmlfile); XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); String 
expression = "//version[ancestor::dependency/artifactId[text()='" + dependencyName + "']]"; XPathExpression xPathExpr = xpath.compile(expression); NodeList nl = (NodeList) xPathExpr.evaluate(pomDocument, XPathConstants.NODESET); if (nl != null && nl.getLength() > 0) { return nl.item(0).getTextContent(); } } catch (IOException | ParserConfigurationException | SAXException | XPathExpressionException e) { Exceptions.printStackTrace(e); } return null; } /** * * @return */ public String getAsposeHomePath() { return System.getProperty("user.home") + File.separator + "aspose" + File.separator; } /** * * @param sourceLocation * @param targetLocation * @throws IOException */ public static void copyDirectory(String sourceLocation, String targetLocation) throws IOException { checkAndCreateFolder(targetLocation); copyDirectory(new File(sourceLocation + File.separator), new File(targetLocation + File.separator)); } /** * * @param sourceLocation * @param targetLocation * @throws IOException */ public static void copyDirectory(File sourceLocation, File targetLocation) throws IOException { if (sourceLocation.isDirectory()) { if (!targetLocation.exists()) { targetLocation.mkdir(); } String[] children = sourceLocation.list(); for (String children1 : children) { copyDirectory(new File(sourceLocation, children1), new File(targetLocation, children1)); } } else { OutputStream out; try (InputStream in = new FileInputStream(sourceLocation)) { out = new FileOutputStream(targetLocation); // Copy the bits from instream to outstream byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } } out.close(); } } /** * * @param folderPath */ public static void checkAndCreateFolder(String folderPath) { File folder = new File(folderPath); if (!folder.exists()) { folder.mkdirs(); } } // Singleton instance private static AsposeMavenProjectManager asposeMavenProjectManager = new AsposeMavenProjectManager(); /** * * @return */ public static 
AsposeMavenProjectManager getInstance() { return asposeMavenProjectManager; } /** * * @param wiz * @return */ public static AsposeMavenProjectManager initialize(WizardDescriptor wiz) { asposeMavenProjectManager = new AsposeMavenProjectManager(); asposeMavenProjectManager.projectDir = FileUtil.normalizeFile((File) wiz.getProperty("projdir")); return asposeMavenProjectManager; } private AsposeMavenProjectManager() { } /** * * @return */ public static List<Metadata> getAsposeProjectMavenDependencies() { return asposeProjectMavenDependencies; } /** * */ public static void clearAsposeProjectMavenDependencies() { asposeProjectMavenDependencies.clear(); } private static final List<Metadata> asposeProjectMavenDependencies = new ArrayList<Metadata>(); /** * * @param addTheseDependencies */ public void addMavenDependenciesInProject(NodeList addTheseDependencies) { String mavenPomXmlfile = projectDir.getPath() + File.separator + AsposeConstants.MAVEN_POM_XML; try { Document pomDocument = getXmlDocument(mavenPomXmlfile); Node dependenciesNode = pomDocument.getElementsByTagName("dependencies").item(0); if (addTheseDependencies != null && addTheseDependencies.getLength() > 0) { for (int n = 0; n < addTheseDependencies.getLength(); n++) { String artifactId = addTheseDependencies.item(n).getFirstChild().getNextSibling().getNextSibling().getNextSibling().getFirstChild().getNodeValue(); XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); String expression = "//artifactId[text()='" + artifactId + "']"; XPathExpression xPathExpr = xpath.compile(expression); Node dependencyAlreadyExist = (Node) xPathExpr.evaluate(pomDocument, XPathConstants.NODE); if (dependencyAlreadyExist != null) { Node dependencies = pomDocument.getElementsByTagName("dependencies").item(0); dependencies.removeChild(dependencyAlreadyExist.getParentNode()); } Node importedNode = pomDocument.importNode(addTheseDependencies.item(n), true); 
dependenciesNode.appendChild(importedNode); } } removeEmptyLinesfromDOM(pomDocument); writeToPOM(pomDocument); } catch (ParserConfigurationException | SAXException | XPathExpressionException | IOException ex) { Exceptions.printStackTrace(ex); } } /** * * @param addTheseRepositories */ public void addMavenRepositoriesInProject(NodeList addTheseRepositories) { String mavenPomXmlfile = projectDir.getPath() + File.separator + AsposeConstants.MAVEN_POM_XML; try { Document pomDocument = getXmlDocument(mavenPomXmlfile); Node repositoriesNode = pomDocument.getElementsByTagName("repositories").item(0); if (addTheseRepositories != null && addTheseRepositories.getLength() > 0) { for (int n = 0; n < addTheseRepositories.getLength(); n++) { String repositoryId = addTheseRepositories.item(n).getFirstChild().getNextSibling().getFirstChild().getNodeValue(); XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); String expression = "//id[text()='" + repositoryId + "']"; XPathExpression xPathExpr = xpath.compile(expression); Boolean repositoryAlreadyExist = (Boolean) xPathExpr.evaluate(pomDocument, XPathConstants.BOOLEAN); if (!repositoryAlreadyExist) { Node importedNode = pomDocument.importNode(addTheseRepositories.item(n), true); repositoriesNode.appendChild(importedNode); } } } removeEmptyLinesfromDOM(pomDocument); writeToPOM(pomDocument); } catch (XPathExpressionException | SAXException | ParserConfigurationException | IOException ex) { Exceptions.printStackTrace(ex); } } /** * * @param pomDocument * @throws IOException */ public void writeToPOM(Document pomDocument) throws IOException { FileObject projectRoot = FileUtil.toFileObject(projectDir); FileObject fo = FileUtil.createData(projectRoot, AsposeConstants.MAVEN_POM_XML); try (OutputStream out = fo.getOutputStream()) { XMLUtil.write(pomDocument, out, "UTF-8"); } } /** * * @param mavenPomXmlfile * @param excludeGroup * @return */ public NodeList getDependenciesFromPOM(String 
mavenPomXmlfile, String excludeGroup) { try { Document pomDocument = getXmlDocument(mavenPomXmlfile); XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); String expression = "//dependency[child::groupId[text()!='" + excludeGroup + "']]"; XPathExpression xPathExpr = xpath.compile(expression); NodeList nl = (NodeList) xPathExpr.evaluate(pomDocument, XPathConstants.NODESET); if (nl != null && nl.getLength() > 0) { return nl; } } catch (IOException | ParserConfigurationException | SAXException | XPathExpressionException e) { Exceptions.printStackTrace(e); } return null; } /** * * @param mavenPomXmlfile * @param excludeURL * @return */ public NodeList getRepositoriesFromPOM(String mavenPomXmlfile, String excludeURL) { try { Document pomDocument = getXmlDocument(mavenPomXmlfile); XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); String expression = "//repository[child::url[not(starts-with(.,'" + excludeURL + "'))]]"; XPathExpression xPathExpr = xpath.compile(expression); NodeList nl = (NodeList) xPathExpr.evaluate(pomDocument, XPathConstants.NODESET); if (nl != null && nl.getLength() > 0) { return nl; } } catch (IOException | ParserConfigurationException | SAXException | XPathExpressionException e) { Exceptions.printStackTrace(e); } return null; } private void removeEmptyLinesfromDOM(Document doc) throws XPathExpressionException { XPath xp = XPathFactory.newInstance().newXPath(); NodeList nl = (NodeList) xp.evaluate("//text()[normalize-space(.)='']", doc, XPathConstants.NODESET); for (int i = 0; i < nl.getLength(); ++i) { Node node = nl.item(i); node.getParentNode().removeChild(node); } } /** * * @param asposeComponent * @param top * @param panel */ public void populateExamplesTree(AsposeJavaAPI asposeComponent, CustomMutableTreeNode top, AsposeExamplePanel panel) { String examplesFullPath = asposeComponent.getLocalRepositoryPath() + File.separator + 
AsposeConstants.GITHUB_EXAMPLES_SOURCE_LOCATION; File directory = new File(examplesFullPath); panel.getExamplesTree().removeAll(); top.setExPath(examplesFullPath); Queue<Object[]> queue = new LinkedList<>(); queue.add(new Object[]{null, directory}); while (!queue.isEmpty()) { Object[] _entry = queue.remove(); File childFile = ((File) _entry[1]); CustomMutableTreeNode parentItem = ((CustomMutableTreeNode) _entry[0]); if (childFile.isDirectory()) { if (parentItem != null) { CustomMutableTreeNode child = new CustomMutableTreeNode(FormatExamples.formatTitle(childFile.getName())); child.setExPath(childFile.getAbsolutePath()); child.setFolder(true); parentItem.add(child); parentItem = child; } else { parentItem = top; } for (File f : childFile.listFiles()) { queue.add(new Object[]{parentItem, f}); } } else if (childFile.isFile()) { String title = FormatExamples.formatTitle(childFile.getName()); CustomMutableTreeNode child = new CustomMutableTreeNode(title); child.setFolder(false); parentItem.add(child); } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.markup.html.link;

import java.awt.Shape;
import java.awt.geom.PathIterator;

import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.image.Image;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.markup.repeater.RepeatingView;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;

/**
 * A client-side image map implementation which allows you to "attach" the map to any existing
 * {@link Image} component.
 *
 * @since 1.5
 */
public class ClientSideImageMap extends Panel
{
	private static final long serialVersionUID = 1L;

	/** HTML shape names written into the {@code shape} attribute of each generated area tag. */
	private static final String CIRCLE = "circle";
	private static final String POLYGON = "polygon";
	private static final String RECTANGLE = "rect";

	/** Holds one child link per map area. Final: assigned exactly once in the constructor. */
	private final RepeatingView areas;

	/**
	 * Constructs a client-side image map which is "attached" to the given {@link Image} component.
	 *
	 * @param id
	 *            the component id
	 * @param image
	 *            the image component
	 */
	public ClientSideImageMap(String id, Image image)
	{
		super(id);
		setOutputMarkupId(true);
		// The map's "name" attribute mirrors this component's markup id so the
		// image's "usemap" attribute (set below) can reference it.
		add(AttributeModifier.replace("name", new PropertyModel<String>(this, "markupId")));
		image.add(AttributeModifier.replace("usemap", new UsemapModel()));
		areas = new RepeatingView("area");
		add(areas);
	}

	/**
	 * Formats circle coordinates as {@code x,y,radius}.
	 */
	private String circleCoordinates(int x, int y, int radius)
	{
		return x + "," + y + "," + radius;
	}

	/**
	 * Joins the given coordinates into a comma-separated list.
	 */
	private String polygonCoordinates(int... coordinates)
	{
		final StringBuilder buffer = new StringBuilder();
		for (int i = 0; i < coordinates.length; i++)
		{
			buffer.append(coordinates[i]);
			if (i < (coordinates.length - 1))
			{
				buffer.append(',');
			}
		}
		return buffer.toString();
	}

	/**
	 * Formats rectangle coordinates as {@code x1,y1,x2,y2}.
	 */
	private String rectangleCoordinates(int x1, int y1, int x2, int y2)
	{
		return x1 + "," + y1 + "," + x2 + "," + y2;
	}

	/**
	 * Flattens an arbitrary {@link Shape} into a comma-separated polygon coordinate list by walking
	 * its path with a flatness of 1.0 (curves become line segments).
	 */
	private String shapeCoordinates(Shape shape)
	{
		final StringBuilder sb = new StringBuilder();
		final PathIterator pi = shape.getPathIterator(null, 1.0);
		final float[] coords = new float[6];
		// Remembers the last SEG_MOVETO point so SEG_CLOSE can close the ring explicitly.
		final float[] lastMove = new float[2];
		while (!pi.isDone())
		{
			switch (pi.currentSegment(coords))
			{
				case PathIterator.SEG_MOVETO :
					if (sb.length() != 0)
					{
						sb.append(',');
					}
					sb.append(Math.round(coords[0]));
					sb.append(',');
					sb.append(Math.round(coords[1]));
					lastMove[0] = coords[0];
					lastMove[1] = coords[1];
					break;
				case PathIterator.SEG_LINETO :
					if (sb.length() != 0)
					{
						sb.append(',');
					}
					sb.append(Math.round(coords[0]));
					sb.append(',');
					sb.append(Math.round(coords[1]));
					break;
				case PathIterator.SEG_CLOSE :
					if (sb.length() != 0)
					{
						sb.append(',');
					}
					sb.append(Math.round(lastMove[0]));
					sb.append(',');
					sb.append(Math.round(lastMove[1]));
					break;
			}
			pi.next();
		}
		return sb.toString();
	}

	@Override
	protected void onComponentTag(ComponentTag tag)
	{
		// Must be attached to an map tag
		checkComponentTag(tag, "map");
		super.onComponentTag(tag);
	}

	/**
	 * Generates a unique id string. This makes it easy to add items to be rendered w/out having to
	 * worry about generating unique id strings in your code.
	 *
	 * @return unique child id
	 */
	public String newChildId()
	{
		return areas.newChildId();
	}

	/**
	 * Adds a circle-shaped area centered at (x,y) with radius r.
	 *
	 * @param link
	 *            the link
	 * @param x
	 *            x coordinate of the center of the circle
	 * @param y
	 *            y coordinate of center
	 * @param radius
	 *            the radius
	 * @return this
	 */
	public ClientSideImageMap addCircleArea(AbstractLink link, int x, int y, int radius)
	{
		areas.add(link);
		link.add(new Area(circleCoordinates(x, y, radius), CIRCLE));
		return this;
	}

	/**
	 * Adds a polygon-shaped area defined by coordinates.
	 *
	 * @param link
	 *            the link
	 * @param coordinates
	 *            the coordinates for the polygon
	 * @return This
	 */
	public ClientSideImageMap addPolygonArea(AbstractLink link, int... coordinates)
	{
		areas.add(link);
		link.add(new Area(polygonCoordinates(coordinates), POLYGON));
		return this;
	}

	/**
	 * Adds a rectangular-shaped area.
	 *
	 * @param link
	 *            the link
	 * @param x1
	 *            top left x
	 * @param y1
	 *            top left y
	 * @param x2
	 *            bottom right x
	 * @param y2
	 *            bottom right y
	 * @return this
	 */
	public ClientSideImageMap addRectangleArea(AbstractLink link, int x1, int y1, int x2, int y2)
	{
		areas.add(link);
		link.add(new Area(rectangleCoordinates(x1, y1, x2, y2), RECTANGLE));
		return this;
	}

	/**
	 * Adds an area defined by a shape object.
	 *
	 * @param link
	 *            the link
	 * @param shape
	 *            the shape
	 * @return this
	 */
	public ClientSideImageMap addShapeArea(AbstractLink link, Shape shape)
	{
		areas.add(link);
		link.add(new Area(shapeCoordinates(shape), POLYGON));
		return this;
	}

	/**
	 * Encapsulates the concept of an &lt;area&gt; within a &lt;map&gt;.
	 */
	private static class Area extends Behavior
	{
		private static final long serialVersionUID = 1L;

		private final String coordinates;
		private final String type;

		protected Area(final String coordinates, final String type)
		{
			this.coordinates = coordinates;
			this.type = type;
		}

		@Override
		public void onComponentTag(Component component, ComponentTag tag)
		{
			tag.put("shape", type);
			tag.put("coords", coordinates);
		}
	}

	/**
	 * Model producing the image's {@code usemap} value: "#" + this map's markup id. Non-static
	 * because it reads the enclosing component's markup id.
	 */
	private class UsemapModel extends Model<String>
	{
		private static final long serialVersionUID = 1L;

		@Override
		public String getObject()
		{
			return "#" + getMarkupId();
		}
	}
}
/*
 * ----------------------------------------
 *        Jenkins Test Tracker
 * ----------------------------------------
 *        Produced by Dan Grew
 *                 2016
 * ----------------------------------------
 */
package uk.dangrew.jtt.desktop.mc.sides.jobs;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.Before;
import org.junit.Test;

import javafx.scene.control.TreeItem;
import uk.dangrew.jtt.desktop.styling.SystemStyling;
import uk.dangrew.jtt.model.jobs.BuildResultStatus;
import uk.dangrew.jtt.model.jobs.JenkinsJob;
import uk.dangrew.jtt.model.jobs.JenkinsJobImpl;
import uk.dangrew.jtt.model.storage.database.JenkinsDatabase;
import uk.dangrew.jtt.model.storage.database.TestJenkinsDatabaseImpl;
import uk.dangrew.kode.launch.TestApplication;

/**
 * {@link BuildResultStatusLayout} test.
 */
public class BuildResultStatusLayoutTest {

   private JenkinsJob jobA;
   private JenkinsJob jobB;
   private JenkinsJob jobC;
   private JenkinsJob jobD;
   private JenkinsJob jobE;
   private JenkinsJob jobF;
   
   private JenkinsDatabase database;
   private JobProgressTree tree;
   private BuildResultStatusLayout systemUnderTest;
   
   @Before public void initialiseSystemUnderTest(){
      TestApplication.startPlatform();
      SystemStyling.initialise();
      database = new TestJenkinsDatabaseImpl();
      jobA = new JenkinsJobImpl( "JobA" );
      jobB = new JenkinsJobImpl( "JobB" );
      jobC = new JenkinsJobImpl( "JobC" );
      jobD = new JenkinsJobImpl( "JobD" );
      jobE = new JenkinsJobImpl( "JobE" );
      jobF = new JenkinsJobImpl( "JobF" );
      tree = new JobProgressTree( database );
      systemUnderTest = new BuildResultStatusLayout( tree );
   }//End Method
   
   @Test( expected = IllegalArgumentException.class ) public void shouldNotAcceptNullJobsList() {
      systemUnderTest.reconstructTree( null );
   }//End Method
   
   @Test public void shouldLayoutJobsAccordingToBuildResultStatusAlphabetically() {
      jobA.setBuildStatus( BuildResultStatus.ABORTED );
      jobB.setBuildStatus( BuildResultStatus.FAILURE );
      jobC.setBuildStatus( BuildResultStatus.NOT_BUILT );
      jobD.setBuildStatus( BuildResultStatus.SUCCESS );
      jobE.setBuildStatus( BuildResultStatus.UNKNOWN );
      jobF.setBuildStatus( BuildResultStatus.UNSTABLE );
      
      systemUnderTest.reconstructTree( Arrays.asList( jobA, jobB, jobC, jobD, jobE, jobF ) );
      
      // Each status branch should hold exactly the one job given that status.
      List< TreeItem< JobProgressTreeItem > > children = systemUnderTest.getBranch( BuildResultStatus.ABORTED ).getChildren();
      assertThat( children, hasSize( 1 ) );
      assertThat( children.get( 0 ).getValue().getJenkinsJob(), is( jobA ) );
      
      children = systemUnderTest.getBranch( BuildResultStatus.FAILURE ).getChildren();
      assertThat( children, hasSize( 1 ) );
      assertThat( children.get( 0 ).getValue().getJenkinsJob(), is( jobB ) );
      
      children = systemUnderTest.getBranch( BuildResultStatus.NOT_BUILT ).getChildren();
      assertThat( children, hasSize( 1 ) );
      assertThat( children.get( 0 ).getValue().getJenkinsJob(), is( jobC ) );
      
      children = systemUnderTest.getBranch( BuildResultStatus.SUCCESS ).getChildren();
      assertThat( children, hasSize( 1 ) );
      assertThat( children.get( 0 ).getValue().getJenkinsJob(), is( jobD ) );
      
      children = systemUnderTest.getBranch( BuildResultStatus.UNKNOWN ).getChildren();
      assertThat( children, hasSize( 1 ) );
      assertThat( children.get( 0 ).getValue().getJenkinsJob(), is( jobE ) );
      
      children = systemUnderTest.getBranch( BuildResultStatus.UNSTABLE ).getChildren();
      assertThat( children, hasSize( 1 ) );
      assertThat( children.get( 0 ).getValue().getJenkinsJob(), is( jobF ) );
   }//End Method
   
   // Renamed from shouldGroupdJobsOfSameStatus: typo fix in test name only.
   @Test public void shouldGroupJobsOfSameStatus() {
      jobA.setBuildStatus( BuildResultStatus.SUCCESS );
      jobB.setBuildStatus( BuildResultStatus.SUCCESS );
      jobC.setBuildStatus( BuildResultStatus.SUCCESS );
      
      systemUnderTest.reconstructTree( Arrays.asList( jobA, jobB, jobC ) );
      
      List< TreeItem< JobProgressTreeItem > > children = systemUnderTest.getBranch( BuildResultStatus.SUCCESS ).getChildren();
      assertThat( children, hasSize( 3 ) );
      assertThat( children.get( 0 ).getValue().getJenkinsJob(), is( jobA ) );
      assertThat( children.get( 1 ).getValue().getJenkinsJob(), is( jobB ) );
      assertThat( children.get( 2 ).getValue().getJenkinsJob(), is( jobC ) );
   }//End Method
   
   @SuppressWarnings("unchecked")//contains with generic
   @Test public void shouldLayoutStatusesAlphabetically() {
      systemUnderTest.reconstructTree( new ArrayList<>() );
      assertThat( tree.getRoot().getChildren(), contains( 
               systemUnderTest.getBranch( BuildResultStatus.ABORTED ),
               systemUnderTest.getBranch( BuildResultStatus.FAILURE ),
               systemUnderTest.getBranch( BuildResultStatus.NOT_BUILT ),
               systemUnderTest.getBranch( BuildResultStatus.SUCCESS ),
               systemUnderTest.getBranch( BuildResultStatus.UNKNOWN ),
               systemUnderTest.getBranch( BuildResultStatus.UNSTABLE )
      ) );
   }//End Method
   
   @Test public void shouldProvideBranchesWithItems(){
      for ( BuildResultStatus status : BuildResultStatus.values() ) {
         TreeItem< JobProgressTreeItem > branch = systemUnderTest.getBranch( status );
         assertThat( branch.getValue(), is( instanceOf( JobProgressTreeItemBranch.class ) ) );
         JobProgressTreeItemBranch branchItem = ( JobProgressTreeItemBranch ) branch.getValue();
         assertThat( branchItem.getName(), is( status.name() ) );
      }
   }//End Method
   
   @Test( expected = IllegalStateException.class ) public void shouldNotAddJobIfNotConstructed() {
      systemUnderTest.add( jobA );
   }//End Method
   
   @Test( expected = IllegalStateException.class ) public void shouldNotRemoveJobIfNotConstructed() {
      systemUnderTest.remove( jobA );
   }//End Method
   
   @Test( expected = IllegalStateException.class ) public void shouldNotUpdateJobIfNotConstructed() {
      systemUnderTest.update( jobA );
   }//End Method
   
   @Test public void shouldAddJobNotInTree() {
      systemUnderTest.reconstructTree( new ArrayList<>() );
      systemUnderTest.add( jobA );
      assertJobPresent( jobA, 0 );
   }//End Method
   
   @Test public void shouldIgnoreAddJobAlreadyInTree() {
      systemUnderTest.reconstructTree( Arrays.asList( jobA ) );
      systemUnderTest.add( jobA );
      assertJobPresent( jobA, 0 );
      assertBranchSize( BuildResultStatus.NOT_BUILT, 1 );
   }//End Method
   
   @Test public void shouldRemoveJobFromTree() {
      systemUnderTest.reconstructTree( Arrays.asList( jobA, jobB, jobC ) );
      systemUnderTest.remove( jobB );
      assertJobPresent( jobA, 0 );
      assertJobPresent( jobC, 1 );
      assertBranchSize( BuildResultStatus.NOT_BUILT, 2 );
   }//End Method
   
   @Test public void shouldIgnoreRemoveJobIfNotPresent() {
      systemUnderTest.reconstructTree( Arrays.asList( jobA, jobB, jobC ) );
      systemUnderTest.remove( jobD );
      assertJobPresent( jobA, 0 );
      assertJobPresent( jobB, 1 );
      assertJobPresent( jobC, 2 );
      assertBranchSize( BuildResultStatus.NOT_BUILT, 3 );
   }//End Method
   
   @Test public void shouldUpdateJobPositionWhenStatusChanges() {
      systemUnderTest.reconstructTree( Arrays.asList( jobA, jobB, jobC ) );
      jobB.setBuildStatus( BuildResultStatus.SUCCESS );
      systemUnderTest.update( jobB );
      // jobB has moved to the SUCCESS branch; remaining NOT_BUILT jobs shuffle up.
      assertJobPresent( jobA, 0 );
      assertJobPresent( jobC, 1 );
      assertJobPresent( jobB, 0 );
      assertBranchSize( BuildResultStatus.NOT_BUILT, 2 );
      assertBranchSize( BuildResultStatus.SUCCESS, 1 );
   }//End Method
   
   @Test public void shouldIgnoreUpdateJobWhenStatusChangesIfNotPresent() {
      systemUnderTest.reconstructTree( Arrays.asList( jobA, jobB, jobC ) );
      jobD.setBuildStatus( BuildResultStatus.SUCCESS );
      systemUnderTest.update( jobD );
      assertJobPresent( jobA, 0 );
      assertJobPresent( jobB, 1 );
      assertJobPresent( jobC, 2 );
      assertBranchSize( BuildResultStatus.NOT_BUILT, 3 );
      assertBranchSize( BuildResultStatus.SUCCESS, 0 );
   }//End Method
   
   @Test public void shouldContainJenkinsJob(){
      systemUnderTest.reconstructTree( Arrays.asList( jobB, jobD ) );
      assertThat( systemUnderTest.contains( jobA ), is( false ) );
      assertThat( systemUnderTest.contains( jobB ), is( true ) );
      assertThat( systemUnderTest.contains( jobC ), is( false ) );
      assertThat( systemUnderTest.contains( jobD ), is( true ) );
      assertThat( systemUnderTest.contains( jobE ), is( false ) );
      assertThat( systemUnderTest.contains( jobF ), is( false ) );
   }//End Method
   
   /**
    * Method to assert that the {@link JenkinsJob} is present in the correct branch and location.
    * @param job the {@link JenkinsJob} in question.
    * @param index the index in the children.
    */
   private void assertJobPresent( JenkinsJob job, int index ) {
      TreeItem< JobProgressTreeItem > branch = systemUnderTest.getBranch( job.buildProperty().get().getValue() );
      assertThat( branch.getChildren().get( index ).getValue().getJenkinsJob(), is( job ) );
   }//End Method
   
   /**
    * Method to assert that the {@link BuildResultStatus} branch has the correct size.
    * @param status the {@link BuildResultStatus} for the branch.
    * @param size the size the branch should be.
    */
   private void assertBranchSize( BuildResultStatus status, int size ) {
      TreeItem< JobProgressTreeItem > branch = systemUnderTest.getBranch( status );
      assertThat( branch.getChildren(), hasSize( size ) );
   }//End Method

}//End Class
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.execution;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.trino.Session;
import io.trino.execution.StateMachine.StateChangeListener;
import io.trino.server.BasicQueryInfo;
import io.trino.server.BasicQueryStats;
import io.trino.spi.ErrorCode;
import io.trino.spi.QueryId;
import io.trino.spi.memory.MemoryPoolId;
import org.joda.time.DateTime;

import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.OptionalDouble;

import static com.google.common.base.Preconditions.checkState;
import static io.trino.SystemSessionProperties.QUERY_PRIORITY;
import static io.trino.execution.QueryState.FAILED;
import static io.trino.execution.QueryState.FINISHED;
import static io.trino.execution.QueryState.QUEUED;
import static io.trino.execution.QueryState.RUNNING;
import static io.trino.server.DynamicFilterService.DynamicFiltersStats;
import static io.trino.testing.TestingSession.testSessionBuilder;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;

/**
 * Test double for {@link ManagedQueryExecution}: a hand-rolled mock whose state, memory and CPU
 * usage can be driven directly by tests. Constructed only through the nested
 * {@link MockManagedQueryExecutionBuilder}. Not thread-safe.
 */
public class MockManagedQueryExecution
        implements ManagedQueryExecution
{
    // Listeners registered via addStateChangeListener; notified on every fireStateChange().
    private final List<StateChangeListener<QueryState>> listeners = new ArrayList<>();
    private final Session session;
    // Mutable so tests can simulate resource consumption while RUNNING.
    private DataSize memoryUsage;
    private Duration cpuUsage;
    // Starts QUEUED; transitions are driven by startWaitingForResources()/complete()/fail().
    private QueryState state = QUEUED;
    private Throwable failureCause;

    // Private: instances are built via MockManagedQueryExecutionBuilder.
    private MockManagedQueryExecution(String queryId, int priority, DataSize memoryUsage, Duration cpuUsage)
    {
        requireNonNull(queryId, "queryId is null");
        // Priority is conveyed through the session's QUERY_PRIORITY system property.
        this.session = testSessionBuilder()
                .setQueryId(QueryId.valueOf(queryId))
                .setSystemProperty(QUERY_PRIORITY, String.valueOf(priority))
                .build();
        this.memoryUsage = requireNonNull(memoryUsage, "memoryUsage is null");
        this.cpuUsage = requireNonNull(cpuUsage, "cpuUsage is null");
    }

    /**
     * Adds the given delta (milliseconds) to the mock's accumulated CPU time.
     * Only valid while the query is RUNNING.
     */
    public void consumeCpuTimeMillis(long cpuTimeDeltaMillis)
    {
        checkState(state == RUNNING, "cannot consume CPU in a non-running state");
        long newCpuTime = cpuUsage.toMillis() + cpuTimeDeltaMillis;
        this.cpuUsage = new Duration(newCpuTime, MILLISECONDS);
    }

    /**
     * Overrides the mock's reported memory usage. Only valid while RUNNING.
     */
    public void setMemoryUsage(DataSize memoryUsage)
    {
        checkState(state == RUNNING, "cannot set memory usage in a non-running state");
        this.memoryUsage = memoryUsage;
    }

    /**
     * Moves the query to FINISHED, releasing its memory, and notifies listeners.
     */
    public void complete()
    {
        memoryUsage = DataSize.ofBytes(0);
        state = FINISHED;
        fireStateChange();
    }

    /** Returns the cause passed to {@link #fail}, or null if the query has not failed. */
    public Throwable getThrowable()
    {
        return failureCause;
    }

    @Override
    public Session getSession()
    {
        return session;
    }

    @Override
    public Optional<ErrorCode> getErrorCode()
    {
        return Optional.empty();
    }

    // Returns fixed placeholder stats; only state, memoryUsage and cpuUsage reflect the
    // mock's live values. The remaining numeric arguments are arbitrary sentinels.
    @Override
    public BasicQueryInfo getBasicQueryInfo()
    {
        return new BasicQueryInfo(
                new QueryId("test"),
                session.toSessionRepresentation(),
                Optional.empty(),
                state,
                new MemoryPoolId("test"),
                !state.isDone(),
                URI.create("http://test"),
                "SELECT 1",
                Optional.empty(),
                Optional.empty(),
                new BasicQueryStats(
                        new DateTime(1),
                        new DateTime(2),
                        new Duration(3, NANOSECONDS),
                        new Duration(4, NANOSECONDS),
                        new Duration(5, NANOSECONDS),
                        6,
                        7,
                        8,
                        9,
                        DataSize.ofBytes(14),
                        15,
                        DataSize.ofBytes(13),
                        16.0,
                        memoryUsage,
                        memoryUsage,
                        DataSize.ofBytes(19),
                        DataSize.ofBytes(20),
                        cpuUsage,
                        new Duration(22, NANOSECONDS),
                        false,
                        ImmutableSet.of(),
                        OptionalDouble.empty()),
                null,
                null,
                Optional.empty());
    }

    // Fully populated QueryInfo with arbitrary sentinel values; positional arguments must
    // stay in exactly this order to match the QueryStats/QueryInfo constructors.
    @Override
    public QueryInfo getFullQueryInfo()
    {
        return new QueryInfo(
                new QueryId("test"),
                session.toSessionRepresentation(),
                state,
                new MemoryPoolId("test"),
                !state.isDone(),
                URI.create("http://test"),
                ImmutableList.of(),
                "SELECT 1",
                Optional.empty(),
                new QueryStats(
                        new DateTime(1),
                        new DateTime(2),
                        new DateTime(3),
                        new DateTime(4),
                        new Duration(6, NANOSECONDS),
                        new Duration(5, NANOSECONDS),
                        new Duration(31, NANOSECONDS),
                        new Duration(41, NANOSECONDS),
                        new Duration(7, NANOSECONDS),
                        new Duration(8, NANOSECONDS),
                        new Duration(100, NANOSECONDS),
                        new Duration(200, NANOSECONDS),
                        9,
                        10,
                        11,
                        12,
                        13,
                        15,
                        30,
                        16,
                        17.0,
                        DataSize.ofBytes(18),
                        DataSize.ofBytes(19),
                        DataSize.ofBytes(20),
                        DataSize.ofBytes(21),
                        DataSize.ofBytes(22),
                        DataSize.ofBytes(30),
                        DataSize.ofBytes(23),
                        DataSize.ofBytes(24),
                        DataSize.ofBytes(25),
                        DataSize.ofBytes(26),
                        true,
                        new Duration(20, NANOSECONDS),
                        new Duration(21, NANOSECONDS),
                        new Duration(23, NANOSECONDS),
                        false,
                        ImmutableSet.of(),
                        DataSize.ofBytes(241),
                        251,
                        new Duration(24, NANOSECONDS),
                        DataSize.ofBytes(242),
                        252,
                        DataSize.ofBytes(25),
                        26,
                        DataSize.ofBytes(27),
                        28,
                        DataSize.ofBytes(29),
                        30,
                        DataSize.ofBytes(31),
                        ImmutableList.of(),
                        DynamicFiltersStats.EMPTY,
                        ImmutableList.of()),
                Optional.empty(),
                Optional.empty(),
                Optional.empty(),
                ImmutableMap.of(),
                ImmutableSet.of(),
                ImmutableMap.of(),
                ImmutableMap.of(),
                ImmutableSet.of(),
                Optional.empty(),
                false,
                "",
                Optional.empty(),
                null,
                null,
                ImmutableList.of(),
                ImmutableSet.of(),
                Optional.empty(),
                ImmutableList.of(),
                ImmutableList.of(),
                state.isDone(),
                Optional.empty(),
                Optional.empty());
    }

    @Override
    public DataSize getUserMemoryReservation()
    {
        return memoryUsage;
    }

    @Override
    public DataSize getTotalMemoryReservation()
    {
        return memoryUsage;
    }

    @Override
    public Duration getTotalCpuTime()
    {
        return cpuUsage;
    }

    @Override
    public QueryState getState()
    {
        return state;
    }

    // In this mock, starting to wait for resources jumps straight to RUNNING.
    @Override
    public void startWaitingForResources()
    {
        state = RUNNING;
        fireStateChange();
    }

    // Records the failure cause, releases memory, moves to FAILED and notifies listeners.
    @Override
    public void fail(Throwable cause)
    {
        memoryUsage = DataSize.ofBytes(0);
        state = FAILED;
        failureCause = cause;
        fireStateChange();
    }

    @Override
    public boolean isDone()
    {
        return getState().isDone();
    }

    @Override
    public void addStateChangeListener(StateChangeListener<QueryState> stateChangeListener)
    {
        listeners.add(stateChangeListener);
    }

    // Synchronously notifies all registered listeners of the current state.
    private void fireStateChange()
    {
        for (StateChangeListener<QueryState> listener : listeners) {
            listener.stateChanged(state);
        }
    }

    /**
     * Builder for {@link MockManagedQueryExecution}; all fields have sensible defaults.
     */
    public static class MockManagedQueryExecutionBuilder
    {
        private DataSize memoryUsage = DataSize.ofBytes(0);
        private Duration cpuUsage = new Duration(0, MILLISECONDS);
        private int priority = 1;
        private String queryId = "query_id";

        public MockManagedQueryExecutionBuilder() {}

        public MockManagedQueryExecutionBuilder withInitialMemoryUsage(DataSize memoryUsage)
        {
            this.memoryUsage = memoryUsage;
            return this;
        }

        public MockManagedQueryExecutionBuilder withInitialCpuUsageMillis(long cpuUsageMillis)
        {
            this.cpuUsage = new Duration(cpuUsageMillis, MILLISECONDS);
            return this;
        }

        public MockManagedQueryExecutionBuilder withPriority(int priority)
        {
            this.priority = priority;
            return this;
        }

        public MockManagedQueryExecutionBuilder withQueryId(String queryId)
        {
            this.queryId = queryId;
            return this;
        }

        public MockManagedQueryExecution build()
        {
            return new MockManagedQueryExecution(queryId, priority, memoryUsage, cpuUsage);
        }
    }
}
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.completion;

import com.intellij.application.options.CodeStyle;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInsight.lookup.impl.JavaElementLookupRenderer;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.util.ClassConditionKey;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiUtilCore;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Collections;
import java.util.Set;

/**
 * Completion lookup element referencing a {@link PsiClass}. The class is held through a smart
 * pointer so the element survives PSI reparses; identity is primarily the qualified name.
 *
 * @author peter
 */
public class JavaPsiClassReferenceElement extends LookupItem<Object> implements TypedLookupItem {
  public static final ClassConditionKey<JavaPsiClassReferenceElement> CLASS_CONDITION_KEY = ClassConditionKey.create(JavaPsiClassReferenceElement.class);
  // Smart pointer: survives document changes, may fail to restore (see getObject()).
  private final SmartPsiElementPointer<PsiClass> myClass;
  private final String myQualifiedName;
  // When set, overrides the lookup string and presentable name (see getLookupString()).
  private String myForcedPresentableName;
  private final String myPackageDisplayName;
  // Substitutor used to render/instantiate generics; EMPTY means raw/unparameterized.
  private PsiSubstitutor mySubstitutor = PsiSubstitutor.EMPTY;

  public JavaPsiClassReferenceElement(PsiClass psiClass) {
    super(psiClass.getName(), psiClass.getName());
    myQualifiedName = psiClass.getQualifiedName();
    myClass = SmartPointerManager.getInstance(psiClass.getProject()).createSmartPsiElementPointer(psiClass);
    setInsertHandler(AllClassesGetter.TRY_SHORTENING);
    setTailType(TailType.NONE);
    myPackageDisplayName = PsiFormatUtil.getPackageDisplayName(psiClass);
  }

  public String getForcedPresentableName() {
    return myForcedPresentableName;
  }

  @Nullable
  @Override
  public PsiType getType() {
    PsiClass psiClass = getObject();
    return JavaPsiFacade.getElementFactory(psiClass.getProject()).createType(psiClass, getSubstitutor());
  }

  public PsiSubstitutor getSubstitutor() {
    return mySubstitutor;
  }

  public JavaPsiClassReferenceElement setSubstitutor(PsiSubstitutor substitutor) {
    mySubstitutor = substitutor;
    return this;
  }

  // Forced presentable name takes precedence over the name passed to the super constructor.
  @NotNull
  @Override
  public String getLookupString() {
    if (myForcedPresentableName != null) {
      return myForcedPresentableName;
    }
    return super.getLookupString();
  }

  @Override
  public Set<String> getAllLookupStrings() {
    if (myForcedPresentableName != null) {
      return Collections.singleton(myForcedPresentableName);
    }

    return super.getAllLookupStrings();
  }

  public void setForcedPresentableName(String forcedPresentableName) {
    myForcedPresentableName = forcedPresentableName;
  }

  // Restores the class from the smart pointer; throws if the PSI element is gone.
  @NotNull
  @Override
  public PsiClass getObject() {
    PsiClass element = myClass.getElement();
    if (element == null) throw new IllegalStateException("Cannot restore from " + myClass);
    return element;
  }

  @Override
  public boolean isValid() {
    return myClass.getElement() != null;
  }

  // Equality by qualified name when available, otherwise by smart-pointer identity.
  @Override
  public boolean equals(final Object o) {
    if (this == o) return true;
    if (!(o instanceof JavaPsiClassReferenceElement)) return false;

    final JavaPsiClassReferenceElement that = (JavaPsiClassReferenceElement)o;

    if (myQualifiedName != null) {
      return myQualifiedName.equals(that.myQualifiedName);
    }

    return Comparing.equal(myClass, that.myClass);
  }

  public String getQualifiedName() {
    return myQualifiedName;
  }

  // Arbitrary non-zero constant for anonymous/local classes without a qualified name.
  @Override
  public int hashCode() {
    final String s = myQualifiedName;
    return s == null ? 239 : s.hashCode();
  }

  @Override
  public void renderElement(LookupElementPresentation presentation) {
    renderClassItem(presentation, this, getObject(), false, " " + myPackageDisplayName, mySubstitutor);
  }

  /**
   * Shared renderer for class-like lookup items: sets icon, item text, strikeout and tail text
   * (anonymous "{...}" marker and/or type-parameter list when no substitution applies).
   */
  public static void renderClassItem(LookupElementPresentation presentation, LookupElement item, PsiClass psiClass, boolean diamond,
                                     @NotNull String locationString, @NotNull PsiSubstitutor substitutor) {
    if (!(psiClass instanceof PsiTypeParameter)) {
      presentation.setIcon(DefaultLookupItemRenderer.getRawIcon(item, presentation.isReal()));
    }

    boolean strikeout = JavaElementLookupRenderer.isToStrikeout(item);
    presentation.setItemText(getName(psiClass, item, diamond, substitutor));
    presentation.setStrikeout(strikeout);

    String tailText = locationString;

    if (item instanceof PsiTypeLookupItem) {
      // Prefix "{...}" when completing an anonymous class body or an array initializer.
      if (((PsiTypeLookupItem)item).isIndicateAnonymous() &&
          (psiClass.isInterface() || psiClass.hasModifierProperty(PsiModifier.ABSTRACT)) ||
          ((PsiTypeLookupItem)item).isAddArrayInitializer()) {
        tailText = "{...}" + tailText;
      }
    }
    if (substitutor == PsiSubstitutor.EMPTY && !diamond && psiClass.getTypeParameters().length > 0) {
      // No substitution available: show the raw type-parameter names, e.g. "<K, V>".
      String separator = "," + (showSpaceAfterComma(psiClass) ? " " : "");
      tailText = "<" + StringUtil.join(psiClass.getTypeParameters(), PsiTypeParameter::getName, separator) + ">" + tailText;
    }
    presentation.setTailText(tailText, true);
  }

  public String getLocationString() {
    return " " + myPackageDisplayName;
  }

  // Item text: forced name > diamond "<>" > substituted type parameters > plain name.
  private static String getName(final PsiClass psiClass, final LookupElement item, boolean diamond, @NotNull PsiSubstitutor substitutor) {
    String forced = item instanceof JavaPsiClassReferenceElement ? ((JavaPsiClassReferenceElement)item).getForcedPresentableName() :
                    item instanceof PsiTypeLookupItem ? ((PsiTypeLookupItem)item).getForcedPresentableName() :
                    null;
    if (forced != null) {
      return forced;
    }

    String name = PsiUtilCore.getName(psiClass);

    if (diamond) {
      return name + "<>";
    }

    if (substitutor != PsiSubstitutor.EMPTY) {
      final PsiTypeParameter[] params = psiClass.getTypeParameters();
      if (params.length > 0) {
        return name + formatTypeParameters(substitutor, params);
      }
    }

    return StringUtil.notNullize(name);
  }

  // Renders "<A, B>" from the substitutor; nested generics collapse to "Outer<...>";
  // returns "" if any parameter has no substitution.
  @NotNull
  private static String formatTypeParameters(@NotNull final PsiSubstitutor substitutor, final PsiTypeParameter[] params) {
    final boolean space = showSpaceAfterComma(params[0]);
    StringBuilder buffer = new StringBuilder();
    buffer.append("<");
    for(int i = 0; i < params.length; i++){
      final PsiTypeParameter param = params[i];
      final PsiType type = substitutor.substitute(param);
      if(type == null){
        return "";
      }
      if (type instanceof PsiClassType && ((PsiClassType)type).getParameters().length > 0) {
        buffer.append(((PsiClassType)type).rawType().getPresentableText()).append("<...>");
      } else {
        buffer.append(type.getPresentableText());
      }

      if(i < params.length - 1) {
        buffer.append(",");
        if (space) {
          buffer.append(" ");
        }
      }
    }
    buffer.append(">");
    return buffer.toString();
  }

  // Respects the file's code style setting for spaces after commas in type lists.
  private static boolean showSpaceAfterComma(PsiClass element) {
    return CodeStyle.getLanguageSettings(element.getContainingFile(), JavaLanguage.INSTANCE).SPACE_AFTER_COMMA;
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.fields; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.lookup.FieldLookup; import org.elasticsearch.search.sort.SortOrder; import 
org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.ReadableDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.function.Function; import static java.util.Collections.singleton; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.client.Requests.refreshRequest; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class SearchFieldsIT extends ESIntegTestCase { @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class, CustomScriptPlugin.class); } public static class CustomScriptPlugin extends MockScriptPlugin { @Override @SuppressWarnings("unchecked") protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>(); scripts.put("doc['num1'].value", vars -> { Map<?, ?> doc = (Map) vars.get("doc"); ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1"); return num1.getValue(); }); scripts.put("doc['num1'].value * factor", vars -> { Map<?, ?> doc = (Map) vars.get("doc"); ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1"); Double factor = (Double) vars.get("factor"); return num1.getValue() * factor; }); scripts.put("doc['date'].date.millis", vars -> { Map<?, ?> doc = (Map) vars.get("doc"); ScriptDocValues.Dates dates = (ScriptDocValues.Dates) doc.get("date"); return dates.getValue().getMillis(); }); scripts.put("_fields['num1'].value", vars -> fieldsScript(vars, "num1")); scripts.put("_fields._uid.value", vars -> fieldsScript(vars, "_uid")); scripts.put("_fields._id.value", vars -> fieldsScript(vars, "_id")); scripts.put("_fields._type.value", vars -> fieldsScript(vars, "_type")); scripts.put("_source.obj1", vars -> sourceScript(vars, "obj1")); scripts.put("_source.obj1.test", vars -> sourceScript(vars, "obj1.test")); scripts.put("_source.obj1.test", vars -> sourceScript(vars, "obj1.test")); scripts.put("_source.obj2", vars -> sourceScript(vars, "obj2")); scripts.put("_source.obj2.arr2", vars -> sourceScript(vars, "obj2.arr2")); scripts.put("_source.arr3", vars -> sourceScript(vars, "arr3")); scripts.put("return null", vars -> null); scripts.put("doc['l'].values", vars -> docScript(vars, "l")); scripts.put("doc['ml'].values", vars -> docScript(vars, "ml")); scripts.put("doc['d'].values", vars -> docScript(vars, "d")); scripts.put("doc['md'].values", vars -> 
docScript(vars, "md")); scripts.put("doc['s'].values", vars -> docScript(vars, "s")); scripts.put("doc['ms'].values", vars -> docScript(vars, "ms")); return scripts; } @SuppressWarnings("unchecked") static Object fieldsScript(Map<String, Object> vars, String fieldName) { Map<?, ?> fields = (Map) vars.get("_fields"); FieldLookup fieldLookup = (FieldLookup) fields.get(fieldName); return fieldLookup.getValue(); } @SuppressWarnings("unchecked") static Object sourceScript(Map<String, Object> vars, String path) { Map<String, Object> source = (Map) vars.get("_source"); return XContentMapValues.extractValue(path, source); } @SuppressWarnings("unchecked") static Object docScript(Map<String, Object> vars, String fieldName) { Map<?, ?> doc = (Map) vars.get("doc"); ScriptDocValues<?> values = (ScriptDocValues<?>) doc.get(fieldName); return values.getValues(); } } public void testStoredFields() throws Exception { createIndex("test"); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() .startObject("field2").field("type", "text").field("store", false).endObject() .startObject("field3").field("type", "text").field("store", true).endObject() .endObject().endObject().endObject().string(); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("field1", "value1") .field("field2", "value2") .field("field3", "value3") .endObject()).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field1").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); 
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));

    // field2 is not stored, check that it is not extracted from source.
    searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field2").execute().actionGet();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(0));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field2"), nullValue());

    searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field3").execute().actionGet();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));

    // wildcard pattern matches stored fields only
    searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").execute().actionGet();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));

    // wildcard mixed with explicit names; non-stored field2 is still absent
    searchResponse = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addStoredField("*3")
            .addStoredField("field1")
            .addStoredField("field2")
            .get();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));

    searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").execute().actionGet();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));

    searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3").execute().actionGet();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));

    // "*" selects all stored fields but suppresses _source unless asked for explicitly
    searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").execute().actionGet();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getSourceAsMap(), nullValue());
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));

    // requesting "_source" explicitly alongside "*" brings the source back
    searchResponse = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addStoredField("*")
            .addStoredField("_source")
            .get();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getSourceAsMap(), notNullValue());
    assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
}

/**
 * Script fields computed from doc values ("doc[...]"), stored fields ("_fields[...]")
 * and date millis, over three docs flushed into separate segments. Requesting script
 * fields means hits carry no _source (continued in the next chunk).
 */
public void testScriptDocAndFields() throws Exception {
    createIndex("test");

    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("num1").field("type", "double").field("store", true).endObject()
            .endObject().endObject().endObject().string();

    client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();

    client().prepareIndex("test", "type1", "1")
            .setSource(jsonBuilder().startObject()
                    .field("test", "value beck")
                    .field("num1", 1.0f)
                    .field("date", "1970-01-01T00:00:00")
                    .endObject())
            .execute().actionGet();
    client().admin().indices().prepareFlush().execute().actionGet();
    client().prepareIndex("test", "type1", "2")
            .setSource(jsonBuilder().startObject()
                    .field("test", "value beck")
                    .field("num1", 2.0f)
                    .field("date", "1970-01-01T00:00:25")
                    .endObject())
            .get();
    client().admin().indices().prepareFlush().execute().actionGet();
    client().prepareIndex("test", "type1", "3")
            .setSource(jsonBuilder().startObject()
                    .field("test", "value beck")
                    .field("num1", 3.0f)
                    .field("date", "1970-01-01T00:02:00")
                    .endObject())
            .get();
    client().admin().indices().refresh(refreshRequest()).actionGet();

    logger.info("running doc['num1'].value");
    SearchResponse response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value",
                    Collections.emptyMap()))
            .addScriptField("sNum1_field", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields['num1'].value", Collections.emptyMap()))
            .addScriptField("date1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap()))
            .execute().actionGet();

    assertNoFailures(response);

    assertThat(response.getHits().getTotalHits(), equalTo(3L));
    // script fields suppress _source loading
    assertFalse(response.getHits().getAt(0).hasSource());

    assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
    Set<String> fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
    assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0));
    assertThat(response.getHits().getAt(0).getFields().get("sNum1_field").getValues().get(0), equalTo(1.0));
    assertThat(response.getHits().getAt(0).getFields().get("date1").getValues().get(0), equalTo(0L));

    assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
    // NOTE(review): reads getAt(0) again — probably meant getAt(1); the check re-verifies
    // hit 0's field-name set rather than hit 1's.
    fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
    assertThat(response.getHits().getAt(1).getFields().get("sNum1").getValues().get(0), equalTo(2.0));
    assertThat(response.getHits().getAt(1).getFields().get("sNum1_field").getValues().get(0), equalTo(2.0));
    assertThat(response.getHits().getAt(1).getFields().get("date1").getValues().get(0), equalTo(25000L));

    assertThat(response.getHits().getAt(2).getId(), equalTo("3"));
    // NOTE(review): same getAt(0)/getAt(2) mismatch as above.
    fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
    assertThat(response.getHits().getAt(2).getFields().get("sNum1").getValues().get(0), equalTo(3.0));
    assertThat(response.getHits().getAt(2).getFields().get("sNum1_field").getValues().get(0), equalTo(3.0));
    assertThat(response.getHits().getAt(2).getFields().get("date1").getValues().get(0),
            equalTo(120000L));

    logger.info("running doc['num1'].value * factor");
    Map<String, Object> params = MapBuilder.<String, Object>newMapBuilder().put("factor", 2.0).map();
    response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value * factor", params))
            .get();

    assertThat(response.getHits().getTotalHits(), equalTo(3L));
    assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
    fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(singleton("sNum1")));
    assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0));

    assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
    // NOTE(review): getAt(0) again — likely meant getAt(1).
    fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(singleton("sNum1")));
    assertThat(response.getHits().getAt(1).getFields().get("sNum1").getValues().get(0), equalTo(4.0));

    assertThat(response.getHits().getAt(2).getId(), equalTo("3"));
    // NOTE(review): getAt(0) again — likely meant getAt(2).
    fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(singleton("sNum1")));
    assertThat(response.getHits().getAt(2).getFields().get("sNum1").getValues().get(0), equalTo(6.0));
}

/**
 * Script fields over metadata stored fields (_uid/_id/_type) for a random number of
 * docs, requested individually and then all together in one search.
 */
public void testUidBasedScriptFields() throws Exception {
    prepareCreate("test").addMapping("type1", "num1", "type=long").execute().actionGet();

    int numDocs = randomIntBetween(1, 30);
    IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < numDocs; i++) {
        indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i))
                .setSource(jsonBuilder().startObject().field("num1", i).endObject());
    }
    indexRandom(true, indexRequestBuilders);

    // _uid script field: value is "<type>#<id>"
    SearchResponse response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .setSize(numDocs)
            .addScriptField("uid", new Script(ScriptType.INLINE,
                    CustomScriptPlugin.NAME, "_fields._uid.value", Collections.emptyMap()))
            .get();

    assertNoFailures(response);

    assertThat(response.getHits().getTotalHits(), equalTo((long)numDocs));
    for (int i = 0; i < numDocs; i++) {
        assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
        Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
        assertThat(fields, equalTo(singleton("uid")));
        assertThat(response.getHits().getAt(i).getFields().get("uid").getValue(), equalTo("type1#" + Integer.toString(i)));
    }

    // _id script field
    response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .setSize(numDocs)
            .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap()))
            .get();

    assertNoFailures(response);

    assertThat(response.getHits().getTotalHits(), equalTo((long)numDocs));
    for (int i = 0; i < numDocs; i++) {
        assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
        Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
        assertThat(fields, equalTo(singleton("id")));
        assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i)));
    }

    // _type script field
    response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .setSize(numDocs)
            .addScriptField("type", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap()))
            .get();

    assertNoFailures(response);

    assertThat(response.getHits().getTotalHits(), equalTo((long)numDocs));
    for (int i = 0; i < numDocs; i++) {
        assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
        Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
        assertThat(fields, equalTo(singleton("type")));
        assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo("type1"));
    }

    // all three metadata script fields in a single request
    response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .setSize(numDocs)
            .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap()))
            .addScriptField("uid", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._uid.value", Collections.emptyMap()))
            .addScriptField("type", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap()))
            .get();

    assertNoFailures(response);

    assertThat(response.getHits().getTotalHits(), equalTo((long)numDocs));
    for (int i = 0; i < numDocs; i++) {
        assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
        Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
        assertThat(fields, equalTo(newHashSet("uid", "type", "id")));
        assertThat(response.getHits().getAt(i).getFields().get("uid").getValue(), equalTo("type1#" + Integer.toString(i)));
        assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo("type1"));
        assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i)));
    }
}

/**
 * Script fields backed by _source paths: whole objects, nested scalar values and
 * arrays (continued in the next chunk).
 */
public void testScriptFieldUsingSource() throws Exception {
    createIndex("test");
    client().prepareIndex("test", "type1", "1")
            .setSource(jsonBuilder().startObject()
                    .startObject("obj1").field("test", "something").endObject()
                    .startObject("obj2").startArray("arr2").value("arr_value1").value("arr_value2").endArray().endObject()
                    .startArray("arr3").startObject().field("arr3_field1", "arr3_value1").endObject().endArray()
                    .endObject())
            .execute().actionGet();
    client().admin().indices().refresh(refreshRequest()).actionGet();

    SearchResponse response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addScriptField("s_obj1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj1", Collections.emptyMap()))
            .addScriptField("s_obj1_test", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj1.test", Collections.emptyMap()))
            .addScriptField("s_obj2", new
                    Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj2", Collections.emptyMap()))
            .addScriptField("s_obj2_arr2", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj2.arr2", Collections.emptyMap()))
            .addScriptField("s_arr3", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.arr3", Collections.emptyMap()))
            .get();

    assertThat("Failures " + Arrays.toString(response.getShardFailures()), response.getShardFailures().length, equalTo(0));

    assertThat(response.getHits().getAt(0).field("s_obj1_test").getValue().toString(), equalTo("something"));

    // whole object extracted from _source comes back as a Map
    Map<String, Object> sObj1 = response.getHits().getAt(0).field("s_obj1").getValue();
    assertThat(sObj1.get("test").toString(), equalTo("something"));
    assertThat(response.getHits().getAt(0).field("s_obj1_test").getValue().toString(), equalTo("something"));

    Map<String, Object> sObj2 = response.getHits().getAt(0).field("s_obj2").getValue();
    List<?> sObj2Arr2 = (List<?>) sObj2.get("arr2");
    assertThat(sObj2Arr2.size(), equalTo(2));
    assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
    assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));

    // the same array requested directly is flattened into the field's value list
    sObj2Arr2 = response.getHits().getAt(0).field("s_obj2_arr2").getValues();
    assertThat(sObj2Arr2.size(), equalTo(2));
    assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
    assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));

    List<?> sObj2Arr3 = response.getHits().getAt(0).field("s_arr3").getValues();
    assertThat(((Map<?, ?>) sObj2Arr3.get(0)).get("arr3_field1").toString(), equalTo("arr3_value1"));
}

/** A script that returns null still yields a field entry whose single value is null. */
public void testScriptFieldsForNullReturn() throws Exception {
    client().prepareIndex("test", "type1", "1")
            .setSource("foo", "bar")
            .setRefreshPolicy("true").get();

    SearchResponse response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addScriptField("test_script_1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return null", Collections.emptyMap()))
            .get();

    assertNoFailures(response);

    DocumentField fieldObj =
            response.getHits().getAt(0).field("test_script_1");
    assertThat(fieldObj, notNullValue());
    List<?> fieldValues = fieldObj.getValues();
    assertThat(fieldValues, hasSize(1));
    assertThat(fieldValues.get(0), nullValue());
}

// NOTE(review): indexes a nested document but asserts nothing — looks vestigial
// (partial fields were removed from the API); consider deleting or adding assertions.
public void testPartialFields() throws Exception {
    createIndex("test");

    client().prepareIndex("test", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject()
            .field("field1", "value1")
            .startObject("obj1")
            .startArray("arr1")
            .startObject().startObject("obj2").field("field2", "value21").endObject().endObject()
            .startObject().startObject("obj2").field("field2", "value22").endObject().endObject()
            .endArray()
            .endObject()
            .endObject())
            .execute().actionGet();

    client().admin().indices().prepareRefresh().execute().actionGet();
}

/**
 * Stored-field retrieval with _source disabled: every primitive field type
 * round-trips through its stored representation.
 */
public void testStoredFieldsWithoutSource() throws Exception {
    createIndex("test");

    String mapping = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type1")
            .startObject("_source")
            .field("enabled", false)
            .endObject()
            .startObject("properties")
            .startObject("byte_field")
            .field("type", "byte")
            .field("store", true)
            .endObject()
            .startObject("short_field")
            .field("type", "short")
            .field("store", true)
            .endObject()
            .startObject("integer_field")
            .field("type", "integer")
            .field("store", true)
            .endObject()
            .startObject("long_field")
            .field("type", "long")
            .field("store", true)
            .endObject()
            .startObject("float_field")
            .field("type", "float")
            .field("store", true)
            .endObject()
            .startObject("double_field")
            .field("type", "double")
            .field("store", true)
            .endObject()
            .startObject("date_field")
            .field("type", "date")
            .field("store", true)
            .endObject()
            .startObject("boolean_field")
            .field("type", "boolean")
            .field("store", true)
            .endObject()
            .startObject("binary_field")
            .field("type", "binary")
            .field("store", true)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();

    client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
            .field("byte_field", (byte) 1)
            .field("short_field", (short) 2)
            .field("integer_field", 3)
            .field("long_field", 4L)
            .field("float_field", 5.0f)
            .field("double_field", 6.0d)
            .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)))
            .field("boolean_field", true)
            .field("binary_field", Base64.getEncoder().encodeToString("testing text".getBytes("UTF-8")))
            .endObject()).execute().actionGet();

    client().admin().indices().prepareRefresh().execute().actionGet();

    SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery())
            .addStoredField("byte_field")
            .addStoredField("short_field")
            .addStoredField("integer_field")
            .addStoredField("long_field")
            .addStoredField("float_field")
            .addStoredField("double_field")
            .addStoredField("date_field")
            .addStoredField("boolean_field")
            .addStoredField("binary_field")
            .execute().actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    Set<String> fields = new HashSet<>(searchResponse.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
            "float_field", "double_field", "date_field", "boolean_field", "binary_field")));

    SearchHit searchHit = searchResponse.getHits().getAt(0);
    // byte and short come back stringified; wider numeric types keep their Java type
    assertThat(searchHit.getFields().get("byte_field").getValue().toString(), equalTo("1"));
    assertThat(searchHit.getFields().get("short_field").getValue().toString(), equalTo("2"));
    assertThat(searchHit.getFields().get("integer_field").getValue(), equalTo((Object) 3));
    assertThat(searchHit.getFields().get("long_field").getValue(), equalTo((Object) 4L));
    assertThat(searchHit.getFields().get("float_field").getValue(), equalTo((Object) 5.0f));
    assertThat(searchHit.getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
    String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC));
    assertThat(searchHit.getFields().get("date_field").getValue(), equalTo((Object) dateTime));
    assertThat(searchHit.getFields().get("boolean_field").getValue(), equalTo((Object) Boolean.TRUE));
    // NOTE(review): "UTF8" is a JDK alias of "UTF-8" — works, but inconsistent with the
    // charset name used when indexing above.
    assertThat(searchHit.getFields().get("binary_field").getValue(), equalTo(new BytesArray("testing text"
            .getBytes("UTF8"))));
}

/** Metadata fields (_routing) can be fetched as stored fields and are flagged as metadata. */
public void testSearchFieldsMetaData() throws Exception {
    client().prepareIndex("my-index", "my-type1", "1")
            .setRouting("1")
            .setSource(jsonBuilder().startObject().field("field1", "value").endObject())
            .setRefreshPolicy(IMMEDIATE)
            .get();

    SearchResponse searchResponse = client().prepareSearch("my-index")
            .setTypes("my-type1")
            .addStoredField("field1").addStoredField("_routing")
            .get();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    // field1 is not mapped as stored, so only _routing comes back
    assertThat(searchResponse.getHits().getAt(0).field("field1"), nullValue());
    assertThat(searchResponse.getHits().getAt(0).field("_routing").isMetadataField(), equalTo(true));
    assertThat(searchResponse.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1"));
}

/** Requesting a non-leaf (object) field as a stored field is rejected with HTTP 400. */
public void testSearchFieldsNonLeafField() throws Exception {
    client().prepareIndex("my-index", "my-type1", "1")
            .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject())
            .setRefreshPolicy(IMMEDIATE)
            .get();

    assertFailures(client().prepareSearch("my-index").setTypes("my-type1").addStoredField("field1"),
            RestStatus.BAD_REQUEST,
            containsString("field [field1] isn't a leaf field"));
}

/**
 * A stored leaf field nested several object levels deep is addressable by its full
 * dotted path, collecting one value per occurrence (continued in the next chunk).
 */
public void testGetFieldsComplexField() throws Exception {
    client().admin().indices().prepareCreate("my-index")
            .setSettings("index.refresh_interval", -1)
            .addMapping("doc", jsonBuilder()
                    .startObject()
                    .startObject("doc")
                    .startObject("properties")
                    .startObject("field1")
                    .field("type", "object")
                    .startObject("properties")
                    .startObject("field2")
                    .field("type", "object")
                    .startObject("properties")
                    .startObject("field3")
                    .field("type", "object")
                    .startObject("properties")
                    .startObject("field4")
                    .field("type", "text")
                    .field("store", true)
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject())
            .get();

    // two occurrences of the deeply nested field4 within one document
    BytesReference source = jsonBuilder().startObject()
            .startArray("field1")
            .startObject()
            .startObject("field2")
            .startArray("field3")
            .startObject()
            .field("field4", "value1")
            .endObject()
            .endArray()
            .endObject()
            .endObject()
            .startObject()
            .startObject("field2")
            .startArray("field3")
            .startObject()
            .field("field4", "value2")
            .endObject()
            .endArray()
            .endObject()
            .endObject()
            .endArray()
            .endObject().bytes();

    client().prepareIndex("my-index", "doc", "1").setRefreshPolicy(IMMEDIATE).setSource(source, XContentType.JSON).get();

    String field = "field1.field2.field3.field4";
    SearchResponse searchResponse = client().prepareSearch("my-index").addStoredField(field).get();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false));
    assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2));
    assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1"));
    assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2"));
}

// see #8203
// NOTE(review): method name has a typo ("Datat"); kept as-is to avoid renaming a test.
public void testSingleValueFieldDatatField() throws ExecutionException, InterruptedException {
    assertAcked(client().admin().indices().prepareCreate("test")
            .addMapping("type", "test_field", "type=keyword").get());
    indexRandom(true, client().prepareIndex("test", "type", "1").setSource("test_field", "foobar"));
    refresh();
    SearchResponse searchResponse = client().prepareSearch("test").setTypes("type").setSource(
            new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).fieldDataField("test_field")).get();
    assertHitCount(searchResponse, 1);
    Map<String, DocumentField> fields = searchResponse.getHits().getHits()[0].getFields();
    assertThat(fields.get("test_field").getValue(), equalTo("foobar"));
}

/**
 * Doc-value field retrieval with _source disabled: values come back in their
 * doc-values representation (e.g. integers widened to long, floats to double).
 */
public void testFieldsPulledFromFieldData() throws Exception {
    createIndex("test");

    String mapping = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type1")
            .startObject("_source")
            .field("enabled", false)
            .endObject()
            .startObject("properties")
            .startObject("text_field")
            .field("type", "text")
            .field("fielddata", true)
            .endObject()
            .startObject("keyword_field")
            .field("type", "keyword")
            .endObject()
            .startObject("byte_field")
            .field("type", "byte")
            .endObject()
            .startObject("short_field")
            .field("type", "short")
            .endObject()
            .startObject("integer_field")
            .field("type", "integer")
            .endObject()
            .startObject("long_field")
            .field("type", "long")
            .endObject()
            .startObject("float_field")
            .field("type", "float")
            .endObject()
            .startObject("double_field")
            .field("type", "double")
            .endObject()
            .startObject("date_field")
            .field("type", "date")
            .endObject()
            .startObject("boolean_field")
            .field("type", "boolean")
            .endObject()
            .startObject("binary_field")
            .field("type", "binary")
            .endObject()
            .startObject("ip_field")
            .field("type", "ip")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();

    ReadableDateTime date = new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC);
    client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
            .field("text_field", "foo")
            .field("keyword_field", "foo")
            .field("byte_field", (byte) 1)
            .field("short_field", (short) 2)
            .field("integer_field", 3)
            .field("long_field", 4L)
            .field("float_field", 5.0f)
            .field("double_field", 6.0d)
            .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(date))
            .field("boolean_field", true)
            .field("ip_field", "::1")
            .endObject()).execute().actionGet();
    client().admin().indices().prepareRefresh().execute().actionGet();

    SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery())
            .addDocValueField("text_field")
            .addDocValueField("keyword_field")
            .addDocValueField("byte_field")
            .addDocValueField("short_field")
            .addDocValueField("integer_field")
            .addDocValueField("long_field")
            .addDocValueField("float_field")
            .addDocValueField("double_field")
            .addDocValueField("date_field")
            .addDocValueField("boolean_field")
            .addDocValueField("ip_field");
    SearchResponse searchResponse = builder.execute().actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
    Set<String> fields = new HashSet<>(searchResponse.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
            "float_field", "double_field", "date_field", "boolean_field", "text_field", "keyword_field",
            "ip_field")));

    assertThat(searchResponse.getHits().getAt(0).getFields().get("byte_field").getValue().toString(), equalTo("1"));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("short_field").getValue().toString(), equalTo("2"));
    // doc values widen integer -> long and float -> double
    assertThat(searchResponse.getHits().getAt(0).getFields().get("integer_field").getValue(), equalTo((Object) 3L));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), equalTo(date));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
    assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1"));
}

/**
 * Multi-valued doc-values script fields across keyword/long/double fields, in
 * single- and multi-valued variants, for a random set of documents.
 */
public void testScriptFields() throws Exception {
    assertAcked(prepareCreate("index").addMapping("type",
            "s", "type=keyword",
            "l", "type=long",
            "d", "type=double",
            "ms", "type=keyword",
            "ml", "type=long",
            "md", "type=double").get());
    final int numDocs = randomIntBetween(3, 8);
    List<IndexRequestBuilder> reqs = new ArrayList<>();
    for (int i = 0; i < numDocs; ++i) {
        reqs.add(client().prepareIndex("index", "type", Integer.toString(i)).setSource(
                "s", Integer.toString(i),
                "ms", new String[] {Integer.toString(i), Integer.toString(i+1)},
                "l", i,
                "ml", new long[] {i, i+1},
                "d", i,
                "md", new double[] {i, i+1}));
    }
    indexRandom(true, reqs);
    ensureSearchable();
    SearchRequestBuilder req = client().prepareSearch("index");
    for (String field : Arrays.asList("s", "ms", "l", "ml", "d", "md")) {
        req.addScriptField(field, new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + field + "'].values", Collections.emptyMap()));
    }
    SearchResponse resp = req.get();
    assertSearchResponse(resp);
    for (SearchHit hit : resp.getHits().getHits()) {
        final int id = Integer.parseInt(hit.getId());
        Map<String, DocumentField> fields = hit.getFields();
        assertThat(fields.get("s").getValues(), equalTo(Collections.<Object> singletonList(Integer.toString(id))));
        assertThat(fields.get("l").getValues(), equalTo(Collections.<Object> singletonList((long) id)));
        assertThat(fields.get("d").getValues(), equalTo(Collections.<Object> singletonList((double) id)));
        assertThat(fields.get("ms").getValues(), equalTo(Arrays.<Object> asList(Integer.toString(id), Integer.toString(id + 1))));
        assertThat(fields.get("ml").getValues(), equalTo(Arrays.<Object> asList((long) id, id + 1L)));
        assertThat(fields.get("md").getValues(), equalTo(Arrays.<Object> asList((double) id, id + 1d)));
    }
}

/** _routing metadata is loaded for hits even when the only requested stored field is absent. */
public void testLoadMetadata() throws Exception {
    assertAcked(prepareCreate("test"));

    indexRandom(true,
            client().prepareIndex("test", "doc", "1")
                    .setRouting("1")
                    .setSource(jsonBuilder().startObject().field("field1", "value").endObject()));

    SearchResponse response = client().prepareSearch("test").addStoredField("field1").get();
    assertSearchResponse(response);
    assertHitCount(response, 1);

    Map<String, DocumentField> fields = response.getHits().getAt(0).getFields();

    assertThat(fields.get("field1"), nullValue());
    assertThat(fields.get("_routing").isMetadataField(), equalTo(true));
    assertThat(fields.get("_routing").getValue().toString(), equalTo("1"));
}
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.kinesisvideosignalingchannels.model;

import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Kinesis Video Signaling Channels "SendAlexaOfferToMaster" operation: delivers a
 * base64-encoded SDP offer from an Alexa client to the master peer of a signaling channel.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesis-video-signaling-2019-12-04/SendAlexaOfferToMaster"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SendAlexaOfferToMasterRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The ARN of the signaling channel by which Alexa and the master peer communicate.
     * </p>
     */
    private String channelARN;
    /**
     * <p>
     * The unique identifier for the sender client.
     * </p>
     */
    private String senderClientId;
    /**
     * <p>
     * The base64-encoded SDP offer content.
     * </p>
     */
    private String messagePayload;

    /**
     * <p>
     * The ARN of the signaling channel by which Alexa and the master peer communicate.
     * </p>
     *
     * @param channelARN
     *        The ARN of the signaling channel by which Alexa and the master peer communicate.
     */
    public void setChannelARN(String channelARN) {
        this.channelARN = channelARN;
    }

    /**
     * <p>
     * The ARN of the signaling channel by which Alexa and the master peer communicate.
     * </p>
     *
     * @return The ARN of the signaling channel by which Alexa and the master peer communicate.
     */
    public String getChannelARN() {
        return this.channelARN;
    }

    /**
     * <p>
     * The ARN of the signaling channel by which Alexa and the master peer communicate.
     * </p>
     *
     * @param channelARN
     *        The ARN of the signaling channel by which Alexa and the master peer communicate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SendAlexaOfferToMasterRequest withChannelARN(String channelARN) {
        setChannelARN(channelARN);
        return this;
    }

    /**
     * <p>
     * The unique identifier for the sender client.
     * </p>
     *
     * @param senderClientId
     *        The unique identifier for the sender client.
     */
    public void setSenderClientId(String senderClientId) {
        this.senderClientId = senderClientId;
    }

    /**
     * <p>
     * The unique identifier for the sender client.
     * </p>
     *
     * @return The unique identifier for the sender client.
     */
    public String getSenderClientId() {
        return this.senderClientId;
    }

    /**
     * <p>
     * The unique identifier for the sender client.
     * </p>
     *
     * @param senderClientId
     *        The unique identifier for the sender client.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SendAlexaOfferToMasterRequest withSenderClientId(String senderClientId) {
        setSenderClientId(senderClientId);
        return this;
    }

    /**
     * <p>
     * The base64-encoded SDP offer content.
     * </p>
     *
     * @param messagePayload
     *        The base64-encoded SDP offer content.
     */
    public void setMessagePayload(String messagePayload) {
        this.messagePayload = messagePayload;
    }

    /**
     * <p>
     * The base64-encoded SDP offer content.
     * </p>
     *
     * @return The base64-encoded SDP offer content.
     */
    public String getMessagePayload() {
        return this.messagePayload;
    }

    /**
     * <p>
     * The base64-encoded SDP offer content.
     * </p>
     *
     * @param messagePayload
     *        The base64-encoded SDP offer content.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SendAlexaOfferToMasterRequest withMessagePayload(String messagePayload) {
        setMessagePayload(messagePayload);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getChannelARN() != null)
            sb.append("ChannelARN: ").append(getChannelARN()).append(",");
        if (getSenderClientId() != null)
            sb.append("SenderClientId: ").append(getSenderClientId()).append(",");
        if (getMessagePayload() != null)
            sb.append("MessagePayload: ").append(getMessagePayload());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // !(instanceof) also rejects null, so no separate null check is needed.
        if (!(obj instanceof SendAlexaOfferToMasterRequest))
            return false;
        SendAlexaOfferToMasterRequest other = (SendAlexaOfferToMasterRequest) obj;
        // Objects.equals is null-safe and equivalent to the generated XOR/equals pattern.
        return Objects.equals(getChannelARN(), other.getChannelARN())
                && Objects.equals(getSenderClientId(), other.getSenderClientId())
                && Objects.equals(getMessagePayload(), other.getMessagePayload());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation (seed 1, null -> 0) as the
        // hand-rolled prime loop it replaces, so the hash value is unchanged.
        return Objects.hash(getChannelARN(), getSenderClientId(), getMessagePayload());
    }

    @Override
    public SendAlexaOfferToMasterRequest clone() {
        return (SendAlexaOfferToMasterRequest) super.clone();
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.security.authorization.plugin;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
import org.apache.hadoop.hive.ql.security.authorization.PrivilegeScope;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAccessController;
import org.apache.hadoop.hive.ql.session.SessionState;

/**
 * Legacy ("v1") authorizer implementation that bridges the plugin API onto the
 * pre-plugin Hive grant/revoke metastore operations. Access checks themselves
 * are not supported here ({@link #checkPrivileges}); this class only manages
 * privileges and roles.
 */
public class HiveV1Authorizer extends AbstractHiveAuthorizer {

  private final HiveConf conf;
  // Tag recorded with each privilege so the metastore knows which authorizer granted it.
  private static final String AUTHORIZER = "v1";

  public HiveV1Authorizer(HiveConf conf) {
    this.conf = conf;
  }

  // Leave this ctor around for backward compat.
  @Deprecated
  public HiveV1Authorizer(HiveConf conf, Hive hive) {
    this(conf);
  }

  @Override
  public VERSION getVersion() {
    return VERSION.V1;
  }

  /**
   * Not supported by the v1 authorizer; privilege checking is done elsewhere in
   * the legacy code path.
   */
  @Override
  public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputsHObjs,
      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
      throws HiveAuthzPluginException, HiveAccessControlException {
    throw new UnsupportedOperationException("Should not be called for v1 authorizer");
  }

  @Override
  public void grantPrivileges(
      List<HivePrincipal> principals, List<HivePrivilege> privileges,
      HivePrivilegeObject privObject, HivePrincipal grantor, boolean grantOption)
      throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption, AUTHORIZER);
      grantOrRevokePrivs(principals, privBag, true, grantOption);
    } catch (Exception e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  @Override
  public void revokePrivileges(
      List<HivePrincipal> principals, List<HivePrivilege> privileges,
      HivePrivilegeObject privObject, HivePrincipal grantor, boolean grantOption)
      throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption, AUTHORIZER);
      grantOrRevokePrivs(principals, privBag, false, grantOption);
    } catch (Exception e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  /**
   * Applies the privileges in {@code privBag} to every principal, either
   * granting ({@code isGrant == true}) or revoking them.
   */
  private void grantOrRevokePrivs(List<HivePrincipal> principals, PrivilegeBag privBag,
      boolean isGrant, boolean grantOption) throws HiveException {
    for (HivePrincipal principal : principals) {
      PrincipalType type = AuthorizationUtils.getThriftPrincipalType(principal.getType());
      // The same bag is reused per principal; retarget each privilege entry first.
      for (HiveObjectPrivilege priv : privBag.getPrivileges()) {
        priv.setPrincipalName(principal.getName());
        priv.setPrincipalType(type);
      }
      Hive hive = Hive.getWithFastCheck(this.conf);
      if (isGrant) {
        hive.grantPrivileges(privBag);
      } else {
        hive.revokePrivileges(privBag, grantOption);
      }
    }
  }

  /**
   * Converts the plugin-level privilege description into a thrift
   * {@link PrivilegeBag}, resolving the target object (global, database,
   * table, partition, or column) against the metastore.
   *
   * @throws HiveException if the target database does not exist, if columns
   *         are supplied at an unsupported scope, or on other metastore errors
   */
  private PrivilegeBag toPrivilegeBag(List<HivePrivilege> privileges,
      HivePrivilegeObject privObject, HivePrincipal grantor, boolean grantOption, String authorizer)
      throws HiveException {

    PrivilegeBag privBag = new PrivilegeBag();
    if (privileges.isEmpty()) {
      return privBag;
    }

    String grantorName = grantor.getName();
    PrincipalType grantorType = AuthorizationUtils.getThriftPrincipalType(grantor.getType());
    if (privObject.getType() == null ||
        privObject.getType() == HivePrivilegeObject.HivePrivilegeObjectType.GLOBAL) {
      // User-level (global) privileges: no object hierarchy, columns are meaningless here.
      for (HivePrivilege priv : privileges) {
        List<String> columns = priv.getColumns();
        if (columns != null && !columns.isEmpty()) {
          throw new HiveException(
              "For user-level privileges, column sets should be null. columns=" + columns);
        }
        privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(
            HiveObjectType.GLOBAL, null, null, null, null), null, null,
            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption),
            authorizer));
      }
      return privBag;
    }

    if (privObject.getPartKeys() != null && grantOption) {
      throw new HiveException("Grant does not support partition level.");
    }
    Hive hive = Hive.getWithFastCheck(this.conf);
    Database dbObj = hive.getDatabase(privObject.getDbname());
    if (dbObj == null) {
      throw new HiveException("Database " + privObject.getDbname() + " does not exist");
    }
    Table tableObj = null;
    if (privObject.getObjectName() != null) {
      tableObj = hive.getTable(dbObj.getName(), privObject.getObjectName());
    }

    // Resolve partition values if a partition spec was supplied for a partitioned table.
    List<String> partValues = null;
    if (tableObj != null) {
      if ((!tableObj.isPartitioned()) && privObject.getPartKeys() != null) {
        throw new HiveException(
            "Table is not partitioned, but partition name is present: partSpec="
                + privObject.getPartKeys());
      }
      if (privObject.getPartKeys() != null) {
        Map<String, String> partSpec =
            Warehouse.makeSpecFromValues(tableObj.getPartitionKeys(), privObject.getPartKeys());
        Partition partObj = hive.getPartition(tableObj, partSpec, false).getTPartition();
        partValues = partObj.getValues();
      }
    }

    for (HivePrivilege priv : privileges) {
      List<String> columns = priv.getColumns();
      if (columns != null && !columns.isEmpty()) {
        if (!priv.supportsScope(PrivilegeScope.COLUMN_LEVEL_SCOPE)) {
          throw new HiveException(priv.getName() + " does not support column level privilege.");
        }
        if (tableObj == null) {
          throw new HiveException(
              "For user-level/database-level privileges, column sets should be null. columns="
                  + columns);
        }
        for (String column : columns) {
          privBag.addToPrivileges(new HiveObjectPrivilege(
              new HiveObjectRef(HiveObjectType.COLUMN, dbObj.getName(),
                  tableObj.getTableName(), partValues, column), null, null,
              new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption),
              authorizer));
        }
      } else if (tableObj == null) {
        privBag.addToPrivileges(new HiveObjectPrivilege(
            new HiveObjectRef(HiveObjectType.DATABASE, dbObj.getName(), null, null, null),
            null, null,
            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption),
            authorizer));
      } else if (partValues == null) {
        privBag.addToPrivileges(new HiveObjectPrivilege(
            new HiveObjectRef(HiveObjectType.TABLE, dbObj.getName(),
                tableObj.getTableName(), null, null), null, null,
            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption),
            authorizer));
      } else {
        privBag.addToPrivileges(new HiveObjectPrivilege(
            new HiveObjectRef(HiveObjectType.PARTITION, dbObj.getName(),
                tableObj.getTableName(), partValues, null), null, null,
            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption),
            authorizer));
      }
    }
    return privBag;
  }

  @Override
  public void createRole(String roleName, HivePrincipal adminGrantor)
      throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      Hive hive = Hive.getWithFastCheck(this.conf);
      hive.createRole(roleName, adminGrantor == null ? null : adminGrantor.getName());
    } catch (HiveException e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  @Override
  public void dropRole(String roleName)
      throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      Hive hive = Hive.getWithFastCheck(this.conf);
      hive.dropRole(roleName);
    } catch (HiveException e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  @Override
  public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
      throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      Hive hive = Hive.getWithFastCheck(this.conf);
      return SQLStdHiveAccessController.getHiveRoleGrants(hive.getMSC(), roleName);
    } catch (Exception e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  @Override
  public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
      throws HiveAuthzPluginException, HiveAccessControlException {
    PrincipalType type = AuthorizationUtils.getThriftPrincipalType(principal.getType());
    try {
      List<HiveRoleGrant> grants = new ArrayList<HiveRoleGrant>();
      Hive hive = Hive.getWithFastCheck(this.conf);
      for (RolePrincipalGrant grant : hive.getRoleGrantInfoForPrincipal(principal.getName(), type)) {
        grants.add(new HiveRoleGrant(grant));
      }
      return grants;
    } catch (HiveException e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  @Override
  public void grantRole(List<HivePrincipal> principals, List<String> roles, boolean grantOption,
      HivePrincipal grantor) throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      grantOrRevokeRole(principals, roles, grantOption, grantor, true);
    } catch (HiveException e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  @Override
  public void revokeRole(List<HivePrincipal> principals, List<String> roles, boolean grantOption,
      HivePrincipal grantor) throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      grantOrRevokeRole(principals, roles, grantOption, grantor, false);
    } catch (HiveException e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  /**
   * Grants or revokes each role in {@code roles} for each principal in
   * {@code principals} via the metastore.
   */
  private void grantOrRevokeRole(List<HivePrincipal> principals, List<String> roles,
      boolean grantOption, HivePrincipal grantor, boolean isGrant) throws HiveException {
    PrincipalType grantorType = AuthorizationUtils.getThriftPrincipalType(grantor.getType());
    Hive hive = Hive.getWithFastCheck(this.conf);
    for (HivePrincipal principal : principals) {
      PrincipalType principalType = AuthorizationUtils.getThriftPrincipalType(principal.getType());
      String userName = principal.getName();
      for (String roleName : roles) {
        if (isGrant) {
          hive.grantRole(roleName, userName, principalType,
              grantor.getName(), grantorType, grantOption);
        } else {
          hive.revokeRole(roleName, userName, principalType, grantOption);
        }
      }
    }
  }

  @Override
  public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      Hive hive = Hive.getWithFastCheck(this.conf);
      return hive.getAllRoleNames();
    } catch (HiveException e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  /**
   * Shows the privileges granted to {@code principal} on {@code privObj},
   * picking the narrowest matching scope (global, all, database, table,
   * partition, or column) based on which fields of {@code privObj} are set.
   */
  @Override
  public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
      throws HiveAuthzPluginException, HiveAccessControlException {
    String name = principal == null ? null : principal.getName();
    PrincipalType type =
        AuthorizationUtils.getThriftPrincipalType(principal == null ? null : principal.getType());

    List<HiveObjectPrivilege> privs = new ArrayList<HiveObjectPrivilege>();
    try {
      Hive hive = Hive.getWithFastCheck(this.conf);
      if (privObj == null) {
        // show user level privileges
        privs.addAll(hive.showPrivilegeGrant(HiveObjectType.GLOBAL, name, type,
            null, null, null, null));
      } else if (privObj.getDbname() == null) {
        // show all privileges
        privs.addAll(hive.showPrivilegeGrant(null, name, type, null, null, null, null));
      } else {
        Database dbObj = hive.getDatabase(privObj.getDbname());
        if (dbObj == null) {
          throw new HiveException("Database " + privObj.getDbname() + " does not exist");
        }

        Table tableObj = null;
        if (privObj.getObjectName() != null) {
          tableObj = hive.getTable(dbObj.getName(), privObj.getObjectName());
        }
        List<String> partValues = privObj.getPartKeys();

        if (tableObj == null) {
          // show database level privileges
          privs.addAll(hive.showPrivilegeGrant(HiveObjectType.DATABASE, name, type,
              dbObj.getName(), null, null, null));
        } else {
          List<String> columns = privObj.getColumns();
          if (columns != null && !columns.isEmpty()) {
            // show column level privileges
            for (String columnName : columns) {
              privs.addAll(hive.showPrivilegeGrant(HiveObjectType.COLUMN, name, type,
                  dbObj.getName(), tableObj.getTableName(), partValues, columnName));
            }
          } else if (partValues == null) {
            // show table level privileges
            privs.addAll(hive.showPrivilegeGrant(HiveObjectType.TABLE, name, type,
                dbObj.getName(), tableObj.getTableName(), null, null));
          } else {
            // show partition level privileges
            privs.addAll(hive.showPrivilegeGrant(HiveObjectType.PARTITION, name, type,
                dbObj.getName(), tableObj.getTableName(), partValues, null));
          }
        }
      }
      return AuthorizationUtils.getPrivilegeInfos(privs);
    } catch (Exception ex) {
      throw new HiveAuthzPluginException(ex);
    }
  }

  @Override
  public void setCurrentRole(String roleName)
      throws HiveAccessControlException, HiveAuthzPluginException {
    throw new HiveAuthzPluginException("Unsupported operation 'setCurrentRole' for V1 auth");
  }

  /**
   * Returns the names of the roles granted to the current session user,
   * resolving the user from the session state or the authenticator.
   */
  @Override
  public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
    String userName = SessionState.get().getUserName();
    if (userName == null) {
      userName = SessionState.getUserFromAuthenticator();
    }
    if (userName == null) {
      throw new HiveAuthzPluginException("Cannot resolve current user name");
    }
    try {
      Hive hive = Hive.getWithFastCheck(this.conf);
      List<String> roleNames = new ArrayList<String>();
      for (Role role : hive.listRoles(userName, PrincipalType.USER)) {
        roleNames.add(role.getRoleName());
      }
      return roleNames;
    } catch (HiveException e) {
      throw new HiveAuthzPluginException(e);
    }
  }

  @Override
  public void applyAuthorizationConfigPolicy(HiveConf hiveConf) {
    // No configuration adjustments are needed for the v1 authorizer.
  }

  @Override
  public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
      HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    // do no filtering in old authorizer
    return listObjs;
  }

  @Override
  public boolean needTransform() {
    return false;
  }

  @Override
  public List<HivePrivilegeObject> applyRowFilterAndColumnMasking(HiveAuthzContext context,
      List<HivePrivilegeObject> privObjs) throws SemanticException {
    return null;
  }
}
/** * Copyright (c) 2010 Yahoo! Inc. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. See accompanying LICENSE file. */ package org.apache.oozie.action.hadoop; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.OutputStream; import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Properties; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.JobID; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.oozie.WorkflowActionBean; import org.apache.oozie.WorkflowJobBean; import org.apache.oozie.action.ActionExecutor; import org.apache.oozie.action.ActionExecutorException; import org.apache.oozie.client.OozieClient; import org.apache.oozie.client.WorkflowAction; import org.apache.oozie.client.WorkflowJob; import org.apache.oozie.service.Services; import org.apache.oozie.service.UUIDService; import org.apache.oozie.service.WorkflowAppService; import org.apache.oozie.service.WorkflowStoreService; import org.apache.oozie.util.IOUtils; 
import org.apache.oozie.util.XConfiguration; import org.apache.oozie.util.XmlUtils; import org.apache.oozie.workflow.WorkflowApp; import org.apache.oozie.workflow.WorkflowInstance; import org.apache.oozie.workflow.WorkflowLib; import org.apache.oozie.workflow.lite.EndNodeDef; import org.apache.oozie.workflow.lite.LiteWorkflowApp; import org.apache.oozie.workflow.lite.StartNodeDef; import org.jdom.Element; public class TestJavaActionExecutor extends ActionExecutorTestCase { @Override protected void setSystemProps() { super.setSystemProps(); setSystemProperty("oozie.service.ActionService.executor.classes", JavaActionExecutor.class.getName()); } public void testLauncherJar() throws Exception { JavaActionExecutor ae = new JavaActionExecutor(); Path jar = new Path(ae.getOozieRuntimeDir(), ae.getLauncherJarName()); assertTrue(new File(jar.toString()).exists()); } public void testSetupMethods() throws Exception { JavaActionExecutor ae = new JavaActionExecutor(); assertEquals("java", ae.getType()); assertEquals("java-launcher.jar", ae.getLauncherJarName()); List<Class> classes = new ArrayList<Class>(); classes.add(LauncherMapper.class); classes.add(LauncherSecurityManager.class); classes.add(LauncherException.class); classes.add(LauncherMainException.class); assertEquals(classes, ae.getLauncherClasses()); Configuration conf = new XConfiguration(); conf.set("user.name", "a"); try { ae.checkForDisallowedProps(conf, "x"); fail(); } catch (ActionExecutorException ex) { } conf = new XConfiguration(); conf.set("hadoop.job.ugi", "a"); try { ae.checkForDisallowedProps(conf, "x"); fail(); } catch (ActionExecutorException ex) { } conf = new XConfiguration(); conf.set("mapred.job.tracker", "a"); try { ae.checkForDisallowedProps(conf, "x"); fail(); } catch (ActionExecutorException ex) { } conf = new XConfiguration(); conf.set("fs.default.name", "a"); try { ae.checkForDisallowedProps(conf, "x"); fail(); } catch (ActionExecutorException ex) { } conf = new XConfiguration(); conf.set("a", 
"a"); try { ae.checkForDisallowedProps(conf, "x"); } catch (ActionExecutorException ex) { fail(); } Element actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<job-xml>job.xml</job-xml>" + "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>" + "<property><name>a</name><value>AA</value></property>" + "<property><name>b</name><value>BB</value></property>" + "</configuration>" + "<main-class>MAIN-CLASS</main-class>" + "<java-opts>JAVA-OPTS</java-opts>" + "<arg>A1</arg>" + "<arg>A2</arg>" + "<file>f.jar</file>" + "<archive>a.tar</archive>" + "</java>"); Path appPath = new Path(getFsTestCaseDir(), "wf"); Path appJarPath = new Path("lib/a.jar"); getFileSystem().create(new Path(appPath, appJarPath)).close(); Path appSoPath = new Path("lib/a.so"); getFileSystem().create(new Path(appPath, appSoPath)).close(); Path appSo1Path = new Path("lib/a.so.1"); String expectedSo1Path = "lib/a.so.1#a.so.1"; getFileSystem().create(new Path(appPath, appSo1Path)).close(); Path filePath = new Path("f.jar"); getFileSystem().create(new Path(appPath, filePath)).close(); Path archivePath = new Path("a.tar"); getFileSystem().create(new Path(appPath, archivePath)).close(); XConfiguration protoConf = new XConfiguration(); protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser()); protoConf.set(WorkflowAppService.HADOOP_UGI, getTestUser() + "," + getTestGroup()); protoConf.set(OozieClient.GROUP_NAME, getTestGroup()); protoConf.setStrings(WorkflowAppService.APP_LIB_PATH_LIST, appJarPath.toString(), appSoPath.toString()); injectKerberosInfo(protoConf); WorkflowJobBean wf = createBaseWorkflow(protoConf, "action"); WorkflowActionBean action = (WorkflowActionBean) wf.getActions().get(0); action.setType(ae.getType()); Context context = new Context(wf, action); conf = new XConfiguration(); conf.set("c", "C"); conf.set("oozie.launcher.d", "D"); OutputStream os = 
getFileSystem().create(new Path(getFsTestCaseDir(), "job.xml")); conf.writeXml(os); os.close(); conf = ae.createBaseHadoopConf(context, actionXml); assertEquals(protoConf.get(WorkflowAppService.HADOOP_USER), conf.get(WorkflowAppService.HADOOP_USER)); assertEquals(protoConf.get(WorkflowAppService.HADOOP_UGI), conf.get(WorkflowAppService.HADOOP_UGI)); assertEquals(getJobTrackerUri(), conf.get("mapred.job.tracker")); assertEquals(getNameNodeUri(), conf.get("fs.default.name")); conf = ae.createBaseHadoopConf(context, actionXml); ae.setupLauncherConf(conf, actionXml, getFsTestCaseDir(), context); assertEquals("LA", conf.get("oozie.launcher.a")); assertEquals("LA", conf.get("a")); assertNull(conf.get("b")); assertNull(conf.get("oozie.launcher.d")); assertNull(conf.get("d")); conf = ae.createBaseHadoopConf(context, actionXml); ae.setupActionConf(conf, context, actionXml, getFsTestCaseDir()); assertEquals("LA", conf.get("oozie.launcher.a")); assertEquals("AA", conf.get("a")); assertEquals("BB", conf.get("b")); assertEquals("C", conf.get("c")); assertEquals("D", conf.get("oozie.launcher.d")); conf = ae.createBaseHadoopConf(context, actionXml); ae.setupLauncherConf(conf, actionXml, getFsTestCaseDir(), context); ae.addToCache(conf, appPath, appJarPath.toString(), false); assertTrue(conf.get("mapred.job.classpath.files").contains(appJarPath.toUri().getPath())); ae.addToCache(conf, appPath, appSoPath.toString(), false); assertTrue(conf.get("mapred.cache.files").contains(appSoPath.toUri().getPath())); ae.addToCache(conf, appPath, appSo1Path.toString(), false); assertTrue(conf.get("mapred.cache.files").contains(expectedSo1Path)); assertTrue(ae.getOozieLauncherJar(context).startsWith(context.getActionDir().toString())); assertTrue(ae.getOozieLauncherJar(context).endsWith(ae.getLauncherJarName())); assertFalse(getFileSystem().exists(context.getActionDir())); ae.prepareActionDir(getFileSystem(), context); assertTrue(getFileSystem().exists(context.getActionDir())); 
assertTrue(getFileSystem().exists(new Path(context.getActionDir(), ae.getLauncherJarName()))); ae.cleanUpActionDir(getFileSystem(), context); assertFalse(getFileSystem().exists(context.getActionDir())); conf = ae.createBaseHadoopConf(context, actionXml); ae.setupLauncherConf(conf, actionXml, getFsTestCaseDir(), context); ae.setLibFilesArchives(context, actionXml, appPath, conf); assertTrue(conf.get("mapred.cache.files").contains(filePath.toUri().getPath())); assertTrue(conf.get("mapred.cache.archives").contains(archivePath.toUri().getPath())); conf = ae.createBaseHadoopConf(context, actionXml); ae.setupActionConf(conf, context, actionXml, getFsTestCaseDir()); ae.setLibFilesArchives(context, actionXml, appPath, conf); assertTrue(conf.get("mapred.cache.files").contains(filePath.toUri().getPath())); assertTrue(conf.get("mapred.cache.archives").contains(archivePath.toUri().getPath())); Configuration actionConf = ae.createBaseHadoopConf(context, actionXml); ae.setupActionConf(actionConf, context, actionXml, getFsTestCaseDir()); conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf); ae.setupLauncherConf(conf, actionXml, getFsTestCaseDir(), context); assertEquals("MAIN-CLASS", ae.getLauncherMain(conf, actionXml)); assertTrue(conf.get("mapred.child.java.opts").contains("JAVA-OPTS")); assertEquals(Arrays.asList("A1", "A2"), Arrays.asList(LauncherMapper.getMainArguments(conf))); assertTrue(getFileSystem().exists(new Path(context.getActionDir(), LauncherMapper.ACTION_CONF_XML))); actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node> <configuration>" + "<property><name>mapred.job.queue.name</name><value>AQ</value></property>" + "</configuration>" + "<main-class>MAIN-CLASS</main-class>" + "</java>"); actionConf = ae.createBaseHadoopConf(context, actionXml); ae.setupActionConf(actionConf, context, actionXml, appPath); conf = 
ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf); assertEquals("AQ", conf.get("mapred.job.queue.name")); assertEquals("AQ", actionConf.get("mapred.job.queue.name")); actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node> <configuration>" + "<property><name>oozie.launcher.mapred.job.queue.name</name><value>LQ</value></property>" + "</configuration>" + "<main-class>MAIN-CLASS</main-class>" + "</java>"); actionConf = ae.createBaseHadoopConf(context, actionXml); ae.setupActionConf(actionConf, context, actionXml, appPath); conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf); assertEquals("LQ", conf.get("mapred.job.queue.name")); actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node> <configuration>" + "<property><name>oozie.launcher.mapred.job.queue.name</name><value>LQ</value></property>" + "<property><name>mapred.job.queue.name</name><value>AQ</value></property>" + "</configuration>" + "<main-class>MAIN-CLASS</main-class>" + "</java>"); actionConf = ae.createBaseHadoopConf(context, actionXml); ae.setupActionConf(actionConf, context, actionXml, appPath); conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf); assertEquals("LQ", conf.get("mapred.job.queue.name")); assertEquals("AQ", actionConf.get("mapred.job.queue.name")); } protected Context createContext(String actionXml) throws Exception { JavaActionExecutor ae = new JavaActionExecutor(); Path appJarPath = new Path("lib/test.jar"); File jarFile = IOUtils.createJar(new File(getTestCaseDir()), "test.jar", LauncherMainTester.class); InputStream is = new FileInputStream(jarFile); OutputStream os = getFileSystem().create(new Path(getAppPath(), "lib/test.jar")); IOUtils.copyStream(is, os); Path appSoPath = new Path("lib/test.so"); 
getFileSystem().create(new Path(getAppPath(), appSoPath)).close(); XConfiguration protoConf = new XConfiguration(); protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser()); protoConf.set(WorkflowAppService.HADOOP_UGI, getTestUser() + "," + getTestGroup()); protoConf.set(OozieClient.GROUP_NAME, getTestGroup()); protoConf.setStrings(WorkflowAppService.APP_LIB_PATH_LIST, appJarPath.toString(), appSoPath.toString()); injectKerberosInfo(protoConf); WorkflowJobBean wf = createBaseWorkflow(protoConf, "action"); WorkflowActionBean action = (WorkflowActionBean) wf.getActions().get(0); action.setType(ae.getType()); action.setConf(actionXml); return new Context(wf, action); } protected RunningJob submitAction(Context context) throws Exception { JavaActionExecutor ae = new JavaActionExecutor(); WorkflowAction action = context.getAction(); ae.prepareActionDir(getFileSystem(), context); ae.submitLauncher(getFileSystem(), context, action); String jobId = action.getExternalId(); String jobTracker = action.getTrackerUri(); String consoleUrl = action.getConsoleUrl(); assertNotNull(jobId); assertNotNull(jobTracker); assertNotNull(consoleUrl); JobConf jobConf = new JobConf(); jobConf.set("mapred.job.tracker", jobTracker); injectKerberosInfo(jobConf); JobClient jobClient = new JobClient(jobConf); final RunningJob runningJob = jobClient.getJob(JobID.forName(jobId)); assertNotNull(runningJob); return runningJob; } public void testSimpestSleSubmitOK() throws Exception { String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "</java>"; Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertTrue(runningJob.isSuccessful()); ActionExecutor ae = new 
JavaActionExecutor(); ae.check(context, context.getAction()); assertEquals("SUCCEEDED", context.getAction().getExternalStatus()); assertNull(context.getAction().getData()); ae.end(context, context.getAction()); assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus()); } public void testOutputSubmitOK() throws Exception { String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "<arg>out</arg>" + "<capture-output/>" + "</java>"; Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertTrue(runningJob.isSuccessful()); ActionExecutor ae = new JavaActionExecutor(); ae.check(context, context.getAction()); assertEquals("SUCCEEDED", context.getAction().getExternalStatus()); assertNotNull(context.getAction().getData()); StringReader sr = new StringReader(context.getAction().getData()); Properties props = new Properties(); props.load(sr); assertEquals("A", props.get("a")); ae.end(context, context.getAction()); assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus()); } public void testIdSwapSubmitOK() throws Exception { String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "<arg>id</arg>" + "<capture-output/>" + "</java>"; Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertTrue(runningJob.isSuccessful()); ActionExecutor ae = new JavaActionExecutor(); try { ae.check(context, context.getAction()); } catch 
(ActionExecutorException ex) { if (!ex.getMessage().contains("IDSWAP")) { fail(); } } } public void testAdditionalJarSubmitOK() throws Exception { Path appJarPath = new Path("test-extra.jar"); File jarFile = IOUtils.createJar(new File(getTestCaseDir()), appJarPath.getName(), LauncherMainTester2.class); InputStream is = new FileInputStream(jarFile); OutputStream os = getFileSystem().create(new Path(getAppPath(), appJarPath.toString())); IOUtils.copyStream(is, os); String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester2.class.getName() + "</main-class>" + "<file>" + appJarPath.toString() + "</file>" + "</java>"; Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); ActionExecutor ae = new JavaActionExecutor(); assertFalse(ae.isCompleted(context.getAction().getExternalStatus())); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertTrue(runningJob.isSuccessful()); ae.check(context, context.getAction()); assertEquals("SUCCEEDED", context.getAction().getExternalStatus()); assertNull(context.getAction().getData()); ae.end(context, context.getAction()); assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus()); } public void testExit0SubmitOK() throws Exception { String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "<arg>exit0</arg>" + "</java>"; Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertTrue(runningJob.isSuccessful()); ActionExecutor ae = new JavaActionExecutor(); ae.check(context, 
context.getAction()); assertTrue(ae.isCompleted(context.getAction().getExternalStatus())); assertEquals("SUCCEEDED", context.getAction().getExternalStatus()); assertNull(context.getAction().getData()); ae.end(context, context.getAction()); assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus()); } public void testExit1SubmitError() throws Exception { String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "<arg>exit1</arg>" + "</java>"; Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertTrue(runningJob.isSuccessful()); assertFalse(LauncherMapper.isMainSuccessful(runningJob)); ActionExecutor ae = new JavaActionExecutor(); ae.check(context, context.getAction()); assertTrue(ae.isCompleted(context.getAction().getExternalStatus())); assertEquals("FAILED/KILLED", context.getAction().getExternalStatus()); assertEquals("1", context.getAction().getErrorCode()); assertNull(context.getAction().getData()); ae.end(context, context.getAction()); assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus()); } public void testExceptionSubmitError() throws Exception { String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "<arg>ex</arg>" + "</java>"; Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertTrue(runningJob.isSuccessful()); assertFalse(LauncherMapper.isMainSuccessful(runningJob)); ActionExecutor ae 
= new JavaActionExecutor(); ae.check(context, context.getAction()); assertTrue(ae.isCompleted(context.getAction().getExternalStatus())); assertEquals("FAILED/KILLED", context.getAction().getExternalStatus()); assertNull(context.getAction().getData()); ae.end(context, context.getAction()); assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus()); } public void testKill() throws Exception { String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "</java>"; final Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); assertFalse(runningJob.isComplete()); ActionExecutor ae = new JavaActionExecutor(); ae.kill(context, context.getAction()); assertEquals(WorkflowAction.Status.DONE, context.getAction().getStatus()); assertEquals("KILLED", context.getAction().getExternalStatus()); assertTrue(ae.isCompleted(context.getAction().getExternalStatus())); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertFalse(runningJob.isSuccessful()); } public void testRecovery() throws Exception { final String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "</java>"; final Context context = createContext(actionXml); RunningJob runningJob = submitAction(context); String launcherId = context.getAction().getExternalId(); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { JavaActionExecutor ae = new JavaActionExecutor(); Configuration conf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml)); return LauncherMapper.getRecoveryId(conf, context.getActionDir(), context.getRecoveryId()) != null; } }); final RunningJob 
runningJob2 = submitAction(context); assertEquals(launcherId, runningJob2.getJobID().toString()); assertEquals(launcherId, context.getAction().getExternalId()); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob2.isComplete(); } }); assertTrue(runningJob.isSuccessful()); ActionExecutor ae = new JavaActionExecutor(); ae.check(context, context.getAction()); assertEquals("SUCCEEDED", context.getAction().getExternalStatus()); assertNull(context.getAction().getData()); ae.end(context, context.getAction()); assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus()); } public void testLibFileArchives() throws Exception { Path root = new Path(getFsTestCaseDir(), "root"); Path jar = new Path("jar.jar"); getFileSystem().create(new Path(getAppPath(), jar)).close(); Path rootJar = new Path(root, "rootJar.jar"); getFileSystem().create(rootJar).close(); Path file = new Path("file"); getFileSystem().create(new Path(getAppPath(), file)).close(); Path rootFile = new Path(root, "rootFile"); getFileSystem().create(rootFile).close(); Path so = new Path("soFile.so"); getFileSystem().create(new Path(getAppPath(), so)).close(); Path rootSo = new Path(root, "rootSoFile.so"); getFileSystem().create(rootSo).close(); Path so1 = new Path("soFile.so.1"); getFileSystem().create(new Path(getAppPath(), so1)).close(); Path rootSo1 = new Path(root, "rootSoFile.so.1"); getFileSystem().create(rootSo1).close(); Path archive = new Path("archive.tar"); getFileSystem().create(new Path(getAppPath(), archive)).close(); Path rootArchive = new Path(root, "rootArchive.tar"); getFileSystem().create(rootArchive).close(); String actionXml = "<map-reduce xmlns='uri:oozie:workflow:0.1'>" + " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" + " <name-node>" + getNameNodeUri() + "</name-node>" + " <main-class>CLASS</main-class>" + " <file>" + jar.toString() + "</file>\n" + " <file>" + rootJar.toString() + "</file>\n" + " <file>" + file.toString() + 
"</file>\n" + " <file>" + rootFile.toString() + "</file>\n" + " <file>" + so.toString() + "</file>\n" + " <file>" + rootSo.toString() + "</file>\n" + " <file>" + so1.toString() + "</file>\n" + " <file>" + rootSo1.toString() + "</file>\n" + " <archive>" + archive.toString() + "</archive>\n" + " <archive>" + rootArchive.toString() + "</archive>\n" + "</map-reduce>"; Element eActionXml = XmlUtils.parseXml(actionXml); Context context = createContext(actionXml); Path appPath = getAppPath(); JavaActionExecutor ae = new JavaActionExecutor(); Configuration jobConf = ae.createBaseHadoopConf(context, eActionXml); ae.setupActionConf(jobConf, context, eActionXml, appPath); ae.setLibFilesArchives(context, eActionXml, appPath, jobConf); assertTrue(DistributedCache.getSymlink(jobConf)); // 1 launcher JAR, 1 wf lib JAR, 2 <file> JARs assertEquals(4, DistributedCache.getFileClassPaths(jobConf).length); // #CLASSPATH_ENTRIES# 4, 1 wf lib sos, 4 <file> sos, 2 <file> files assertEquals(11, DistributedCache.getCacheFiles(jobConf).length); // 2 <archive> files assertEquals(2, DistributedCache.getCacheArchives(jobConf).length); } public void testPrepare() throws Exception { FileSystem fs = getFileSystem(); Path mkdir = new Path(getFsTestCaseDir(), "mkdir"); Path delete = new Path(getFsTestCaseDir(), "delete"); fs.mkdirs(delete); String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<prepare>" + "<mkdir path='" + mkdir + "'/>" + "<delete path='" + delete + "'/>" + "</prepare>" + "<main-class>" + LauncherMainTester.class.getName() + "</main-class>" + "</java>"; Context context = createContext(actionXml); final RunningJob runningJob = submitAction(context); waitFor(60 * 1000, new Predicate() { public boolean evaluate() throws Exception { return runningJob.isComplete(); } }); assertTrue(runningJob.isSuccessful()); ActionExecutor ae = new JavaActionExecutor(); ae.check(context, context.getAction()); 
assertEquals("SUCCEEDED", context.getAction().getExternalStatus()); assertNull(context.getAction().getData()); ae.end(context, context.getAction()); assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus()); assertTrue(fs.exists(mkdir)); assertFalse(fs.exists(delete)); } public void testCredentialsModule() throws Exception { String actionXml = "<workflow-app xmlns='uri:oozie:workflow:0.2.5' name='pig-wf'>" + "<credentials>" + "<credential name='abcname' type='abc'>" + "<property>" + "<name>property1</name>" + "<value>value1</value>" + "</property>" + "<property>" + "<name>property2</name>" + "<value>value2</value>" + "</property>" + "</credential>" + "</credentials>" + "<start to='pig1' />" + "<action name='pig1' cred='abcname'>" + "<pig>" + "</pig>" + "<ok to='end' />" + "<error to='fail' />" + "</action>" + "<kill name='fail'>" + "<message>Pig failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>" + "</kill>" + "<end name='end' />" + "</workflow-app>"; JavaActionExecutor ae = new JavaActionExecutor(); WorkflowJobBean wfBean = addRecordToWfJobTable("test1", actionXml); WorkflowActionBean action = (WorkflowActionBean) wfBean.getActions().get(0); action.setType(ae.getType()); action.setCred("abcname"); String actionxml = "<pig>" + "<job-tracker>${jobTracker}</job-tracker>" + "<name-node>${nameNode}</name-node>" + "<prepare>" + "<delete path='outputdir' />" + "</prepare>" + "<configuration>" + "<property>" + "<name>mapred.compress.map.output</name>" + "<value>true</value>" + "</property>" + "<property>" + "<name>mapred.job.queue.name</name>" + "<value>${queueName}</value>" + "</property>" + "</configuration>" + "<script>org/apache/oozie/examples/pig/id.pig</script>" + "<param>INPUT=${inputDir}</param>" + "<param>OUTPUT=${outputDir}/pig-output</param>" + "</pig>"; action.setConf(actionxml); Context context = new Context(wfBean, action); Element actionXmlconf = XmlUtils.parseXml(action.getConf()); // action job configuration 
Configuration actionConf = ae.createBaseHadoopConf(context, actionXmlconf); // Setting the credential properties in launcher conf HashMap<String, CredentialsProperties> credProperties = ae.setCredentialPropertyToActionConf(context, action, actionConf); CredentialsProperties prop = credProperties.get("abcname"); assertEquals("value1", prop.getProperties().get("property1")); assertEquals("value2", prop.getProperties().get("property2")); Configuration conf = Services.get().getConf(); conf.set("oozie.credentials.credentialclasses", "abc=org.apache.oozie.action.hadoop.InsertTestToken"); // Adding if action need to set more credential tokens JobConf credentialsConf = new JobConf(); Configuration launcherConf = ae.createBaseHadoopConf(context, actionXmlconf); XConfiguration.copy(launcherConf, credentialsConf); ae.setCredentialTokens(credentialsConf, context, action, credProperties); Token<? extends TokenIdentifier> tk = credentialsConf.getCredentials().getToken(new Text("ABC Token")); assertNotNull(tk); } private WorkflowJobBean addRecordToWfJobTable(String wfId, String wfxml) throws Exception { WorkflowApp app = new LiteWorkflowApp("testApp", wfxml, new StartNodeDef("start")) .addNode(new EndNodeDef("end")); Configuration conf = new Configuration(); conf.set(OozieClient.APP_PATH, "testPath"); conf.set(OozieClient.LOG_TOKEN, "testToken"); conf.set(OozieClient.USER_NAME, getTestUser()); conf.set(OozieClient.GROUP_NAME, getTestGroup()); injectKerberosInfo(conf); WorkflowJobBean wfBean = createWorkflow(app, conf, "auth"); wfBean.setId(wfId); wfBean.setStatus(WorkflowJob.Status.SUCCEEDED); WorkflowActionBean action = new WorkflowActionBean(); action.setName("test"); action.setCred("null"); action.setId(Services.get().get(UUIDService.class).generateChildId(wfBean.getId(), "test")); wfBean.getActions().add(action); return wfBean; } private WorkflowJobBean createWorkflow(WorkflowApp app, Configuration conf, String authToken) throws Exception { WorkflowAppService wps = 
Services.get().get(WorkflowAppService.class); Configuration protoActionConf = wps.createProtoActionConf(conf, authToken, true); WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB(); WorkflowInstance wfInstance; wfInstance = workflowLib.createInstance(app, conf); WorkflowJobBean workflow = new WorkflowJobBean(); workflow.setId(wfInstance.getId()); workflow.setAppName(app.getName()); workflow.setAppPath(conf.get(OozieClient.APP_PATH)); workflow.setConf(XmlUtils.prettyPrint(conf).toString()); workflow.setProtoActionConf(XmlUtils.prettyPrint(protoActionConf).toString()); workflow.setCreatedTime(new Date()); workflow.setLogToken(conf.get(OozieClient.LOG_TOKEN, "")); workflow.setStatus(WorkflowJob.Status.PREP); workflow.setRun(0); workflow.setUser(conf.get(OozieClient.USER_NAME)); workflow.setGroup(conf.get(OozieClient.GROUP_NAME)); workflow.setAuthToken(authToken); workflow.setWorkflowInstance(wfInstance); return workflow; } }
/****************************************************************** * * CyberX3D for Java * * Copyright (C) Satoshi Konno 1997-2002 * * File: MultiTextureNode.java * * Revisions: * * 12/05/02 * - The first revision. * ******************************************************************/ package org.cybergarage.x3d.node; import java.io.PrintWriter; import org.cybergarage.x3d.*; import org.cybergarage.x3d.field.*; public class MultiTextureNode extends TextureNode { private final static String materialColorFieldString = "materialColor"; private final static String materialAlphaFieldString = "materialAlpha"; private final static String transparentFieldString = "transparent"; private final static String nomipmapFieldString = "nomipmap"; private static final String modeFieldString = "mode"; private final static String textureFieldString = "texture"; private final static String textureTransformFieldString = "textureTransform"; private final static String alphaFieldString = "alpha"; private final static String colorFieldString = "color"; private SFBool materialColorField; private SFBool materialAlphaField; private SFBool transparentField; private SFBool nomipmapField; private MFString modeField; private SFNode textureField; private SFNode texTransformField; private SFFloat alphaField; private SFColor colorField; public MultiTextureNode() { setHeaderFlag(false); setType(NodeType.MULTITEXTURE); // materialColor exposed field materialColorField = new SFBool(true); materialColorField.setName(materialColorFieldString); addExposedField(materialColorField); // materialAlpha exposed field materialAlphaField = new SFBool(true); materialAlphaField.setName(materialAlphaFieldString); addExposedField(materialAlphaField); // transparent exposed field transparentField = new SFBool(true); transparentField.setName(transparentFieldString); addExposedField(transparentField); // nomipmap exposed field nomipmapField = new SFBool(true); nomipmapField.setName(nomipmapFieldString); 
addExposedField(nomipmapField); // mode exposed field modeField = new MFString(); modeField.setName(modeFieldString); addExposedField(modeField); // texture exposed field textureField = new SFNode(); textureField.setName(textureFieldString); addExposedField(textureField); // textureTransform exposed field texTransformField = new SFNode(); texTransformField.setName(textureTransformFieldString); addExposedField(texTransformField); // alpha exposed field alphaField = new SFFloat(1.0f); alphaField.setName(alphaFieldString); addExposedField(alphaField); // color exposed field colorField = new SFColor(1.0f, 1.0f, 1.0f); colorField.setName(colorFieldString); addExposedField(colorField); } //////////////////////////////////////////////// // SFNode Fields //////////////////////////////////////////////// public SFNode getTextureField() { if (isInstanceNode() == false) return textureField; return (SFNode)getExposedField(textureFieldString); } public SFNode getTextureTransformField() { if (isInstanceNode() == false) return texTransformField; return (SFNode)getExposedField(textureTransformFieldString); } //////////////////////////////////////////////// // materialColor //////////////////////////////////////////////// public SFBool getMaterialColorField() { if (isInstanceNode() == false) return materialColorField; return (SFBool)getExposedField(materialColorFieldString); } public void setMaterialColor(boolean on) { getMaterialColorField().setValue(on); } public boolean getMaterialColor() { return getMaterialColorField().getValue(); } public boolean isMaterialColor() { return getMaterialColorField().getValue(); } //////////////////////////////////////////////// // materialAlpha //////////////////////////////////////////////// public SFBool getMaterialAlphaField() { if (isInstanceNode() == false) return materialAlphaField; return (SFBool)getExposedField(materialAlphaFieldString); } public void setMaterialAlpha(boolean on) { getMaterialAlphaField().setValue(on); } public boolean 
getMaterialAlpha() { return getMaterialAlphaField().getValue(); } public boolean isMaterialAlpha() { return getMaterialAlphaField().getValue(); } //////////////////////////////////////////////// // transparent //////////////////////////////////////////////// public SFBool getTransparentField() { if (isInstanceNode() == false) return transparentField; return (SFBool)getExposedField(transparentFieldString); } public void setTransparent(boolean on) { getTransparentField().setValue(on); } public boolean getTransparent() { return getTransparentField().getValue(); } public boolean isTransparent() { return getTransparentField().getValue(); } //////////////////////////////////////////////// // nomipmap //////////////////////////////////////////////// public SFBool getNomipmapField() { if (isInstanceNode() == false) return nomipmapField; return (SFBool)getExposedField(nomipmapFieldString); } public void setNomipmap(boolean on) { getNomipmapField().setValue(on); } public boolean getNomipmap() { return getNomipmapField().getValue(); } public boolean isNomipmap() { return getNomipmapField().getValue(); } //////////////////////////////////////////////// // Mode //////////////////////////////////////////////// public MFString getModeField() { if (isInstanceNode() == false) return modeField; return (MFString)getExposedField(modeFieldString); } public void addMode(String value) { getModeField().addValue(value); } public int getNModes() { return getModeField().getSize(); } public void setMode(int index, String value) { getModeField().set1Value(index, value); } public void setModes(String value) { getModeField().setValues(value); } //////////////////////////////////////////////// // Alpha //////////////////////////////////////////////// public SFFloat getAlphaField() { if (isInstanceNode() == false) return alphaField; return (SFFloat)getExposedField(alphaFieldString); } public void setAlpha(float value) { getAlphaField().setValue(value); } public void setAlpha(String value) { 
getAlphaField().setValue(value); } public float getAlpha() { return getAlphaField().getValue(); } //////////////////////////////////////////////// // Color //////////////////////////////////////////////// public SFColor getColorField() { if (isInstanceNode() == false) return colorField; return (SFColor)getExposedField(colorFieldString); } public void setColor(float value[]) { getColorField().setValue(value); } public void setColor(float r, float g, float b) { getColorField().setValue(r, g, b); } public void setColor(String value) { getColorField().setValue(value); } public void getColor(float value[]) { getColorField().getValue(value); } //////////////////////////////////////////////// // abstract functions //////////////////////////////////////////////// public boolean isChildNodeType(Node node) { return false; } public void initialize() { } public void uninitialize() { } public void update() { } //////////////////////////////////////////////// // Output //////////////////////////////////////////////// public void outputContext(PrintWriter printStream, String indentString) { } }
package com.hubspot.deploy;

import com.hubspot.singularity.executor.SingularityExecutorLogrotateFrequency;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * Mutable builder for {@link ExecutorData}.
 *
 * <p>Fix: the no-arg constructor previously left every {@code Optional}
 * field and every collection field {@code null}, so {@link #build()},
 * {@link #toString()} and the getters could hand {@code null} through an
 * {@code Optional}-typed API. The no-arg constructor now initializes
 * optionals to {@link Optional#empty()} and collections to immutable empty
 * instances; all setters are unchanged and still replace these defaults.
 */
public class ExecutorDataBuilder {
  private String cmd;
  private List<EmbeddedArtifact> embeddedArtifacts;
  private List<ExternalArtifact> externalArtifacts;
  private List<S3Artifact> s3Artifacts;
  private List<Integer> successfulExitCodes;
  private Optional<String> runningSentinel;
  private Optional<String> user;
  private List<String> extraCmdLineArgs;
  private Optional<String> loggingTag;
  private Map<String, String> loggingExtraFields;
  private Optional<Long> sigKillProcessesAfterMillis;
  private Optional<Integer> maxTaskThreads;
  private Optional<Boolean> preserveTaskSandboxAfterFinish;
  private Optional<Integer> maxOpenFiles;
  private Optional<Boolean> skipLogrotateAndCompress;
  private Optional<List<S3ArtifactSignature>> s3ArtifactSignatures;
  private Optional<SingularityExecutorLogrotateFrequency> logrotateFrequency;

  public ExecutorDataBuilder(
    String cmd,
    List<EmbeddedArtifact> embeddedArtifacts,
    List<ExternalArtifact> externalArtifacts,
    List<S3Artifact> s3Artifacts,
    List<Integer> successfulExitCodes,
    Optional<String> runningSentinel,
    Optional<String> user,
    List<String> extraCmdLineArgs,
    Optional<String> loggingTag,
    Map<String, String> loggingExtraFields,
    Optional<Long> sigKillProcessesAfterMillis,
    Optional<Integer> maxTaskThreads,
    Optional<Boolean> preserveTaskSandboxAfterFinish,
    Optional<Integer> maxOpenFiles,
    Optional<Boolean> skipLogrotateAndCompress,
    Optional<List<S3ArtifactSignature>> s3ArtifactSignatures,
    Optional<SingularityExecutorLogrotateFrequency> logrotateFrequency
  ) {
    this.cmd = cmd;
    this.embeddedArtifacts = embeddedArtifacts;
    this.externalArtifacts = externalArtifacts;
    this.s3Artifacts = s3Artifacts;
    this.successfulExitCodes = successfulExitCodes;
    this.runningSentinel = runningSentinel;
    this.user = user;
    this.extraCmdLineArgs = extraCmdLineArgs;
    this.loggingTag = loggingTag;
    this.loggingExtraFields = loggingExtraFields;
    this.sigKillProcessesAfterMillis = sigKillProcessesAfterMillis;
    this.maxTaskThreads = maxTaskThreads;
    this.preserveTaskSandboxAfterFinish = preserveTaskSandboxAfterFinish;
    this.maxOpenFiles = maxOpenFiles;
    this.skipLogrotateAndCompress = skipLogrotateAndCompress;
    this.s3ArtifactSignatures = s3ArtifactSignatures;
    this.logrotateFrequency = logrotateFrequency;
  }

  /**
   * Creates an empty builder with null-safe defaults: empty optionals and
   * empty (immutable) collections. {@code cmd} remains {@code null} until
   * {@link #setCmd(String)} is called, as there is no sensible default.
   */
  public ExecutorDataBuilder() {
    this.embeddedArtifacts = Collections.emptyList();
    this.externalArtifacts = Collections.emptyList();
    this.s3Artifacts = Collections.emptyList();
    this.successfulExitCodes = Collections.emptyList();
    this.runningSentinel = Optional.empty();
    this.user = Optional.empty();
    this.extraCmdLineArgs = Collections.emptyList();
    this.loggingTag = Optional.empty();
    this.loggingExtraFields = Collections.emptyMap();
    this.sigKillProcessesAfterMillis = Optional.empty();
    this.maxTaskThreads = Optional.empty();
    this.preserveTaskSandboxAfterFinish = Optional.empty();
    this.maxOpenFiles = Optional.empty();
    this.skipLogrotateAndCompress = Optional.empty();
    this.s3ArtifactSignatures = Optional.empty();
    this.logrotateFrequency = Optional.empty();
  }

  /** Builds an {@link ExecutorData} from the current builder state. */
  public ExecutorData build() {
    return new ExecutorData(
      cmd,
      embeddedArtifacts,
      externalArtifacts,
      s3Artifacts,
      successfulExitCodes,
      user,
      runningSentinel,
      extraCmdLineArgs,
      loggingTag,
      loggingExtraFields,
      sigKillProcessesAfterMillis,
      maxTaskThreads,
      preserveTaskSandboxAfterFinish,
      maxOpenFiles,
      skipLogrotateAndCompress,
      s3ArtifactSignatures,
      logrotateFrequency
    );
  }

  public Optional<String> getLoggingTag() {
    return loggingTag;
  }

  public ExecutorDataBuilder setLoggingTag(Optional<String> loggingTag) {
    this.loggingTag = loggingTag;
    return this;
  }

  public Map<String, String> getLoggingExtraFields() {
    return loggingExtraFields;
  }

  public ExecutorDataBuilder setLoggingExtraFields(
    Map<String, String> loggingExtraFields
  ) {
    this.loggingExtraFields = loggingExtraFields;
    return this;
  }

  public String getCmd() {
    return cmd;
  }

  public List<EmbeddedArtifact> getEmbeddedArtifacts() {
    return embeddedArtifacts;
  }

  public List<ExternalArtifact> getExternalArtifacts() {
    return externalArtifacts;
  }

  public List<Integer> getSuccessfulExitCodes() {
    return successfulExitCodes;
  }

  public List<String> getExtraCmdLineArgs() {
    return extraCmdLineArgs;
  }

  public Optional<String> getRunningSentinel() {
    return runningSentinel;
  }

  public Optional<String> getUser() {
    return user;
  }

  public ExecutorDataBuilder setCmd(String cmd) {
    this.cmd = cmd;
    return this;
  }

  public Optional<Long> getSigKillProcessesAfterMillis() {
    return sigKillProcessesAfterMillis;
  }

  public ExecutorDataBuilder setSigKillProcessesAfterMillis(
    Optional<Long> sigKillProcessesAfterMillis
  ) {
    this.sigKillProcessesAfterMillis = sigKillProcessesAfterMillis;
    return this;
  }

  public ExecutorDataBuilder setEmbeddedArtifacts(
    List<EmbeddedArtifact> embeddedArtifacts
  ) {
    this.embeddedArtifacts = embeddedArtifacts;
    return this;
  }

  public ExecutorDataBuilder setExternalArtifacts(
    List<ExternalArtifact> externalArtifacts
  ) {
    this.externalArtifacts = externalArtifacts;
    return this;
  }

  public ExecutorDataBuilder setSuccessfulExitCodes(List<Integer> successfulExitCodes) {
    this.successfulExitCodes = successfulExitCodes;
    return this;
  }

  public ExecutorDataBuilder setRunningSentinel(Optional<String> runningSentinel) {
    this.runningSentinel = runningSentinel;
    return this;
  }

  public ExecutorDataBuilder setUser(Optional<String> user) {
    this.user = user;
    return this;
  }

  public ExecutorDataBuilder setExtraCmdLineArgs(List<String> extraCmdLineArgs) {
    this.extraCmdLineArgs = extraCmdLineArgs;
    return this;
  }

  public List<S3Artifact> getS3Artifacts() {
    return s3Artifacts;
  }

  public ExecutorDataBuilder setS3Artifacts(List<S3Artifact> s3Artifacts) {
    this.s3Artifacts = s3Artifacts;
    return this;
  }

  public Optional<Integer> getMaxTaskThreads() {
    return maxTaskThreads;
  }

  public ExecutorDataBuilder setMaxTaskThreads(Optional<Integer> maxTaskThreads) {
    this.maxTaskThreads = maxTaskThreads;
    return this;
  }

  public Optional<Boolean> getPreserveTaskSandboxAfterFinish() {
    return preserveTaskSandboxAfterFinish;
  }

  public ExecutorDataBuilder setPreserveTaskSandboxAfterFinish(
    Optional<Boolean> preserveTaskSandboxAfterFinish
  ) {
    this.preserveTaskSandboxAfterFinish = preserveTaskSandboxAfterFinish;
    return this;
  }

  public Optional<Integer> getMaxOpenFiles() {
    return maxOpenFiles;
  }

  public ExecutorDataBuilder setMaxOpenFiles(Optional<Integer> maxOpenFiles) {
    this.maxOpenFiles = maxOpenFiles;
    return this;
  }

  public Optional<Boolean> getSkipLogrotateAndCompress() {
    return skipLogrotateAndCompress;
  }

  public ExecutorDataBuilder setSkipLogrotateAndCompress(
    Optional<Boolean> skipLogrotateAndCompress
  ) {
    this.skipLogrotateAndCompress = skipLogrotateAndCompress;
    return this;
  }

  public Optional<List<S3ArtifactSignature>> getS3ArtifactSignatures() {
    return s3ArtifactSignatures;
  }

  public ExecutorDataBuilder setS3ArtifactSignatures(
    Optional<List<S3ArtifactSignature>> s3ArtifactSignatures
  ) {
    this.s3ArtifactSignatures = s3ArtifactSignatures;
    return this;
  }

  public Optional<SingularityExecutorLogrotateFrequency> getLogrotateFrequency() {
    return logrotateFrequency;
  }

  public ExecutorDataBuilder setLogrotateFrequency(
    Optional<SingularityExecutorLogrotateFrequency> logrotateFrequency
  ) {
    this.logrotateFrequency = logrotateFrequency;
    return this;
  }

  @Override
  public String toString() {
    return (
      "ExecutorDataBuilder{" +
      "cmd='" +
      cmd +
      '\'' +
      ", embeddedArtifacts=" +
      embeddedArtifacts +
      ", externalArtifacts=" +
      externalArtifacts +
      ", s3Artifacts=" +
      s3Artifacts +
      ", successfulExitCodes=" +
      successfulExitCodes +
      ", runningSentinel=" +
      runningSentinel +
      ", user=" +
      user +
      ", extraCmdLineArgs=" +
      extraCmdLineArgs +
      ", loggingTag=" +
      loggingTag +
      ", loggingExtraFields=" +
      loggingExtraFields +
      ", sigKillProcessesAfterMillis=" +
      sigKillProcessesAfterMillis +
      ", maxTaskThreads=" +
      maxTaskThreads +
      ", preserveTaskSandboxAfterFinish=" +
      preserveTaskSandboxAfterFinish +
      ", maxOpenFiles=" +
      maxOpenFiles +
      ", skipLogrotateAndCompress=" +
      skipLogrotateAndCompress +
      ", s3ArtifactSignatures=" +
      s3ArtifactSignatures +
      ", logrotateFrequency=" +
      logrotateFrequency +
      '}'
    );
  }
}
/*
 * Copyright (C) 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.primitives;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkElementIndex;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndexes;

import java.io.Serializable;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.RandomAccess;

import com.google.common.annotations.GwtCompatible;

/**
 * Static utility methods pertaining to {@code float} primitives, that are not
 * already found in either {@link Float} or {@link Arrays}.
 *
 * @author Kevin Bourrillion
 * @since 1
 */
@GwtCompatible
public final class Floats {
  private Floats() {}

  /**
   * Returns a hash code for {@code value}; equal to the result of invoking
   * {@code ((Float) value).hashCode()}.
   *
   * @param value a primitive {@code float} value
   * @return a hash code for the value
   */
  public static int hashCode(float value) {
    // TODO: is there a better way, that's still gwt-safe?
    return ((Float) value).hashCode();
  }

  /**
   * Compares the two specified {@code float} values using {@link
   * Float#compare(float, float)}. You may prefer to invoke that method
   * directly; this method exists only for consistency with the other utilities
   * in this package.
   *
   * @param a the first {@code float} to compare
   * @param b the second {@code float} to compare
   * @return the result of invoking {@link Float#compare(float, float)}
   */
  public static int compare(float a, float b) {
    return Float.compare(a, b);
  }

  /**
   * Returns {@code true} if {@code target} is present as an element anywhere in
   * {@code array}. Note that this always returns {@code false} when {@code
   * target} is {@code NaN}.
   *
   * @param array an array of {@code float} values, possibly empty
   * @param target a primitive {@code float} value
   * @return {@code true} if {@code array[i] == target} for some value of {@code
   *     i}
   */
  public static boolean contains(float[] array, float target) {
    for (float value : array) {
      if (value == target) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns the index of the first appearance of the value {@code target} in
   * {@code array}. Note that this always returns {@code -1} when {@code target}
   * is {@code NaN}.
   *
   * @param array an array of {@code float} values, possibly empty
   * @param target a primitive {@code float} value
   * @return the least index {@code i} for which {@code array[i] == target}, or
   *     {@code -1} if no such index exists.
   */
  public static int indexOf(float[] array, float target) {
    return indexOf(array, target, 0, array.length);
  }

  // TODO: consider making this public
  // Searches the half-open range [start, end) for target.
  private static int indexOf(
      float[] array, float target, int start, int end) {
    for (int i = start; i < end; i++) {
      if (array[i] == target) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Returns the start position of the first occurrence of the specified {@code
   * target} within {@code array}, or {@code -1} if there is no such occurrence.
   *
   * <p>More formally, returns the lowest index {@code i} such that {@code
   * java.util.Arrays.copyOfRange(array, i, i + target.length)} contains exactly
   * the same elements as {@code target}.
   *
   * <p>Note that this always returns {@code -1} when {@code target} contains
   * {@code NaN}.
   *
   * @param array the array to search for the sequence {@code target}
   * @param target the array to search for as a sub-sequence of {@code array}
   */
  public static int indexOf(float[] array, float[] target) {
    checkNotNull(array, "array");
    checkNotNull(target, "target");
    if (target.length == 0) {
      return 0;
    }

    outer:
    for (int i = 0; i < array.length - target.length + 1; i++) {
      for (int j = 0; j < target.length; j++) {
        if (array[i + j] != target[j]) {
          continue outer;
        }
      }
      return i;
    }
    return -1;
  }

  /**
   * Returns the index of the last appearance of the value {@code target} in
   * {@code array}. Note that this always returns {@code -1} when {@code target}
   * is {@code NaN}.
   *
   * @param array an array of {@code float} values, possibly empty
   * @param target a primitive {@code float} value
   * @return the greatest index {@code i} for which {@code array[i] == target},
   *     or {@code -1} if no such index exists.
   */
  public static int lastIndexOf(float[] array, float target) {
    return lastIndexOf(array, target, 0, array.length);
  }

  // TODO: consider making this public
  // Searches the half-open range [start, end) backwards for target.
  private static int lastIndexOf(
      float[] array, float target, int start, int end) {
    for (int i = end - 1; i >= start; i--) {
      if (array[i] == target) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Returns the least value present in {@code array}, using the same rules of
   * comparison as {@link Math#min(float, float)}.
   *
   * @param array a <i>nonempty</i> array of {@code float} values
   * @return the value present in {@code array} that is less than or equal to
   *     every other value in the array
   * @throws IllegalArgumentException if {@code array} is empty
   */
  public static float min(float... array) {
    checkArgument(array.length > 0);
    float min = array[0];
    for (int i = 1; i < array.length; i++) {
      min = Math.min(min, array[i]);
    }
    return min;
  }

  /**
   * Returns the greatest value present in {@code array}, using the same rules
   * of comparison as {@link Math#max(float, float)}.
   *
   * @param array a <i>nonempty</i> array of {@code float} values
   * @return the value present in {@code array} that is greater than or equal to
   *     every other value in the array
   * @throws IllegalArgumentException if {@code array} is empty
   */
  public static float max(float... array) {
    checkArgument(array.length > 0);
    float max = array[0];
    for (int i = 1; i < array.length; i++) {
      max = Math.max(max, array[i]);
    }
    return max;
  }

  /**
   * Returns the values from each provided array combined into a single array.
   * For example, {@code concat(new float[] {a, b}, new float[] {}, new
   * float[] {c}} returns the array {@code {a, b, c}}.
   *
   * @param arrays zero or more {@code float} arrays
   * @return a single array containing all the values from the source arrays, in
   *     order
   */
  public static float[] concat(float[]... arrays) {
    int length = 0;
    for (float[] array : arrays) {
      length += array.length;
    }
    float[] result = new float[length];
    int pos = 0;
    for (float[] array : arrays) {
      System.arraycopy(array, 0, result, pos, array.length);
      pos += array.length;
    }
    return result;
  }

  /**
   * Returns an array containing the same values as {@code array}, but
   * guaranteed to be of a specified minimum length. If {@code array} already
   * has a length of at least {@code minLength}, it is returned directly.
   * Otherwise, a new array of size {@code minLength + padding} is returned,
   * containing the values of {@code array}, and zeroes in the remaining places.
   *
   * @param array the source array
   * @param minLength the minimum length the returned array must guarantee
   * @param padding an extra amount to "grow" the array by if growth is
   *     necessary
   * @throws IllegalArgumentException if {@code minLength} or {@code padding} is
   *     negative
   * @return an array containing the values of {@code array}, with guaranteed
   *     minimum length {@code minLength}
   */
  public static float[] ensureCapacity(
      float[] array, int minLength, int padding) {
    checkArgument(minLength >= 0, "Invalid minLength: %s", minLength);
    checkArgument(padding >= 0, "Invalid padding: %s", padding);
    return (array.length < minLength)
        ? copyOf(array, minLength + padding)
        : array;
  }

  // Arrays.copyOf() requires Java 6
  private static float[] copyOf(float[] original, int length) {
    float[] copy = new float[length];
    System.arraycopy(original, 0, copy, 0, Math.min(original.length, length));
    return copy;
  }

  /**
   * Returns a string containing the supplied {@code float} values, converted
   * to strings as specified by {@link Float#toString(float)}, and separated by
   * {@code separator}. For example, {@code join("-", 1.0f, 2.0f, 3.0f)}
   * returns the string {@code "1.0-2.0-3.0"}.
   *
   * @param separator the text that should appear between consecutive values in
   *     the resulting string (but not at the start or end)
   * @param array an array of {@code float} values, possibly empty
   */
  public static String join(String separator, float... array) {
    checkNotNull(separator);
    if (array.length == 0) {
      return "";
    }

    // For pre-sizing a builder, just get the right order of magnitude
    StringBuilder builder = new StringBuilder(array.length * 12);
    builder.append(array[0]);
    for (int i = 1; i < array.length; i++) {
      builder.append(separator).append(array[i]);
    }
    return builder.toString();
  }

  /**
   * Returns a comparator that compares two {@code float} arrays
   * lexicographically. That is, it compares, using {@link
   * #compare(float, float)}, the first pair of values that follow any
   * common prefix, or when one array is a prefix of the other, treats the
   * shorter array as the lesser. For example, {@code [] < [1.0f] < [1.0f, 2.0f]
   * < [2.0f]}.
   *
   * <p>The returned comparator is inconsistent with {@link
   * Object#equals(Object)} (since arrays support only identity equality), but
   * it is consistent with {@link Arrays#equals(float[], float[])}.
   *
   * @see <a href="http://en.wikipedia.org/wiki/Lexicographical_order">
   *     Lexicographical order</a> article at Wikipedia
   * @since 2
   */
  public static Comparator<float[]> lexicographicalComparator() {
    return LexicographicalComparator.INSTANCE;
  }

  // Stateless singleton comparator; enum gives free serialization support.
  private enum LexicographicalComparator implements Comparator<float[]> {
    INSTANCE;

    public int compare(float[] left, float[] right) {
      int minLength = Math.min(left.length, right.length);
      for (int i = 0; i < minLength; i++) {
        int result = Floats.compare(left[i], right[i]);
        if (result != 0) {
          return result;
        }
      }
      // Equal common prefix: the shorter array is the lesser.
      return left.length - right.length;
    }
  }

  /**
   * Copies a collection of {@code Float} instances into a new array of
   * primitive {@code float} values.
   *
   * <p>Elements are copied from the argument collection as if by {@code
   * collection.toArray()}. Calling this method is as thread-safe as calling
   * that method.
   *
   * @param collection a collection of {@code Float} objects
   * @return an array containing the same values as {@code collection}, in the
   *     same order, converted to primitives
   * @throws NullPointerException if {@code collection} or any of its elements
   *     is null
   */
  public static float[] toArray(Collection<Float> collection) {
    if (collection instanceof FloatArrayAsList) {
      // Fast path: the backing array can be copied without boxing.
      return ((FloatArrayAsList) collection).toFloatArray();
    }

    Object[] boxedArray = collection.toArray();
    int len = boxedArray.length;
    float[] array = new float[len];
    for (int i = 0; i < len; i++) {
      array[i] = (Float) boxedArray[i];
    }
    return array;
  }

  /**
   * Returns a fixed-size list backed by the specified array, similar to {@link
   * Arrays#asList(Object[])}. The list supports {@link List#set(int, Object)},
   * but any attempt to set a value to {@code null} will result in a {@link
   * NullPointerException}.
   *
   * <p>The returned list maintains the values, but not the identities, of
   * {@code Float} objects written to or read from it. For example, whether
   * {@code list.get(0) == list.get(0)} is true for the returned list is
   * unspecified.
   *
   * <p>The returned list may have unexpected behavior if it contains {@code
   * NaN}, or if {@code NaN} is used as a parameter to any of its methods.
   *
   * @param backingArray the array to back the list
   * @return a list view of the array
   */
  public static List<Float> asList(float... backingArray) {
    if (backingArray.length == 0) {
      return Collections.emptyList();
    }
    return new FloatArrayAsList(backingArray);
  }

  @GwtCompatible
  private static class FloatArrayAsList extends AbstractList<Float>
      implements RandomAccess, Serializable {
    final float[] array;
    final int start;
    final int end;

    FloatArrayAsList(float[] array) {
      this(array, 0, array.length);
    }

    FloatArrayAsList(float[] array, int start, int end) {
      this.array = array;
      this.start = start;
      this.end = end;
    }

    @Override public int size() {
      return end - start;
    }

    @Override public boolean isEmpty() {
      // Never empty: asList() returns Collections.emptyList() for length 0,
      // and subList() does the same for empty ranges.
      return false;
    }

    @Override public Float get(int index) {
      checkElementIndex(index, size());
      return array[start + index];
    }

    @Override public boolean contains(Object target) {
      // Overridden to prevent a ton of boxing
      return (target instanceof Float)
          && Floats.indexOf(array, (Float) target, start, end) != -1;
    }

    @Override public int indexOf(Object target) {
      // Overridden to prevent a ton of boxing
      if (target instanceof Float) {
        int i = Floats.indexOf(array, (Float) target, start, end);
        if (i >= 0) {
          return i - start;
        }
      }
      return -1;
    }

    @Override public int lastIndexOf(Object target) {
      // Overridden to prevent a ton of boxing
      if (target instanceof Float) {
        int i = Floats.lastIndexOf(array, (Float) target, start, end);
        if (i >= 0) {
          return i - start;
        }
      }
      return -1;
    }

    @Override public Float set(int index, Float element) {
      checkElementIndex(index, size());
      float oldValue = array[start + index];
      array[start + index] = element;
      return oldValue;
    }

    /** In GWT, List and AbstractList do not have the subList method. */
    /*@Override*/ public List<Float> subList(int fromIndex, int toIndex) {
      int size = size();
      checkPositionIndexes(fromIndex, toIndex, size);
      if (fromIndex == toIndex) {
        return Collections.emptyList();
      }
      return new FloatArrayAsList(array, start + fromIndex, start + toIndex);
    }

    @Override public boolean equals(Object object) {
      if (object == this) {
        return true;
      }
      if (object instanceof FloatArrayAsList) {
        FloatArrayAsList that = (FloatArrayAsList) object;
        int size = size();
        if (that.size() != size) {
          return false;
        }
        for (int i = 0; i < size; i++) {
          if (array[start + i] != that.array[that.start + i]) {
            return false;
          }
        }
        return true;
      }
      return super.equals(object);
    }

    @Override public int hashCode() {
      int result = 1;
      for (int i = start; i < end; i++) {
        result = 31 * result + Floats.hashCode(array[i]);
      }
      return result;
    }

    @Override public String toString() {
      StringBuilder builder = new StringBuilder(size() * 12);
      builder.append('[').append(array[start]);
      for (int i = start + 1; i < end; i++) {
        builder.append(", ").append(array[i]);
      }
      return builder.append(']').toString();
    }

    float[] toFloatArray() {
      // Arrays.copyOfRange() requires Java 6
      int size = size();
      float[] result = new float[size];
      System.arraycopy(array, start, result, 0, size);
      return result;
    }

    private static final long serialVersionUID = 0;
  }
}
/*
 Copyright 2014 Trustees of Indiana University

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package edu.iu.grnoc.flowspace_firewall;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Timer;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;

import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.IOFSwitch.PortChangeType;
import net.floodlightcontroller.core.IOFSwitchListener;
import net.floodlightcontroller.core.ImmutablePort;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.restserver.IRestApiService;

import org.openflow.protocol.OFFlowMod;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFType;
import org.openflow.protocol.statistics.OFStatistics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;

import edu.iu.grnoc.flowspace_firewall.web.FlowSpaceFirewallWebRoutable;
import edu.iu.grnoc.flowspace_firewall.web.IFlowSpaceFirewallService;
import edu.iu.grnoc.flowspace_firewall.FlowStatCacher;

/**
 * Floodlight module implementing the FlowSpace Firewall (FSFW): it slices each
 * connected OpenFlow switch among multiple controllers ("slices"), proxying
 * messages between switch and slice controllers, and caching statistics.
 *
 * <p>Each element of {@code slices} maps DPID -&gt; {@link Slicer} for one named
 * slice; {@code switches} tracks currently connected switches.
 */
public class FlowSpaceFirewall
    implements IFloodlightModule, IOFMessageListener, IOFSwitchListener, IFlowSpaceFirewallService {

  protected static Logger logger;

  protected IFloodlightProviderService floodlightProvider;
  private Timer statsTimer;
  private Timer controllerConnectTimer;
  // One HashMap per slice; each maps DPID -> Slicer for the switches in that slice.
  private ArrayList<HashMap<Long, Slicer>> slices;
  private List<IOFSwitch> switches;
  private FlowStatCacher statsCacher;
  private ControllerConnector controllerConnector;
  protected IRestApiService restApi;

  /** Module name used by Floodlight's listener ordering. */
  @Override
  public String getName() {
    return FlowSpaceFirewall.class.getSimpleName();
  }

  @Override
  public boolean isCallbackOrderingPrereq(OFType type, String name) {
    // No ordering constraints relative to other listeners.
    return false;
  }

  @Override
  public boolean isCallbackOrderingPostreq(OFType type, String name) {
    // No ordering constraints relative to other listeners.
    return false;
  }

  /**
   * Called when a switch joins: records it and creates a proxy for every slice
   * that configures this DPID. A stale entry for the same DPID is removed first.
   */
  @Override
  public void switchAdded(long switchId) {
    logger.info("Switch " + switchId + " has joined");
    IOFSwitch sw = floodlightProvider.getSwitch(switchId);

    // Belts and suspenders: there should never be a stale entry for this DPID.
    // Iterate over a snapshot because switchRemoved() mutates this.switches;
    // iterating the live list here would risk ConcurrentModificationException.
    for (IOFSwitch tmpSw : new ArrayList<IOFSwitch>(this.switches)) {
      if (tmpSw.getId() == switchId) {
        logger.error("Switch is already listed as connected! Removing!");
        this.switchRemoved(switchId);
      }
    }

    this.switches.add(sw);

    // Create a controller channel (proxy) for every slice that includes this switch.
    for (HashMap<Long, Slicer> slice : slices) {
      if (slice.containsKey(switchId)) {
        Slicer vlanSlicer = slice.get(switchId);
        controllerConnector.addProxy(switchId, new Proxy(sw, vlanSlicer, this));
      }
    }
  }

  /** Records a flow mod in the per-slice flow cache. */
  public void addFlowCache(long switchId, String sliceName, OFFlowMod flowMod) {
    this.statsCacher.addFlowCache(switchId, sliceName, flowMod);
  }

  /** Removes a flow mod from the per-slice flow cache. */
  public void delFlowCache(long switchId, String sliceName, OFFlowMod flowMod) {
    this.statsCacher.delFlowCache(switchId, sliceName, flowMod);
  }

  /** @return the live list of currently connected switches. */
  public List<IOFSwitch> getSwitches() {
    return this.switches;
  }

  /** @return cached port statistics for every port of the switch. */
  public HashMap<Short, OFStatistics> getPortStats(long switchId) {
    return statsCacher.getPortStats(switchId);
  }

  /** @return cached statistics for one port of the switch. */
  public OFStatistics getPortStats(long switchId, short portId) {
    return statsCacher.getPortStats(switchId, portId);
  }

  /** @return cached flow statistics restricted to the named slice. */
  public List<OFStatistics> getSlicedFlowStats(long switchId, String sliceName) {
    return statsCacher.getSlicedFlowStats(switchId, sliceName);
  }

  /** @return all cached statistics for the switch. */
  public List<OFStatistics> getStats(long switchId) {
    return statsCacher.getSwitchStats(switchId);
  }

  /** @return the proxies (one per slice) attached to the switch. */
  public List<Proxy> getSwitchProxies(long switchId) {
    return controllerConnector.getSwitchProxies(switchId);
  }

  /**
   * Installs or replaces the slicer for (dpid, slice name). If no slice with
   * that name already holds the DPID, a new single-entry slice map is appended.
   */
  public synchronized void setSlice(long dpid, String name, Slicer slice) {
    for (HashMap<Long, Slicer> hash : this.slices) {
      if (hash.containsKey(dpid)) {
        if (hash.get(dpid).getSliceName().equals(name)) {
          hash.put(dpid, slice);
          return;
        }
      }
    }
    // If we made it here then there was no existing slicer for this dpid/name.
    HashMap<Long, Slicer> tmp = new HashMap<Long, Slicer>();
    tmp.put(dpid, slice);
    this.slices.add(tmp);
  }

  /**
   * Removes the slicer for (dpid, slice name) and drops any slice maps that
   * become empty as a result.
   */
  public synchronized void removeSlice(long dpid, String name) {
    for (HashMap<Long, Slicer> hash : this.slices) {
      if (hash.containsKey(dpid)) {
        if (hash.get(dpid).getSliceName().equals(name)) {
          hash.remove(dpid);
        }
      }
    }
    // BUGFIX: the previous index-based loop removed an element without
    // decrementing the index, skipping the element that slid into its place.
    // Iterator.remove() handles the shift correctly.
    Iterator<HashMap<Long, Slicer>> it = this.slices.iterator();
    while (it.hasNext()) {
      if (it.next().isEmpty()) {
        it.remove();
      }
    }
  }

  /**
   * Called when a switch disconnects: drops it from the switch list, clears its
   * stats cache, and disconnects/removes every proxy attached to it.
   */
  @Override
  public void switchRemoved(long switchId) {
    logger.error("Switch removed!");
    List<Proxy> proxies = controllerConnector.getSwitchProxies(switchId);

    Iterator<IOFSwitch> switchIt = this.switches.iterator();
    while (switchIt.hasNext()) {
      IOFSwitch tmpSwitch = switchIt.next();
      if (tmpSwitch.getId() == switchId) {
        switchIt.remove();
      }
    }

    this.statsCacher.clearCache(switchId);

    Iterator<Proxy> it = proxies.iterator();
    while (it.hasNext()) {
      Proxy p = it.next();
      p.disconnect();
      it.remove();
    }
  }

  /**
   * Finds the slice with the given name.
   *
   * @return the DPID-&gt;Slicer map for that slice, or {@code null} if no slice
   *     with that name exists
   */
  public synchronized HashMap<Long, Slicer> getSlice(String name) {
    List<HashMap<Long, Slicer>> mySlices = Collections.synchronizedList(this.slices);
    Iterator<HashMap<Long, Slicer>> it = mySlices.iterator();
    while (it.hasNext()) {
      HashMap<Long, Slicer> slice = it.next();
      // All slicers in one map share the slice name, so checking any entry suffices.
      Iterator<Long> dpidIt = slice.keySet().iterator();
      if (dpidIt.hasNext()) {
        Long dpid = dpidIt.next();
        if (slice.get(dpid).getSliceName().equals(name)) {
          return slice;
        }
      }
    }
    return null;
  }

  /** @return a synchronized view of all configured slices. */
  public synchronized List<HashMap<Long, Slicer>> getSlices() {
    List<HashMap<Long, Slicer>> slices = Collections.synchronizedList(this.slices);
    logger.debug("slices size: " + slices.size());
    return slices;
  }

  @Override
  public void switchActivated(long switchId) {
    logger.debug("Switch Activated");
  }

  /** Detaches the proxy from the switch's proxy list. */
  public void removeProxy(Long switchId, Proxy p) {
    this.controllerConnector.removeProxy(switchId, p);
  }

  @Override
  public void switchPortChanged(long switchId, ImmutablePort port, PortChangeType type) {
    // Nothing to do here.
  }

  @Override
  public void switchChanged(long switchId) {
    // We don't do anything here.
    logger.debug("Switch changed!");
  }

  /**
   * reloadConfig - reloads the configuration of FSF.
   * Disconnects/re-connects/and connects to slices as the FSF config specifies.
   * This is only called via the web-service.
   *
   * @return {@code true} on success, {@code false} if the config could not be
   *     parsed (the previous in-memory state may be partially replaced)
   */
  public boolean reloadConfig() {
    ArrayList<HashMap<Long, Slicer>> newSlices;
    ArrayList<Proxy> toBeRemoved = new ArrayList<Proxy>();
    try {
      newSlices = ConfigParser.parseConfig("/etc/fsfw/fsfw.xml");

      // Remove the existing slices, then install the freshly parsed ones.
      this.slices.clear();
      for (HashMap<Long, Slicer> slice : newSlices) {
        for (Long dpid : slice.keySet()) {
          this.setSlice(dpid, slice.get(dpid).getSliceName(), slice.get(dpid));
        }
      }

      List<Proxy> proxies = controllerConnector.getAllProxies();
      logger.warn("number of proxies " + proxies.size());
      for (Proxy p : proxies) {
        // We know the proxy's switch and slice name; find the matching entry in
        // newSlices and point the proxy at the new slicer. Matched entries are
        // removed from newSlices so only brand-new slices remain afterwards.
        boolean updated = false;
        for (HashMap<Long, Slicer> slice : newSlices) {
          logger.debug("number of switches in newslice:" + slice.keySet().size());
          if (slice.containsKey(p.getSwitch().getId())
              && slice.get(p.getSwitch().getId()).getSliceName()
                  .equals(p.getSlicer().getSliceName())) {
            p.setSlicer(slice.get(p.getSwitch().getId()));
            logger.warn("Slice " + p.getSlicer().getSliceName()
                + " was found, setting updated to true");
            updated = true;
            slice.remove(p.getSwitch().getId());
          }
        }
        if (!updated) {
          // Slice vanished from the config: disconnect and mark for removal.
          logger.warn("Slice " + p.getSlicer().getSliceName() + ":"
              + p.getSlicer().getSwitchName() + " was not found, removing");
          p.disconnect();
          toBeRemoved.add(p);
        }
      }

      // Everything still configured has been updated and the stale proxies are
      // disconnected; now wire up proxies for slices that are new in the config.
      Iterator<HashMap<Long, Slicer>> sliceIt2 = newSlices.iterator();
      logger.warn("Number of items left in newSlices: " + newSlices.size());
      while (sliceIt2.hasNext()) {
        HashMap<Long, Slicer> slice = sliceIt2.next();
        if (!slice.isEmpty()) {
          for (Long dpid : slice.keySet()) {
            IOFSwitch sw = floodlightProvider.getSwitch(dpid);
            if (sw == null) {
              logger.debug("Switch was not connected... can't add the proxy");
            } else {
              Slicer vlanSlicer = slice.get(dpid);
              controllerConnector.addProxy(dpid, new Proxy(sw, vlanSlicer, this));
            }
          }
        }
      }

      // Finally drop the proxies (and their slice entries) flagged above.
      for (Proxy p : toBeRemoved) {
        this.removeProxy(p.getSwitch().getId(), p);
        this.removeSlice(p.getSwitch().getId(), p.getSlicer().getSliceName());
      }
    } catch (IOException e) {
      // CONSISTENCY: log through SLF4J like the other handlers instead of
      // printStackTrace(), preserving the stack trace via the throwable arg.
      logger.error("Problems reloading config: " + e.getMessage(), e);
      return false;
    } catch (SAXException e) {
      logger.error("Problems reloading config: " + e.getMessage(), e);
      return false;
    } catch (ParserConfigurationException e) {
      logger.error(e.getMessage());
      return false;
    } catch (XPathExpressionException e) {
      logger.error(e.getMessage());
      return false;
    } catch (InvalidConfigException e) {
      logger.error(e.getMsg());
      return false;
    }
    logger.debug("Number of slices after reload: " + this.slices.size());
    return true;
  }

  /**
   * OpenFlow message hook: forwards the message to every enabled slice proxy
   * for the originating switch. Always lets Floodlight processing continue.
   */
  @Override
  public net.floodlightcontroller.core.IListener.Command receive(
      IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
    if (sw == null || !sw.isActive()) {
      return Command.CONTINUE;
    }
    logger.debug("Received: " + msg.toString() + " from switch: " + sw.getStringId());
    List<Proxy> proxies = controllerConnector.getSwitchProxies(sw.getId());
    if (proxies == null) {
      logger.warn("No proxies for switch: " + sw.getStringId());
      return Command.CONTINUE;
    }
    for (Proxy p : proxies) {
      if (!p.getAdminStatus()) {
        logger.debug("slice disabled... skipping");
      } else {
        try {
          logger.debug("attempting to send " + msg.toString() + " to slice: "
              + p.getSlicer().getSliceName() + " from switch: "
              + p.getSlicer().getSwitchName());
          p.toController(msg, cntx);
        } catch (Exception e) {
          // Don't die please... just keep going and log the stack trace.
          logger.error("FSFW experienced an error:" + e.getMessage(), e);
        }
      }
    }
    return Command.CONTINUE;
  }

  @Override
  public Collection<Class<? extends IFloodlightService>> getModuleServices() {
    Collection<Class<? extends IFloodlightService>> l =
        new ArrayList<Class<? extends IFloodlightService>>();
    l.add(IFloodlightProviderService.class);
    l.add(IFlowSpaceFirewallService.class);
    return l;
  }

  @Override
  public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
    Map<Class<? extends IFloodlightService>, IFloodlightService> m =
        new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
    m.put(IFlowSpaceFirewallService.class, this);
    return m;
  }

  @Override
  public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
    // No declared dependencies beyond the services fetched in init().
    return null;
  }

  /**
   * Module init: resolves services, parses the config file (path overridable via
   * the {@code configFile} module parameter), and fails the module if no slices
   * could be loaded.
   *
   * @throws FloodlightModuleException if the configuration yields no slices
   */
  @Override
  public void init(FloodlightModuleContext context) throws FloodlightModuleException {
    floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class);
    logger = LoggerFactory.getLogger(FlowSpaceFirewall.class);
    restApi = context.getServiceImpl(IRestApiService.class);

    String configFile = "/etc/fsfw/fsfw.xml";
    Map<String, String> config = context.getConfigParams(this);
    if (config.containsKey("configFile")) {
      configFile = config.get("configFile");
    }

    try {
      this.slices = ConfigParser.parseConfig(configFile);
    } catch (SAXException e) {
      logger.error("Problems parsing " + configFile + ": " + e.getMessage());
    } catch (IOException e) {
      logger.error("Problems parsing " + configFile + ": " + e.getMessage());
    } catch (ParserConfigurationException e) {
      logger.error(e.getMessage());
    } catch (XPathExpressionException e) {
      logger.error(e.getMessage());
    } catch (InvalidConfigException e) {
      logger.error(e.getMsg());
    }

    if (this.slices == null || this.slices.size() == 0) {
      logger.error("Problem with the configuration file!");
      throw new FloodlightModuleException("Problem with the Config!");
    }
  }

  /**
   * Module startup: registers switch/message listeners, starts the 10-second
   * stats-caching and controller-connection timers, and exposes the REST API.
   */
  @Override
  public void startUp(FloodlightModuleContext context) throws FloodlightModuleException {
    floodlightProvider.addOFSwitchListener(this);
    floodlightProvider.addOFMessageListener(OFType.BARRIER_REPLY, this);
    floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this);
    floodlightProvider.addOFMessageListener(OFType.PORT_MOD, this);
    floodlightProvider.addOFMessageListener(OFType.PORT_STATUS, this);
    floodlightProvider.addOFMessageListener(OFType.ERROR, this);
    floodlightProvider.addOFMessageListener(OFType.FLOW_REMOVED, this);
    switches = Collections.synchronizedList(new ArrayList<IOFSwitch>());

    // Start up the stats collector timer (every 10 seconds).
    statsTimer = new Timer("StatsTimer");
    statsCacher = new FlowStatCacher(this);
    this.statsCacher.loadCache();
    statsTimer.scheduleAtFixedRate(statsCacher, 0, 10 * 1000);

    // Start up the controller connector timer (every 10 seconds).
    controllerConnectTimer = new Timer("ControllerConnectionTimer");
    controllerConnector = new ControllerConnector();
    controllerConnectTimer.scheduleAtFixedRate(controllerConnector, 0, 10 * 1000);

    restApi.addRestletRoutable(new FlowSpaceFirewallWebRoutable());
  }

  /**
   * Enables or disables the named slice on the switch.
   *
   * @return {@code true} if a matching proxy was found and updated
   */
  public boolean setSliceAdminState(Long dpid, String sliceName, boolean state) {
    List<Proxy> proxies = this.controllerConnector.getSwitchProxies(dpid);
    for (Proxy p : proxies) {
      if (p.getSlicer().getSliceName().equals(sliceName)) {
        logger.info("Setting Slice: " + sliceName + " admin state to " + state);
        p.setAdminStatus(state);
        return true;
      }
    }
    return false;
  }

  /** @return the proxy for (dpid, slice name), or {@code null} if none exists. */
  public Proxy getProxy(Long dpid, String sliceName) {
    List<Proxy> proxies = this.controllerConnector.getSwitchProxies(dpid);
    for (Proxy p : proxies) {
      if (p.getSlicer().getSliceName().equals(sliceName)) {
        return p;
      }
    }
    return null;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.processor.internals;

import org.apache.kafka.clients.MockClient;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.TopicConfig;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.MetricsReporter;
import org.apache.kafka.common.requests.AbstractRequest;
import org.apache.kafka.common.requests.ApiError;
import org.apache.kafka.common.requests.CreateTopicsRequest;
import org.apache.kafka.common.requests.CreateTopicsResponse;
import org.apache.kafka.common.requests.MetadataResponse;
import org.apache.kafka.common.requests.ProduceResponse;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.StreamsException;
import org.junit.Before;
import org.junit.Test;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;

/**
 * Unit tests for {@code StreamsKafkaClient}, exercising topic creation against
 * a {@link MockClient} (no real broker): verifies which topic-level configs end
 * up in the outgoing {@link CreateTopicsRequest}, config plumbing from
 * {@link StreamsConfig}, metric tagging, and metadata error handling.
 */
public class StreamsKafkaClientTest {

    private static final String TOPIC = "topic";
    private final MockClient kafkaClient = new MockClient(new MockTime());
    private final List<MetricsReporter> reporters = Collections.emptyList();
    // Minimal single-node cluster metadata handed to createTopics().
    private final MetadataResponse metadata = new MetadataResponse(Collections.singletonList(new Node(1, "host", 90)), "cluster", 1, Collections.<MetadataResponse.TopicMetadata>emptyList());
    private final Map<String, Object> config = new HashMap<>();
    // "delete" cleanup policy, no per-topic overrides.
    private final InternalTopicConfig topicConfigWithNoOverrides = new InternalTopicConfig(TOPIC,
            Collections.singleton(InternalTopicConfig.CleanupPolicy.delete), Collections.<String, String>emptyMap());
    private final Map<String, String> overridenTopicConfig = Collections.singletonMap(TopicConfig.DELETE_RETENTION_MS_CONFIG, "100");
    // "compact" cleanup policy plus an explicit delete.retention.ms override.
    private final InternalTopicConfig topicConfigWithOverrides = new InternalTopicConfig(TOPIC,
            Collections.singleton(InternalTopicConfig.CleanupPolicy.compact), overridenTopicConfig);

    /** Minimal StreamsConfig every test needs; individual tests add more keys. */
    @Before
    public void before() {
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "some_app_id");
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
    }

    /** Config values from StreamsConfig must survive conversion to the client's AbstractConfig. */
    @Test
    public void testConfigFromStreamsConfig() {
        for (final String expectedMechanism : asList("PLAIN", "SCRAM-SHA-512")) {
            config.put(SaslConfigs.SASL_MECHANISM, expectedMechanism);
            final AbstractConfig abstractConfig = StreamsKafkaClient.Config.fromStreamsConfig(config);
            assertEquals(expectedMechanism, abstractConfig.values().get(SaslConfigs.SASL_MECHANISM));
            assertEquals(expectedMechanism, abstractConfig.getString(SaslConfigs.SASL_MECHANISM));
        }
    }

    /** The cleanup.policy derived from InternalTopicConfig is sent with the request. */
    @Test
    public void shouldAddCleanupPolicyToTopicConfigWhenCreatingTopic() throws Exception {
        final StreamsKafkaClient streamsKafkaClient = createStreamsKafkaClient();
        verifyCorrectTopicConfigs(streamsKafkaClient, topicConfigWithNoOverrides, Collections.singletonMap("cleanup.policy", "delete"));
    }

    /** topic.-prefixed defaults from StreamsConfig flow into the topic configs. */
    @Test
    public void shouldAddDefaultTopicConfigFromStreamConfig() throws Exception {
        config.put(StreamsConfig.topicPrefix(TopicConfig.SEGMENT_MS_CONFIG), "100");
        config.put(StreamsConfig.topicPrefix(TopicConfig.COMPRESSION_TYPE_CONFIG), "gzip");
        final Map<String, String> expectedConfigs = new HashMap<>();
        expectedConfigs.put(TopicConfig.SEGMENT_MS_CONFIG, "100");
        expectedConfigs.put(TopicConfig.COMPRESSION_TYPE_CONFIG, "gzip");
        expectedConfigs.put(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_DELETE);
        final StreamsKafkaClient streamsKafkaClient = createStreamsKafkaClient();
        verifyCorrectTopicConfigs(streamsKafkaClient, topicConfigWithNoOverrides, expectedConfigs);
    }

    /** Per-topic overrides supplied via InternalTopicConfig are sent as-is. */
    @Test
    public void shouldSetPropertiesDefinedByInternalTopicConfig() throws Exception {
        final Map<String, String> expectedConfigs = new HashMap<>(overridenTopicConfig);
        expectedConfigs.put(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
        final StreamsKafkaClient streamsKafkaClient = createStreamsKafkaClient();
        verifyCorrectTopicConfigs(streamsKafkaClient, topicConfigWithOverrides, expectedConfigs);
    }

    /** InternalTopicConfig overrides win over topic.-prefixed StreamsConfig defaults. */
    @Test
    public void shouldOverrideDefaultTopicConfigsFromStreamsConfig() throws Exception {
        config.put(StreamsConfig.topicPrefix(TopicConfig.DELETE_RETENTION_MS_CONFIG), "99999");
        config.put(StreamsConfig.topicPrefix(TopicConfig.SEGMENT_MS_CONFIG), "988");
        final Map<String, String> expectedConfigs = new HashMap<>(overridenTopicConfig);
        expectedConfigs.put(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
        // delete.retention.ms comes from the topic override (100), not the default (99999).
        expectedConfigs.put(TopicConfig.DELETE_RETENTION_MS_CONFIG, "100");
        expectedConfigs.put(TopicConfig.SEGMENT_MS_CONFIG, "988");
        final StreamsKafkaClient streamsKafkaClient = createStreamsKafkaClient();
        verifyCorrectTopicConfigs(streamsKafkaClient, topicConfigWithOverrides, expectedConfigs);
    }

    /** A null topic.-prefixed default must be dropped rather than sent to the broker. */
    @Test
    public void shouldNotAllowNullTopicConfigs() throws Exception {
        config.put(StreamsConfig.topicPrefix(TopicConfig.DELETE_RETENTION_MS_CONFIG), null);
        final StreamsKafkaClient streamsKafkaClient = createStreamsKafkaClient();
        verifyCorrectTopicConfigs(streamsKafkaClient, topicConfigWithNoOverrides, Collections.singletonMap("cleanup.policy", "delete"));
    }

    /** Every metric the client registers must carry the configured client-id tag. */
    @Test
    public void metricsShouldBeTaggedWithClientId() {
        config.put(StreamsConfig.CLIENT_ID_CONFIG, "some_client_id");
        config.put(StreamsConfig.METRIC_REPORTER_CLASSES_CONFIG, TestMetricsReporter.class.getName());
        StreamsKafkaClient.create(config);
        assertFalse(TestMetricsReporter.METRICS.isEmpty());
        for (KafkaMetric kafkaMetric : TestMetricsReporter.METRICS.values()) {
            assertEquals("some_client_id", kafkaMetric.metricName().tags().get("client-id"));
        }
    }

    /** A null metadata response is surfaced to the caller as a StreamsException. */
    @Test(expected = StreamsException.class)
    public void shouldThrowStreamsExceptionOnEmptyFetchMetadataResponse() {
        kafkaClient.prepareResponse(null);
        final StreamsKafkaClient streamsKafkaClient = createStreamsKafkaClient();
        streamsKafkaClient.fetchMetadata();
    }

    /** A response of the wrong type (here: ProduceResponse) also yields a StreamsException. */
    @Test(expected = StreamsException.class)
    public void shouldThrowStreamsExceptionWhenFetchMetadataResponseInconsistent() {
        kafkaClient.prepareResponse(new ProduceResponse(Collections.<TopicPartition, ProduceResponse.PartitionResponse>emptyMap()));
        final StreamsKafkaClient streamsKafkaClient = createStreamsKafkaClient();
        streamsKafkaClient.fetchMetadata();
    }

    /**
     * Drives createTopics() against the mock client and asserts that the
     * CreateTopicsRequest sent over the wire carries exactly expectedConfigs.
     * The RequestMatcher captures the request's per-topic configs as a side
     * effect before the canned success response is returned.
     */
    private void verifyCorrectTopicConfigs(final StreamsKafkaClient streamsKafkaClient,
                                           final InternalTopicConfig internalTopicConfig,
                                           final Map<String, String> expectedConfigs) {
        final Map<String, String> requestedTopicConfigs = new HashMap<>();

        kafkaClient.prepareResponse(new MockClient.RequestMatcher() {
            @Override
            public boolean matches(final AbstractRequest body) {
                if (!(body instanceof CreateTopicsRequest)) {
                    return false;
                }
                final CreateTopicsRequest request = (CreateTopicsRequest) body;
                final Map<String, CreateTopicsRequest.TopicDetails> topics = request.topics();
                final CreateTopicsRequest.TopicDetails topicDetails = topics.get(TOPIC);
                requestedTopicConfigs.putAll(topicDetails.configs);
                return true;
            }
        }, new CreateTopicsResponse(Collections.singletonMap(TOPIC, ApiError.NONE)));

        streamsKafkaClient.createTopics(Collections.singletonMap(internalTopicConfig, 1), 1, 1, metadata);

        assertThat(requestedTopicConfigs, equalTo(expectedConfigs));
    }

    /** Builds a client wired to the shared MockClient and test config. */
    private StreamsKafkaClient createStreamsKafkaClient() {
        return new StreamsKafkaClient(StreamsKafkaClient.Config.fromStreamsConfig(config),
                                      kafkaClient,
                                      reporters,
                                      new LogContext());
    }

    /** Static metrics sink shared across tests; registered by class name via config. */
    public static class TestMetricsReporter implements MetricsReporter {
        static final Map<MetricName, KafkaMetric> METRICS = new HashMap<>();

        @Override
        public void configure(final Map<String, ?> configs) { }

        @Override
        public void init(final List<KafkaMetric> metrics) {
            for (final KafkaMetric metric : metrics) {
                metricChange(metric);
            }
        }

        @Override
        public void metricChange(final KafkaMetric metric) {
            METRICS.put(metric.metricName(), metric);
        }

        @Override
        public void metricRemoval(final KafkaMetric metric) {
            METRICS.remove(metric.metricName());
        }

        @Override
        public void close() {
            METRICS.clear();
        }
    }
}
/**
 * Copyright 2016 Smart Society Services B.V.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 */
package org.opensmartgridplatform.cucumber.platform;

import org.joda.time.DateTime;
import org.opensmartgridplatform.domain.core.valueobjects.DeviceLifecycleStatus;
import org.opensmartgridplatform.domain.core.valueobjects.LongTermIntervalType;
import org.opensmartgridplatform.domain.core.valueobjects.RelayType;

/**
 * Defaults within the database.
 *
 * <p>Default values used by the cucumber test steps when a scenario does not supply an
 * explicit value. Declaration order matters for the derived string constants near the end
 * ({@code DEVICE_OUTPUT_SETTINGS}, {@code RELAY_STATUSES}), which reference constants
 * declared earlier in this class.
 */
public class PlatformDefaults {

    // --- Device status / activation flags ---
    public static final Boolean DEFAULT_ACTIVATED = true;
    public static final Boolean DEFAULT_ACTIVE = true;
    public static final DeviceLifecycleStatus DEFAULT_DEVICE_LIFECYCLE_STATUS = DeviceLifecycleStatus.IN_USE;

    // --- Actual measurement defaults ---
    public static final Integer DEFAULT_ACTUAL_CONSUMED_ENERGY = 96;
    public static final Integer DEFAULT_ACTUAL_CONSUMED_POWER = 48;
    public static final Integer DEFAULT_ACTUAL_CURRENT1 = 1;
    public static final Integer DEFAULT_ACTUAL_CURRENT2 = 2;
    public static final Integer DEFAULT_ACTUAL_CURRENT3 = 3;
    public static final Integer DEFAULT_ACTUAL_POWER1 = 1;
    public static final Integer DEFAULT_ACTUAL_POWER2 = 2;
    public static final Integer DEFAULT_ACTUAL_POWER3 = 3;
    public static final String DEFAULT_ALIAS = "";
    public static final Boolean DEFAULT_ALLOWED = false;
    public static final Integer DEFAULT_AVERAGE_POWER_FACTOR1 = 1;
    public static final Integer DEFAULT_AVERAGE_POWER_FACTOR2 = 2;
    public static final Integer DEFAULT_AVERAGE_POWER_FACTOR3 = 3;
    public static final String DEFAULT_BEGIN_DATE = "";
    public static final Short DEFAULT_CDMA_BATCH_NUMBER = 2;
    public static final String DEFAULT_CDMA_MAST_SEGMENT = "300/3";
    public static final Short DEFAULT_CHANNEL = (short) 1;

    // --- Container (physical location) defaults ---
    public static final String DEFAULT_CONTAINER_CITY = "StandardCity";
    public static final String DEFAULT_CONTAINER_MUNICIPALITY = "GreaterStandardArea";
    public static final String DEFAULT_CONTAINER_NUMBER = "123";
    public static final String DEFAULT_CONTAINER_POSTALCODE = "6546KM";
    public static final String DEFAULT_CONTAINER_STREET = "StandardStreet";
    public static final String DEFAULT_DC_LIGHTS = "0";

    // --- Device / device-model defaults ---
    // NOTE(review): random id, re-evaluated each JVM start — presumably intended to avoid
    // collisions between test runs; confirm no scenario depends on a stable value.
    public static final Long DEFAULT_DEVICE_ID = new java.util.Random().nextLong();
    public static final String DEFAULT_DEVICE_IDENTIFICATION = "TD01";
    public static final String DEFAULT_DEVICE_MODEL_DESCRIPTION = "Test Model";
    public static final String DEFAULT_DEVICE_MODEL_MANUFACTURER = "Kaif";
    public static final Boolean DEFAULT_DEVICE_MODEL_METERED = true;
    public static final String DEFAULT_DEVICE_MODEL_MODEL_CODE = "Test";
    public static final String DEFAULT_DEVICE_MODEL_NAME = "Test Model";
    public static final String DEFAULT_DEVICE_OUTPUT_SETTING_ALIAS = "Continues burner";
    public static final Integer DEFAULT_DEVICE_OUTPUT_SETTING_EXTERNALID = 1;
    public static final Integer DEFAULT_DEVICE_OUTPUT_SETTING_INTERNALID = 1;
    public static final String DEFAULT_SCHEDULE_CODE = "ScheduleCode";
    public static final String CODE = "Code";
    public static final String DEFAULT_DEVICE_TYPE = "SSLD";
    public static final Integer DEFAULT_DIMVALUE = 100;
    public static final String DEFAULT_DOMAINS = "COMMON;PUBLIC_LIGHTING;TARIFF_SWITCHING";
    public static final String DEFAULT_EAN_DESCRIPTION = "dummy EAN description";
    public static final String DEFAULT_END_DATE = "";
    public static final String DEFAULT_EVENT_DESCRIPTION = "Event description";
    public static final String DEFAULT_EVENTNOTIFICATIONS = "";
    public static final String DEFAULT_EVENTNOTIFICATIONTYPES = "";
    public static final Integer DEFAULT_EXTERNALID = 0;
    public static final Boolean DEFAULT_FILESTORAGE = true;
    public static final String DEFAULT_FIRMWARE_IDENTIFICATION = "";
    public static final Boolean DEFAULT_HASSCHEDULE = false;
    public static final Boolean DEFAULT_INDEBUGMODE = false;
    public static final Integer DEFAULT_INDEX = 0;
    public static final Integer DEFAULT_INTERNALID = 0;
    public static final Boolean DEFAULT_IS_ACTIVATED = true;
    public static final Boolean DEFAULT_ISIMMEDIATE = false;
    public static final Float DEFAULT_LATITUDE = 0f;
    public static final String DEFAULT_LIGHTVALUES = "";
    public static final String DEFAULT_TARIFFVALUES = "";
    public static final Integer DEFAULT_LONG_INTERVAL = 1;
    public static final Float DEFAULT_LONGITUDE = 0f;

    // --- Manufacturer / organization defaults ---
    public static final String DEFAULT_MANUFACTURER_CODE = "Test";
    public static final String DEFAULT_MANUFACTURER_NAME = "Test";
    public static final Boolean DEFAULT_MANUFACTURER_USE_PREFIX = false;
    public static final String DEFAULT_NEW_ORGANIZATION_IDENTIFICATION = "NewOrganization";
    public static final String DEFAULT_NEW_ORGANIZATION_NAME = "New Organization";
    public static final Boolean DEFAULT_ON = true;
    public static final String DEFAULT_ORGANIZATION_DESCRIPTION = "Test Organization";
    public static final String DEFAULT_ORGANIZATION_DOMAINS = "COMMON;PUBLIC_LIGHTING;TARIFF_SWITCHING";
    public static final Boolean DEFAULT_ORGANIZATION_ENABLED = true;
    public static final String DEFAULT_ORGANIZATION_IDENTIFICATION = "test-org";
    public static final String DEFAULT_ORGANIZATION_IDENTIFICATION_TO_FIND = "test-org";
    public static final String DEFAULT_DELEGATE_ORGANIZATION_IDENTIFICATION = "test-org";
    public static final String DEFAULT_ORGANIZATION_NAME = "Test organization";
    public static final String DEFAULT_ORGANIZATION_PREFIX = "cgi";
    public static final String DEFAULT_OWNER = "test-org";

    // --- Paging / protocol defaults ---
    public static final Short DEFAULT_PAGE = 0;
    public static final Integer DEFAULT_PAGE_SIZE = 25;
    public static final String DEFAULT_PERIOD_TYPE = "INTERVAL";
    public static final String DEFAULT_PREFIX = "MAA";
    public static final String DEFAULT_PROTOCOL = "OSLP ELSTER";
    // NOTE(review): random id, same caveat as DEFAULT_DEVICE_ID above.
    public static final Long DEFAULT_PROTOCOL_INFO_ID = new java.util.Random().nextLong();
    public static final String DEFAULT_PROTOCOL_VERSION = "1.0";
    public static final String DEFAULT_PUBLIC_KEY = "123456abcdef";
    public static final Boolean DEFAULT_PUBLICKEYPRESENT = true;
    public static final String DEFAULT_PUBLICLIGHTING_DESCRIPTION = "";
    public static final String DEFAULT_RECORD_TIME = "";
    public static final Integer DEFAULT_SHORT_INTERVAL = 15;
    public static final String DEFAULT_SMART_METER_DEVICE_IDENTIFICATION = "TEST1024000000001";
    public static final String DEFAULT_SMART_METER_GAS_DEVICE_IDENTIFICATION = "TESTG102400000001";
    public static final String DEFAULT_SUPPLIER = "Kaifa";
    // Evaluated once at class-load time: "yesterday" relative to test start.
    public static final DateTime DEFAULT_TECHNICAL_INSTALLATION_DATE = DateTime.now().minusDays(1);
    public static final Integer DEFAULT_TOTAL_LIGHTING_HOURS = 144;
    public static final Boolean DEFAULT_USE_PAGES = true;
    public static final String DEFAULT_USER_NAME = "Cucumber";
    public static final String DEVICE_MODEL_MODEL_CODE = "Test";
    public static final String DEVICE_MODEL_NAME = "Testmodel";

    // --- DLMS (smart meter) defaults ---
    public static final String DLMS_DEFAULT_COMMUNICATION_METHOD = "GPRS";
    public static final String DLMS_DEFAULT_DEVICE_TYPE = "SMART_METER_E";
    public static final Boolean DLMS_DEFAULT_HSL3_ACTIVE = false;
    public static final Boolean DLMS_DEFAULT_HSL4_ACTIVE = false;
    public static final Boolean DLMS_DEFAULT_HSL5_ACTIVE = true;
    public static final Boolean DLMS_DEFAULT_IP_ADDRESS_IS_STATIC = true;
    public static final Long DLMS_DEFAULT_LOGICAL_ID = 1L;
    public static final Long DLMS_DEFAULT_PORT = 1024L;
    public static final String DLMS_DEFAULT_DEVICE_DELIVERY_DATE = "2016-05-11T00:00:00.000Z";
    public static final String EMAIL = "someone@somewhere.nl";
    public static final Boolean EVENTS_NODELIST_EXPECTED = false;
    public static final String EXPECTED_RESULT_OK = "OK";
    // Evaluated once at class-load time: one week after test start.
    public static final DateTime EXPIRYDATECONTRACT = DateTime.now().plusWeeks(1);
    public static final LongTermIntervalType INTERVAL_TYPE = LongTermIntervalType.DAYS;
    public static final Integer LONG_INTERVAL = 1;
    public static final String PHONENUMBER = "+31 43 1234567";
    public static final org.opensmartgridplatform.domain.core.valueobjects.PlatformFunctionGroup PLATFORM_FUNCTION_GROUP = org.opensmartgridplatform.domain.core.valueobjects.PlatformFunctionGroup.ADMIN;
    public static final org.opensmartgridplatform.domain.core.valueobjects.DeviceFunctionGroup DEVICE_FUNCTION_GROUP = org.opensmartgridplatform.domain.core.valueobjects.DeviceFunctionGroup.OWNER;
    public static final Integer SHORT_INTERVAL = 15;
    public static final String SMART_METER_E = "SMART_METER_E";
    public static final String SMART_METER_G = "SMART_METER_G";

    // --- Firmware defaults ---
    public static final String FIRMWARE_IDENTIFICATION = "F01";
    public static final Boolean FIRMWARE_PUSH_TO_NEW_DEVICE = false;
    public static final String DC_LIGHTS = "";
    public static final int FIRMWARE_ID = 0;
    public static final String FIRMWARE_DESCRIPTION = "Test Firmware";
    public static final Boolean IN_MAINTENANCE = false;
    public static final String FIRMWARE_MODULE_VERSION_COMM = "1";
    public static final String FIRMWARE_MODULE_VERSION_FUNC = "2";
    public static final String FIRMWARE_MODULE_VERSION_MA = "3";
    public static final String FIRMWARE_MODULE_VERSION_MBUS = "4";
    public static final String FIRMWARE_MODULE_VERSION_SEC = "5";
    public static final String FIRMWARE_INSTALLED_BY = "Test User";
    public static final String FIRMWARE_INSTALLATION_DATE = "now";
    public static final Boolean FIRMWARE_FILE_EXISTS = true;
    public static final String FIRMWARE_FILENAME = "firmware.txt";
    public static final Boolean DEVICE_IN_MAINTENANCE = false;

    // --- Network / OSLP simulator defaults ---
    public static final String LOCALHOST = "127.0.0.1";
    public static final Integer OSLP_SERVER_PORT = 12121;
    public static final Integer RANDOM_DEVICE = 0;
    public static final Integer OSLP_ELSTER_SERVER_PORT = 12122;
    public static final Long DEFAULT_SCHEDULE_VERSION = 1L;
    public static final String SMSTYPE = "?";
    public static final String LIGHTMEASUREMENT_LASTMESSAGE = "";
    public static final Integer SMS_INDEX = 1;
    public static final boolean DEFAULT_BYPASS_RETRY = false;

    // --- Relay / schedule defaults ---
    public static final int DEFAULT_KEY_RELAY = -1;
    public static final String DEFAULT_KEY_RELAYFUNCTION = "Relais 2";
    public static final String DEFAULT_TYPE_DAY = "Alle dagen";
    public static final String DEFAULT_TIME = "12:00";
    public static final String TIMESTAMP = "now";
    public static final Integer REQUESTED_PAGE = 0;
    public static final Integer RELAY_OVERVIEW_INDEX = 1;
    public static final Boolean LAST_SWITCHING_EVENT_STATE = false;
    public static final Boolean LAST_KNOWN_STATE = false;
    // Evaluated once at class-load time.
    public static final DateTime DATE_NOW = DateTime.now();
    public static final String TODAY = "today";
    public static final String DEVICE_UID = "dGVzdDEyMzQ1Njc4";
    public static final RelayType DEFAULT_DEVICE_OUTPUT_SETTING_RELAY_TYPE = RelayType.LIGHT;
    // Derived comma-separated "internalId, externalId, relayType, alias" strings;
    // both constants reference the DEFAULT_DEVICE_OUTPUT_SETTING_* values declared above.
    public static final String DEVICE_OUTPUT_SETTINGS = DEFAULT_DEVICE_OUTPUT_SETTING_INTERNALID + ", "
            + DEFAULT_DEVICE_OUTPUT_SETTING_EXTERNALID + ", " + DEFAULT_DEVICE_OUTPUT_SETTING_RELAY_TYPE + ", "
            + DEFAULT_DEVICE_OUTPUT_SETTING_ALIAS;
    public static final String RELAY_STATUSES = DEFAULT_DEVICE_OUTPUT_SETTING_INTERNALID + ", "
            + DEFAULT_DEVICE_OUTPUT_SETTING_EXTERNALID + ", " + DEFAULT_DEVICE_OUTPUT_SETTING_RELAY_TYPE + ", "
            + DEFAULT_DEVICE_OUTPUT_SETTING_ALIAS;
    public static final Short DEFAULT_ASTRONOMICAL_SUNRISE_OFFSET = 0;
    public static final Short DEFAULT_ASTRONOMICAL_SUNSET_OFFSET = 0;
}
package org.runetranscriber.swingui;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.util.ArrayList;
import java.util.List;
import javax.swing.BorderFactory;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSpinner;
import javax.swing.SwingConstants;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.runetranscriber.core.FontLetter;
import org.runetranscriber.core.FontTranscriber;
import org.runetranscriber.core.Rune;

/**
 * Provides a font viewer: a scrollable grid of glyphs for the transcriber's font, one
 * {@code PAGE_LENGTH}-character page at a time, with a spinner to switch pages.
 *
 * @param <R> Rune type parameter.
 * @param <F> Font letter type parameter.
 */
public final class FontViewer<R extends Rune, F extends FontLetter> extends JPanel
{
    /** Serial version UID (JPanel is Serializable). */
    private static final long serialVersionUID = 1L;

    /**
     * Tengwar diacritics. These characters are combining marks in the Tengwar font, so they
     * are rendered on a short carrier glyph ("`") to be visible on their own.
     */
    private static final List<String> DIACRITICS = new ArrayList<String>();

    static
    {
        DIACRITICS.add("B");
        DIACRITICS.add("C");
        DIACRITICS.add("D");
        DIACRITICS.add("E");
        DIACRITICS.add("F");
        DIACRITICS.add("G");
        DIACRITICS.add("H");
        DIACRITICS.add("J");
        DIACRITICS.add("L");
        DIACRITICS.add("M");
        DIACRITICS.add("N");
        DIACRITICS.add("O");
        DIACRITICS.add("P");
        DIACRITICS.add("R");
        DIACRITICS.add("T");
        DIACRITICS.add("U");
        DIACRITICS.add("V");
        DIACRITICS.add("Y");
        DIACRITICS.add("[");
        DIACRITICS.add("^");
        DIACRITICS.add("p");
        DIACRITICS.add("{");
        DIACRITICS.add("|");
        DIACRITICS.add("}");
        DIACRITICS.add("0");
        DIACRITICS.add(":");
        DIACRITICS.add(";");
        DIACRITICS.add("/");
        DIACRITICS.add("?");
        DIACRITICS.add("=");
        DIACRITICS.add(">");
        DIACRITICS.add("\"");
        DIACRITICS.add("'");
    }

    /** Page length: number of characters shown per page. */
    private static final int PAGE_LENGTH = 256;

    /** Font panel holding one subpanel per row of glyphs. */
    private final JPanel fontPanel;

    /** Font transcriber supplying the font to display. */
    private final FontTranscriber<R, F> fontTranscriber;

    /** Flag indicating if this is using the Tengwar font. */
    private final boolean isTengwar;

    /** Page number widget. */
    private final JSpinner pageNumberUI;

    /**
     * Construct this object.
     *
     * @param fontTranscriber Font transcriber.
     * @param initialPageNumber Initial page number.
     */
    @SuppressWarnings("hiding")
    public FontViewer(final FontTranscriber<R, F> fontTranscriber, final int initialPageNumber)
    {
        this.fontTranscriber = fontTranscriber;

        // Diacritics only need a carrier glyph for the Tengwar font.
        isTengwar = "TengwarSindarin".equals(fontTranscriber.getFont().getName());

        fontPanel = createFontPanel();
        pageNumberUI = createPageNumberUI();

        final JPanel pagePanel = new JPanel();
        pagePanel.add(pageNumberUI);

        setLayout(new BorderLayout());
        add(new JScrollPane(fontPanel), BorderLayout.CENTER);
        add(pagePanel, BorderLayout.SOUTH);

        // Fill explicitly: setValue() only fires the change listener when the value changes,
        // so a zero initial page would otherwise leave the panel empty.
        fillFontPanel(initialPageNumber);
        pageNumberUI.setValue(initialPageNumber);
    }

    /**
     * Rebuild the glyph grid for the given page.
     *
     * @param pageNum Page number.
     */
    void fillFontPanel(final int pageNum)
    {
        fontPanel.removeAll();

        final int step = 16; // glyphs per subpanel row

        for (int i = 0; i < PAGE_LENGTH; i += step)
        {
            final int start = (pageNum * PAGE_LENGTH) + i;
            fontPanel.add(createSubpanel(start, step));
        }

        fontPanel.revalidate();
        fontPanel.repaint();
    }

    /**
     * @return a new font panel.
     */
    private JPanel createFontPanel()
    {
        return new JPanel(new GridLayout2(1, 0, 10, 10));
    }

    /**
     * @param text Text.
     *
     * @return a new bordered, centered label.
     */
    private JLabel createLabel(final String text)
    {
        final JLabel answer = new JLabel(text);
        final int gap = 2;
        answer.setBorder(BorderFactory.createCompoundBorder(BorderFactory.createLineBorder(Color.BLACK),
                BorderFactory.createEmptyBorder(gap, gap, gap, gap)));
        answer.setHorizontalAlignment(SwingConstants.CENTER);

        return answer;
    }

    /**
     * @return a new page number widget that refills the glyph grid on change.
     */
    private JSpinner createPageNumberUI()
    {
        final JSpinner answer = new JSpinner();
        final Dimension minSize = answer.getMinimumSize();
        answer.setPreferredSize(new Dimension(100, minSize.height));

        answer.addChangeListener(new ChangeListener()
        {
            @Override
            public void stateChanged(final ChangeEvent event)
            {
                final JSpinner spinner = (JSpinner)event.getSource();
                final int pageNum = (Integer)spinner.getValue();
                fillFontPanel(pageNum);
            }
        });

        return answer;
    }

    /**
     * Build one row: for each character, a plain label, the glyph rendered in the
     * transcriber's font, and the character's integer code.
     *
     * @param start Start index.
     * @param length Length.
     *
     * @return a new panel.
     */
    private JPanel createSubpanel(final int start, final int length)
    {
        final JPanel answer = new JPanel();
        answer.setBorder(BorderFactory.createLineBorder(Color.BLUE));
        answer.setLayout(new GridLayout(0, 3));

        for (int i = 0; i < length; i++)
        {
            final int letterInt = start + i;
            final String letter = Character.toString((char)letterInt);
            final JLabel label0 = createLabel(letter);

            final JLabel label1;

            if (isTengwar && DIACRITICS.contains(letter))
            {
                // Prepend the Tengwar short carrier so the diacritic is visible.
                label1 = createLabel("`" + letter);
            }
            else
            {
                label1 = createLabel(letter);
            }

            label1.setFont(fontTranscriber.getFont());

            final JLabel label2 = createLabel(Integer.toString(letterInt));

            answer.add(label0);
            answer.add(label1);
            answer.add(label2);
        }

        return answer;
    }
}
package dk.statsbiblioteket.newspaper.processmonitor.backend;

import dk.statsbiblioteket.medieplatform.autonomous.NotFoundException;
import dk.statsbiblioteket.medieplatform.autonomous.processmonitor.datasources.DataSource;
import dk.statsbiblioteket.medieplatform.autonomous.processmonitor.datasources.NotWorkingProperlyException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Combines several {@link DataSource}s into one: queries each configured source and merges
 * their per-batch results. A source that throws {@link NotWorkingProperlyException} is
 * logged and skipped, so one broken source does not break the combined view.
 *
 * <p>Request-scoped Spring component; the data sources are injected from the
 * {@code dataSourcesList} bean.
 */
@Component
@Scope(value = "request")
public class DataSourceCombiner implements DataSource {

    final static Logger logger = LoggerFactory.getLogger(DataSourceCombiner.class);

    // Injected via setDataSources(); order determines which source "wins" on equal roundtrips
    // (later sources override earlier ones, see mergeBatches).
    private List<DataSource> dataSources;

    public List<DataSource> getDataSources() {
        return dataSources;
    }

    @Resource(name = "dataSourcesList")
    public void setDataSources(List<DataSource> dataSources) {
        this.dataSources = dataSources;
    }

    /**
     * Query every data source and return one merged batch per batch id. For each id the
     * roundtrip-0 batch (if any) is merged with the highest-roundtrip batch seen.
     */
    @Override
    public List<dk.statsbiblioteket.medieplatform.autonomous.Batch> getBatches(boolean includeDetails,
                                                                              Map<String, String> filters) {
        logger.info("Call to getBatches with {} and filters {}", includeDetails, filters);
        Map<String, BatchResult> results = new HashMap<>();
        List<dk.statsbiblioteket.medieplatform.autonomous.Batch> batches = new ArrayList<>();
        for (DataSource dataSource : dataSources) {
            try {
                mergeResults(results, dataSource.getBatches(includeDetails, filters));
            } catch (NotWorkingProperlyException e) {
                // Best-effort: a failing source is logged and its batches are simply absent.
                logger.error("Datasource failed", e);
            }
        }
        // Collapse each id's roundtrip-0 and max-roundtrip batches into one final batch.
        for (BatchResult br : results.values()) {
            batches.add(mergeBatches(br.getRoundtrip0(), br.getRoundtripMax()));
        }
        return batches;
    }

    /**
     * Merge a list of batches into the given result map. Will do inplace modification of the map.
     *
     * <p>Per batch id this tracks two slots: the roundtrip-0 batch and the batch with the
     * highest roundtrip number seen so far. A batch with a higher roundtrip replaces the
     * current max; a batch with the same roundtrip is merged into it; a lower one is dropped.
     *
     * @param results the map of batches to merge the list into
     * @param batches the batches to merge into the map
     */
    private void mergeResults(Map<String, BatchResult> results,
                              List<dk.statsbiblioteket.medieplatform.autonomous.Batch> batches) {
        //For each batch in the list
        for (dk.statsbiblioteket.medieplatform.autonomous.Batch batch : batches) {
            //get the id
            String id = batch.getBatchID();
            Integer roundtrip = batch.getRoundTripNumber();
            BatchResult result = results.get(id);
            if (result == null) {
                // First time we see this id: the current roundtrip is the max so far.
                result = new BatchResult();
                result.resetMaxRoundTrip(roundtrip);
            }
            Integer maxRoundtrip = result.getMaxRoundTrip();
            if (roundtrip == 0) {
                // Roundtrip 0 is kept in its own slot, independent of the max slot.
                result.setRoundtrip0(mergeBatches(result.getRoundtrip0(), batch));
            } else if (maxRoundtrip.equals(roundtrip)) {
                // Merge with existing
                result.setRoundtripMax(mergeBatches(result.getRoundtripMax(), batch));
            } else if (roundtrip > maxRoundtrip) {
                // Ditch the existing and insert our own
                result.resetMaxRoundTrip(roundtrip);
                result.setRoundtripMax(mergeBatches(null, batch));
            } else {
                // roundtrip < maxRoundtrip i.e. we ditch it.
            }
            results.put(id, result);
        }
    }

    /**
     * Merge two batches. If both is null, null is returned. If either is null, the other is returned.
     * For each event for the batches. If they do not overlap in eventID, both are included.
     * If they do overlap in eventID, the one from the batch with the highest round trip number is used.
     * If the batches have equal round trip number, the event from b is used.
     *
     * @param a the first batch
     * @param b the second batch
     * @return a new batch containing the merged information
     */
    private dk.statsbiblioteket.medieplatform.autonomous.Batch mergeBatches(
            dk.statsbiblioteket.medieplatform.autonomous.Batch a,
            dk.statsbiblioteket.medieplatform.autonomous.Batch b) {
        if (a == null) {
            return b;
        }
        if (b == null) {
            return a;
        }
        dk.statsbiblioteket.medieplatform.autonomous.Batch result
                = new dk.statsbiblioteket.medieplatform.autonomous.Batch();
        // Both inputs describe the same batch id here, so a's id is used for the result.
        result.setBatchID(a.getBatchID());
        boolean aIsHigher = a.getRoundTripNumber() > b.getRoundTripNumber();
        if (aIsHigher) {
            result.setRoundTripNumber(a.getRoundTripNumber());
            result.setDomsID(a.getDomsID());
        } else {
            result.setRoundTripNumber(b.getRoundTripNumber());
            result.setDomsID(b.getDomsID());
        }
        // Start with a's events; b's events win on eventID collision unless a's roundtrip is higher.
        HashMap<String, dk.statsbiblioteket.medieplatform.autonomous.Event> eventMap = new HashMap<>();
        for (dk.statsbiblioteket.medieplatform.autonomous.Event event : a.getEventList()) {
            eventMap.put(event.getEventID(), event);
        }
        for (dk.statsbiblioteket.medieplatform.autonomous.Event event : b.getEventList()) {
            dk.statsbiblioteket.medieplatform.autonomous.Event existing = eventMap.get(event.getEventID());
            if (existing != null) {
                if (!aIsHigher) {
                    eventMap.put(event.getEventID(), event);
                }
            } else {
                eventMap.put(event.getEventID(), event);
            }
        }
        result.setEventList(new ArrayList<>(eventMap.values()));
        return result;
    }

    /**
     * Get a specific batch, by merging the results of each datasource
     *
     * @param batchID the id
     * @param includeDetails should details be included
     * @return the specific batch
     * @throws dk.statsbiblioteket.medieplatform.autonomous.NotFoundException
     */
    @Override
    public dk.statsbiblioteket.medieplatform.autonomous.Batch getBatch(String batchID, Integer roundTripNumber,
                                                                       boolean includeDetails)
            throws NotFoundException {
        //Create a list of batches, at most one from each datasource
        for (DataSource dataSource : dataSources)
            ;
        List<dk.statsbiblioteket.medieplatform.autonomous.Batch> founds = new ArrayList<>();
        for (DataSource dataSource : dataSources)
            try {
                founds.add(dataSource.getBatch(batchID, roundTripNumber, includeDetails));
            } catch (NotWorkingProperlyException e) {
                logger.error("Datasource failed", e);
            } catch (NotFoundException ignored) {
                // A source not knowing the batch is expected; only fail if no source knows it.
            }
        //Merge all the found batches into one batch
        dk.statsbiblioteket.medieplatform.autonomous.Batch result = null;
        for (dk.statsbiblioteket.medieplatform.autonomous.Batch found : founds) {
            result = mergeBatches(result, found);
        }
        //return or throw
        if (result == null) {
            throw new NotFoundException();
        }
        return result;
    }

    /**
     * Get a specific event by quering the datasources until one of them provides the event
     *
     * @param batchID the batch id
     * @param eventID the event id
     * @param includeDetails should details be included
     * @return the specific event
     * @throws NotFoundException
     */
    @Override
    public dk.statsbiblioteket.medieplatform.autonomous.Event getBatchEvent(String batchID, Integer roundTripNumber,
                                                                            String eventID, boolean includeDetails)
            throws NotFoundException {
        // First source that knows the event wins; failing/ignorant sources are skipped.
        for (DataSource dataSource : dataSources) {
            dk.statsbiblioteket.medieplatform.autonomous.Event result;
            try {
                result = dataSource.getBatchEvent(batchID, roundTripNumber, eventID, includeDetails);
            } catch (NotFoundException e) {
                continue;
            } catch (NotWorkingProperlyException e) {
                logger.error("Datasource failed", e);
                continue;
            }
            return result;
        }
        throw new NotFoundException();
    }

    /**
     * Class for containing intermediate batch results while combining data from multiple sources.
     * Implemented as least-path-of-resistance to fix the merge bug in the process monitor.
     * Holds two slots per batch id: the roundtrip-0 batch and the highest-roundtrip batch.
     * TODO Make the class a first rate citizen holding the merging logic, so its use is simplified from the outside.
     */
    private class BatchResult {
        // Highest-roundtrip batch seen so far ("rts" = roundtrips).
        private dk.statsbiblioteket.medieplatform.autonomous.Batch rts;
        // Roundtrip-0 batch, tracked separately from the max slot.
        private dk.statsbiblioteket.medieplatform.autonomous.Batch rt0;
        private Integer maxRoundTrip = null;

        Integer getMaxRoundTrip() {
            return maxRoundTrip;
        }

        void resetMaxRoundTrip(Integer roundtrip) {
            maxRoundTrip = roundtrip;
        }

        dk.statsbiblioteket.medieplatform.autonomous.Batch getRoundtrip0() {
            return rt0;
        }

        void setRoundtrip0(dk.statsbiblioteket.medieplatform.autonomous.Batch batch) {
            rt0 = batch;
        }

        dk.statsbiblioteket.medieplatform.autonomous.Batch getRoundtripMax() {
            return rts;
        }

        void setRoundtripMax(dk.statsbiblioteket.medieplatform.autonomous.Batch batch) {
            rts = batch;
        }
    }
}
/* Licensed under Apache-2.0 */
package com.rabidgremlin.mutters.core.util;

import java.time.LocalDate;
import java.time.LocalTime;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.rabidgremlin.mutters.core.IntentMatch;
import com.rabidgremlin.mutters.core.SlotMatch;
import com.rabidgremlin.mutters.core.session.Session;

/**
 * This utility class provides methods working with a Session object.
 *
 * <p>Slot values and reprompt data are stored as session attributes under prefixed keys so
 * they cannot collide with attributes set by application code.
 *
 * @author rabidgremlin
 *
 */
public class SessionUtils
{
  /** Prefix for slot values stored in session to avoid any name collisions. */
  public static final String SLOT_PREFIX = "SLOT_JLA1974_";

  // Unlikely-to-collide suffixes for the reprompt bookkeeping attributes.
  private static final String LAST_PROMPT_SUFFIX = "0987654321LASTPROMPT1234567890";

  private static final String REPROMPT_HINT_SUFFIX = "0987654321REPROMPTHINT1234567890";

  private static final String REPROMPT_QUICK_REPLIES_SUFFIX = "0987654321REPROMPTQUICKREPLIES1234567890";

  private static final String REPROMPT_SUFFIX = "0987654321REPROMPT1234567890";

  /** Logger. */
  private static final Logger LOG = LoggerFactory.getLogger(SessionUtils.class);

  protected SessionUtils()
  {
    // utility class
  }

  /**
   * Removes a stored slot value from a session.
   *
   * @param session The session.
   * @param slotName The name of the slot.
   */
  public static void removeSlotfromSession(Session session, String slotName)
  {
    session.removeAttribute(SLOT_PREFIX + slotName);
  }

  /**
   * Stores a reprompt string in a session.
   *
   * @param session The session.
   * @param reprompt The reprompt string.
   */
  public static void setReprompt(Session session, String reprompt)
  {
    session.setAttribute(SLOT_PREFIX + REPROMPT_SUFFIX, reprompt);
  }

  /**
   * Gets the current reprompt string from the session.
   *
   * @param session The session.
   * @return The reprompt string or null if there is no reprompt string.
   */
  public static String getReprompt(Session session)
  {
    return (String) session.getAttribute(SLOT_PREFIX + REPROMPT_SUFFIX);
  }

  /**
   * Stores the reprompt quick replies in the session.
   *
   * @param session The session.
   * @param repromptQuickReplies The reprompt quick replies.
   */
  public static void setRepromptQuickReplies(Session session, List<String> repromptQuickReplies)
  {
    session.setAttribute(SLOT_PREFIX + REPROMPT_QUICK_REPLIES_SUFFIX, repromptQuickReplies);
  }

  /**
   * Gets the current reprompt quick replies from the session.
   *
   * @param session The session.
   * @return The reprompt quick replies from the session.
   */
  @SuppressWarnings("unchecked")
  public static List<String> getRepromptQuickReplies(Session session)
  {
    return (List<String>) session.getAttribute(SLOT_PREFIX + REPROMPT_QUICK_REPLIES_SUFFIX);
  }

  /**
   * Stores a reprompt hint string in a session.
   *
   * @param session The session.
   * @param repromptHint The reprompt hint.
   */
  public static void setRepromptHint(Session session, String repromptHint)
  {
    session.setAttribute(SLOT_PREFIX + REPROMPT_HINT_SUFFIX, repromptHint);
  }

  /**
   * Gets the current reprompt hint from the session.
   *
   * @param session The session.
   * @return The current reprompt hint or null if there is no reprompt hint.
   */
  public static String getRepromptHint(Session session)
  {
    return (String) session.getAttribute(SLOT_PREFIX + REPROMPT_HINT_SUFFIX);
  }

  /**
   * Stores a Number slot value in the session.
   *
   * @param session The session.
   * @param slotName The name of the slot.
   * @param value The value to store.
   */
  public static void setNumberSlotIntoSession(Session session, String slotName, Number value)
  {
    session.setAttribute(SLOT_PREFIX + slotName, value);
  }

  /**
   * Stores a String slot value in the session.
   *
   * @param session The session.
   * @param slotName The name of the slot.
   * @param value The value to store.
   */
  public static void setStringSlotIntoSession(Session session, String slotName, String value)
  {
    session.setAttribute(SLOT_PREFIX + slotName, value);
  }

  /**
   * Stores a {@link LocalDate} slot value in the session.
   *
   * @param session The session.
   * @param slotName The name of the slot.
   * @param value The value to store.
   */
  public static void setLocalDateSlotIntoSession(Session session, String slotName, LocalDate value)
  {
    session.setAttribute(SLOT_PREFIX + slotName, value);
  }

  /**
   * Stores a {@link LocalTime} slot value in the session.
   *
   * @param session The session.
   * @param slotName The name of the slot.
   * @param value The value to store.
   */
  public static void setLocalTimeSlotIntoSession(Session session, String slotName, LocalTime value)
  {
    session.setAttribute(SLOT_PREFIX + slotName, value);
  }

  /**
   * Gets a String based slot value from an intent match.
   *
   * @param match The intent match to get the slot value from.
   * @param slotName The name of the slot.
   * @param defaultValue The default value to use if no slot found.
   * @return The string value.
   */
  public static String getStringSlot(IntentMatch match, String slotName, String defaultValue)
  {
    if (match.getSlotMatch(slotName) != null && match.getSlotMatch(slotName).getValue() != null)
    {
      try
      {
        return (String) match.getSlotMatch(slotName).getValue();
      }
      catch (ClassCastException e)
      {
        // failed to cast so assume invalid string and return default
        LOG.warn("Non String value: {} found in slot {}", match.getSlotMatch(slotName).getValue(), slotName);
        return defaultValue;
      }
    }
    else
    {
      return defaultValue;
    }
  }

  /**
   * Gets a Number based slot value from an intent match.
   *
   * @param match The intent match to get the slot value from.
   * @param slotName The name of the slot.
   * @param defaultValue The default value to use if no slot found.
   * @return The string value.
   */
  public static Number getNumberSlot(IntentMatch match, String slotName, Number defaultValue)
  {
    if (match.getSlotMatch(slotName) != null && match.getSlotMatch(slotName).getValue() != null)
    {
      try
      {
        return (Number) match.getSlotMatch(slotName).getValue();
      }
      catch (ClassCastException e)
      {
        // failed to cast so assume invalid number and return default
        LOG.warn("Non Number value: {} found in slot {}", match.getSlotMatch(slotName).getValue(), slotName);
        return defaultValue;
      }
    }
    else
    {
      return defaultValue;
    }
  }

  /**
   * Gets a {@link LocalDate} based slot value from an intent match.
   *
   * @param match The intent match to get the slot value from.
   * @param slotName The name of the slot.
   * @param defaultValue The default value to use if no slot found.
   * @return The local date value.
   */
  public static LocalDate getLocalDateSlot(IntentMatch match, String slotName, LocalDate defaultValue)
  {
    if (match.getSlotMatch(slotName) != null && match.getSlotMatch(slotName).getValue() != null)
    {
      try
      {
        return (LocalDate) match.getSlotMatch(slotName).getValue();
      }
      catch (ClassCastException e)
      {
        // failed to cast so assume invalid localdate and return default
        LOG.warn("Non LocalDate value: {} found in slot {}", match.getSlotMatch(slotName).getValue(), slotName);
        return defaultValue;
      }
    }
    else
    {
      return defaultValue;
    }
  }

  /**
   * Gets a {@link LocalTime} based slot value from an intent match.
   *
   * @param match The intent match to get the slot value from.
   * @param slotName The name of the slot.
   * @param defaultValue The default value to use if no slot found.
   * @return The local time value.
   */
  public static LocalTime getLocalTimeSlot(IntentMatch match, String slotName, LocalTime defaultValue)
  {
    if (match.getSlotMatch(slotName) != null && match.getSlotMatch(slotName).getValue() != null)
    {
      try
      {
        return (LocalTime) match.getSlotMatch(slotName).getValue();
      }
      catch (ClassCastException e)
      {
        // failed to cast so assume invalid localtime and return default
        LOG.warn("Non LocalTime value: {} found in slot {}", match.getSlotMatch(slotName).getValue(), slotName);
        return defaultValue;
      }
    }
    else
    {
      return defaultValue;
    }
  }

  /**
   * Saves all the matched slots for an IntentMatch into the session.
   *
   * @param match The intent match.
   * @param session The session.
   */
  public static void saveSlotsToSession(IntentMatch match, Session session)
  {
    for (SlotMatch<?> slotMatch : match.getSlotMatches().values())
    {
      session.setAttribute(SLOT_PREFIX + slotMatch.getSlot().getName(), slotMatch.getValue());
    }
  }

  /**
   * Gets a String value from the session (if it exists) or the slot (if a match
   * exists).
   *
   * @param match The intent match.
   * @param session The session.
   * @param slotName The name of the slot.
   * @param defaultValue The default value if not value found in the session or
   * slot.
   * @return The string value.
   */
  public static String getStringFromSlotOrSession(IntentMatch match, Session session, String slotName,
      String defaultValue)
  {
    String sessionValue = (String) session.getAttribute(SLOT_PREFIX + slotName);
    if (sessionValue != null)
    {
      return sessionValue;
    }

    return getStringSlot(match, slotName, defaultValue);
  }

  /**
   * Gets a Number value from the session (if it exists) or the slot (if a match
   * exists).
   *
   * @param match The intent match.
   * @param session The session.
   * @param slotName The name of the slot.
   * @param defaultValue The default value if not value found in the session or
   * slot.
   * @return The number value.
*/ public static Number getNumberFromSlotOrSession(IntentMatch match, Session session, String slotName, Number defaultValue) { Number sessionValue = (Number) session.getAttribute(SLOT_PREFIX + slotName); if (sessionValue != null) { return sessionValue; } return getNumberSlot(match, slotName, defaultValue); } /** * Gets a {@link LocalDate} value from the session (if it exists) or the slot * (if a match exists). * * @param match The intent match. * @param session The session. * @param slotName The name of the slot. * @param defaultValue The default value if not value found in the session or * slot. * @return The local date value. */ public static LocalDate getLocalDateFromSlotOrSession(IntentMatch match, Session session, String slotName, LocalDate defaultValue) { LocalDate sessionValue = (LocalDate) session.getAttribute(SLOT_PREFIX + slotName); if (sessionValue != null) { return sessionValue; } return getLocalDateSlot(match, slotName, defaultValue); } /** * Gets a {@link LocalTime} value from the session (if it exists) or the slot * (if a match exists). * * @param match The intent match. * @param session The session. * @param slotName The name of the slot. * @param defaultValue The default value if not value found in the session or * slot. * @return The local time value. */ public static LocalTime getLocalTimeFromSlotOrSession(IntentMatch match, Session session, String slotName, LocalTime defaultValue) { LocalTime sessionValue = (LocalTime) session.getAttribute(SLOT_PREFIX + slotName); if (sessionValue != null) { return sessionValue; } return getLocalTimeSlot(match, slotName, defaultValue); } /** * Stores the text of the last prompt that was sent to the user. * * @param session The session. * @param lastPrompt The last prompt text. */ public static void setLastPrompt(Session session, String lastPrompt) { session.setAttribute(SLOT_PREFIX + LAST_PROMPT_SUFFIX, lastPrompt); } /** * Gets the text of the last prompt that was sent to the user. 
* * @param session The session. * @return The last prompt text or null if there is no last prompt text. */ public static String getLastPrompt(Session session) { return (String) session.getAttribute(SLOT_PREFIX + LAST_PROMPT_SUFFIX); } }
/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
 * This file is available under and governed by the GNU General Public
 * License version 2 only, as published by the Free Software Foundation.
 * However, the following notice accompanied the original version of this
 * file:
 *
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/publicdomain/zero/1.0/
 */

package java.util.concurrent.locks;

import java.util.concurrent.TimeUnit;
import java.util.Collection;

/**
 * A reentrant mutual exclusion {@link Lock} with the same basic behavior and
 * semantics as the implicit monitor lock accessed using {@code synchronized},
 * but with extended capabilities.
 *
 * <p>A {@code ReentrantLock} is <em>owned</em> by the thread last successfully
 * locking, but not yet unlocking it. {@code lock} returns immediately if the
 * current thread already owns the lock; ownership can be checked with
 * {@link #isHeldByCurrentThread} and {@link #getHoldCount}.
 *
 * <p>The constructor accepts an optional <em>fairness</em> parameter. When
 * {@code true}, under contention the lock favors the longest-waiting thread.
 * Fair locks typically have lower throughput but smaller variance in acquire
 * times and guarantee lack of starvation. Note that fairness of locks does not
 * guarantee fairness of thread scheduling, and the untimed {@link #tryLock()}
 * does not honor the fairness setting: it barges if the lock is available.
 *
 * <p>Recommended usage is to always immediately follow {@code lock} with a
 * {@code try} block:
 *
 * <pre> {@code
 * class X {
 *   private final ReentrantLock lock = new ReentrantLock();
 *   // ...
 *
 *   public void m() {
 *     lock.lock();  // block until condition holds
 *     try {
 *       // ... method body
 *     } finally {
 *       lock.unlock();
 *     }
 *   }
 * }}</pre>
 *
 * <p>In addition to implementing {@link Lock}, this class defines a number of
 * {@code public} and {@code protected} methods for inspecting the state of the
 * lock, mainly useful for instrumentation and monitoring.
 *
 * <p>Serialization behaves like built-in locks: a deserialized lock is in the
 * unlocked state, regardless of its state when serialized.
 *
 * <p>This lock supports a maximum of 2147483647 recursive locks by the same
 * thread. Attempts to exceed this limit result in {@link Error} throws from
 * locking methods.
 *
 * @since 1.5
 * @author Doug Lea
 */
public class ReentrantLock implements Lock, java.io.Serializable {
    private static final long serialVersionUID = 7373984872572414699L;
    /** Synchronizer providing all implementation mechanics */
    private final Sync sync;

    /**
     * Base of synchronization control for this lock. Subclassed into fair and
     * nonfair versions below. Uses AQS state to represent the number of holds
     * on the lock (0 == unlocked).
     */
    abstract static class Sync extends AbstractQueuedSynchronizer {
        private static final long serialVersionUID = -5179523762034025860L;

        /**
         * Performs {@link Lock#lock}. The main reason for subclassing
         * is to allow fast path for nonfair version.
         */
        abstract void lock();

        /**
         * Performs non-fair tryLock.  tryAcquire is implemented in
         * subclasses, but both need nonfair try for trylock method.
         */
        final boolean nonfairTryAcquire(int acquires) {
            final Thread current = Thread.currentThread();
            int c = getState();
            if (c == 0) {
                // Unlocked: barge via CAS regardless of queued waiters.
                if (compareAndSetState(0, acquires)) {
                    setExclusiveOwnerThread(current);
                    return true;
                }
            }
            else if (current == getExclusiveOwnerThread()) {
                // Reentrant acquire: bump the hold count (no CAS needed,
                // only the owner can reach this branch).
                int nextc = c + acquires;
                if (nextc < 0) // overflow
                    throw new Error("Maximum lock count exceeded");
                setState(nextc);
                return true;
            }
            return false;
        }

        protected final boolean tryRelease(int releases) {
            int c = getState() - releases;
            if (Thread.currentThread() != getExclusiveOwnerThread())
                throw new IllegalMonitorStateException();
            boolean free = false;
            if (c == 0) {
                // Final release: clear owner before the state write that
                // publishes the unlock.
                free = true;
                setExclusiveOwnerThread(null);
            }
            setState(c);
            return free;
        }

        protected final boolean isHeldExclusively() {
            // While we must in general read state before owner,
            // we don't need to do so to check if current thread is owner
            return getExclusiveOwnerThread() == Thread.currentThread();
        }

        final ConditionObject newCondition() {
            return new ConditionObject();
        }

        // Methods relayed from outer class

        final Thread getOwner() {
            return getState() == 0 ? null : getExclusiveOwnerThread();
        }

        final int getHoldCount() {
            return isHeldExclusively() ? getState() : 0;
        }

        final boolean isLocked() {
            return getState() != 0;
        }

        /**
         * Reconstitutes the instance from a stream (that is, deserializes it).
         */
        private void readObject(java.io.ObjectInputStream s)
            throws java.io.IOException, ClassNotFoundException {
            s.defaultReadObject();
            setState(0); // reset to unlocked state
        }
    }

    /**
     * Sync object for non-fair locks
     */
    static final class NonfairSync extends Sync {
        private static final long serialVersionUID = 7316153563782823691L;

        /**
         * Performs lock.  Try immediate barge, backing up to normal
         * acquire on failure.
         */
        final void lock() {
            if (compareAndSetState(0, 1))
                setExclusiveOwnerThread(Thread.currentThread());
            else
                acquire(1);
        }

        protected final boolean tryAcquire(int acquires) {
            return nonfairTryAcquire(acquires);
        }
    }

    /**
     * Sync object for fair locks
     */
    static final class FairSync extends Sync {
        private static final long serialVersionUID = -3000897897090466540L;

        final void lock() {
            acquire(1);
        }

        /**
         * Fair version of tryAcquire.  Don't grant access unless
         * recursive call or no waiters or is first.
         */
        protected final boolean tryAcquire(int acquires) {
            final Thread current = Thread.currentThread();
            int c = getState();
            if (c == 0) {
                // Unlike the nonfair version, only acquire if no thread has
                // been queued longer (hasQueuedPredecessors).
                if (!hasQueuedPredecessors() &&
                    compareAndSetState(0, acquires)) {
                    setExclusiveOwnerThread(current);
                    return true;
                }
            }
            else if (current == getExclusiveOwnerThread()) {
                int nextc = c + acquires;
                if (nextc < 0)
                    throw new Error("Maximum lock count exceeded");
                setState(nextc);
                return true;
            }
            return false;
        }
    }

    /**
     * Creates an instance of {@code ReentrantLock}.
     * This is equivalent to using {@code ReentrantLock(false)}.
     */
    public ReentrantLock() {
        sync = new NonfairSync();
    }

    /**
     * Creates an instance of {@code ReentrantLock} with the
     * given fairness policy.
     *
     * @param fair {@code true} if this lock should use a fair ordering policy
     */
    public ReentrantLock(boolean fair) {
        sync = fair ? new FairSync() : new NonfairSync();
    }

    /**
     * Acquires the lock.
     *
     * <p>Acquires the lock if it is not held by another thread and returns
     * immediately, setting the lock hold count to one. If the current thread
     * already holds the lock then the hold count is incremented by one and
     * the method returns immediately.
     *
     * <p>If the lock is held by another thread then the current thread
     * becomes disabled for thread scheduling purposes and lies dormant until
     * the lock has been acquired, at which time the lock hold count is set
     * to one.
     */
    public void lock() {
        sync.lock();
    }

    /**
     * Acquires the lock unless the current thread is
     * {@linkplain Thread#interrupt interrupted}.
     *
     * <p>Behaves like {@link #lock} except that while waiting the current
     * thread may be {@linkplain Thread#interrupt interrupted}, in which case
     * {@link InterruptedException} is thrown and the thread's interrupted
     * status is cleared. The same happens if the interrupted status is
     * already set on entry.
     *
     * <p>In this implementation, as this method is an explicit interruption
     * point, preference is given to responding to the interrupt over normal
     * or reentrant acquisition of the lock.
     *
     * @throws InterruptedException if the current thread is interrupted
     */
    public void lockInterruptibly() throws InterruptedException {
        sync.acquireInterruptibly(1);
    }

    /**
     * Acquires the lock only if it is not held by another thread at the time
     * of invocation.
     *
     * <p>Acquires the lock and returns {@code true} (hold count one) if the
     * lock is free. Even for a fair lock, this method <em>will</em> barge
     * ahead of waiting threads if the lock is available; use
     * {@link #tryLock(long, TimeUnit) tryLock(0, TimeUnit.SECONDS)} to honor
     * the fairness setting. If the current thread already holds this lock
     * the hold count is incremented and {@code true} is returned; if the
     * lock is held by another thread {@code false} is returned immediately.
     *
     * @return {@code true} if the lock was free and was acquired by the
     *         current thread, or the lock was already held by the current
     *         thread; and {@code false} otherwise
     */
    public boolean tryLock() {
        return sync.nonfairTryAcquire(1);
    }

    /**
     * Acquires the lock if it is not held by another thread within the given
     * waiting time and the current thread has not been
     * {@linkplain Thread#interrupt interrupted}.
     *
     * <p>Unlike {@link #tryLock()}, this method honors a fair ordering
     * policy: an available lock will not be acquired if other threads are
     * waiting. To permit barging on a fair lock, combine the forms:
     *
     * <pre> {@code
     * if (lock.tryLock() ||
     *     lock.tryLock(timeout, unit)) {
     *   ...
     * }}</pre>
     *
     * <p>Returns {@code true} (hold count one, or incremented on reentry)
     * on acquisition; {@code false} if the waiting time elapses first (a
     * non-positive timeout does not wait at all). Throws
     * {@link InterruptedException}, clearing the interrupted status, if the
     * thread is interrupted on entry or while waiting. Preference is given
     * to responding to the interrupt over acquisition and over reporting
     * timeout elapse.
     *
     * @param timeout the time to wait for the lock
     * @param unit the time unit of the timeout argument
     * @return {@code true} if the lock was free and was acquired by the
     *         current thread, or the lock was already held by the current
     *         thread; and {@code false} if the waiting time elapsed before
     *         the lock could be acquired
     * @throws InterruptedException if the current thread is interrupted
     * @throws NullPointerException if the time unit is null
     */
    public boolean tryLock(long timeout, TimeUnit unit)
            throws InterruptedException {
        return sync.tryAcquireNanos(1, unit.toNanos(timeout));
    }

    /**
     * Attempts to release this lock.
     *
     * <p>If the current thread is the holder of this lock then the hold
     * count is decremented. If the hold count is now zero then the lock is
     * released. If the current thread is not the holder of this lock then
     * {@link IllegalMonitorStateException} is thrown.
     *
     * @throws IllegalMonitorStateException if the current thread does not
     *         hold this lock
     */
    public void unlock() {
        sync.release(1);
    }

    /**
     * Returns a {@link Condition} instance for use with this {@link Lock}
     * instance.
     *
     * <p>The returned {@link Condition} instance supports the same usages as
     * the {@link Object} monitor methods ({@code wait}, {@code notify},
     * {@code notifyAll}) when used with the built-in monitor lock:
     * the lock must be held when condition methods are called (otherwise
     * {@link IllegalMonitorStateException} is thrown); waiting releases the
     * lock and reacquires it, restoring the hold count, before returning;
     * interruption while waiting throws {@link InterruptedException} and
     * clears the interrupted status; waiting threads are signalled in FIFO
     * order; and lock reacquisition ordering is the same as for initial
     * acquisition (i.e. fair locks favor the longest waiters).
     *
     * @return the Condition object
     */
    public Condition newCondition() {
        return sync.newCondition();
    }

    /**
     * Queries the number of holds on this lock by the current thread.
     *
     * <p>A thread has a hold on a lock for each lock action that is not
     * matched by an unlock action. Typically only used for testing and
     * debugging, e.g. {@code assert lock.getHoldCount() == 0;} before a
     * section that must not be entered with the lock already held.
     *
     * @return the number of holds on this lock by the current thread,
     *         or zero if this lock is not held by the current thread
     */
    public int getHoldCount() {
        return sync.getHoldCount();
    }

    /**
     * Queries if this lock is held by the current thread.
     *
     * <p>Analogous to {@link Thread#holdsLock(Object)} for built-in monitor
     * locks; typically used for debugging and testing, e.g.
     * {@code assert lock.isHeldByCurrentThread();} in a method that must be
     * called with the lock held, or the negated assertion to ensure a
     * reentrant lock is used in a non-reentrant manner.
     *
     * @return {@code true} if current thread holds this lock and
     *         {@code false} otherwise
     */
    public boolean isHeldByCurrentThread() {
        return sync.isHeldExclusively();
    }

    /**
     * Queries if this lock is held by any thread. This method is
     * designed for use in monitoring of the system state,
     * not for synchronization control.
     *
     * @return {@code true} if any thread holds this lock and
     *         {@code false} otherwise
     */
    public boolean isLocked() {
        return sync.isLocked();
    }

    /**
     * Returns {@code true} if this lock has fairness set true.
     *
     * @return {@code true} if this lock has fairness set true
     */
    public final boolean isFair() {
        return sync instanceof FairSync;
    }

    /**
     * Returns the thread that currently owns this lock, or {@code null} if
     * not owned. When called by a non-owner thread the return value is a
     * best-effort approximation of current lock status (e.g. momentarily
     * {@code null} even while threads are trying to acquire). Designed to
     * facilitate subclasses providing more extensive lock monitoring.
     *
     * @return the owner, or {@code null} if not owned
     */
    protected Thread getOwner() {
        return sync.getOwner();
    }

    /**
     * Queries whether any threads are waiting to acquire this lock. Note that
     * because cancellations may occur at any time, a {@code true} return does
     * not guarantee that any other thread will ever acquire this lock. This
     * method is designed primarily for use in monitoring of the system state.
     *
     * @return {@code true} if there may be other threads waiting to
     *         acquire the lock
     */
    public final boolean hasQueuedThreads() {
        return sync.hasQueuedThreads();
    }

    /**
     * Queries whether the given thread is waiting to acquire this lock. Note
     * that because cancellations may occur at any time, a {@code true} return
     * does not guarantee that this thread will ever acquire this lock. This
     * method is designed primarily for use in monitoring of the system state.
     *
     * @param thread the thread
     * @return {@code true} if the given thread is queued waiting for this lock
     * @throws NullPointerException if the thread is null
     */
    public final boolean hasQueuedThread(Thread thread) {
        return sync.isQueued(thread);
    }

    /**
     * Returns an estimate of the number of threads waiting to acquire this
     * lock. The value is only an estimate because the number of threads may
     * change dynamically while this method traverses internal data
     * structures. This method is designed for use in monitoring of the
     * system state, not for synchronization control.
     *
     * @return the estimated number of threads waiting for this lock
     */
    public final int getQueueLength() {
        return sync.getQueueLength();
    }

    /**
     * Returns a collection containing threads that may be waiting to acquire
     * this lock. Because the actual set of threads may change dynamically
     * while constructing this result, the returned collection is only a
     * best-effort estimate, in no particular order. Designed to facilitate
     * subclasses providing more extensive monitoring facilities.
     *
     * @return the collection of threads
     */
    protected Collection<Thread> getQueuedThreads() {
        return sync.getQueuedThreads();
    }

    /**
     * Queries whether any threads are waiting on the given condition
     * associated with this lock. Note that because timeouts and interrupts
     * may occur at any time, a {@code true} return does not guarantee that a
     * future {@code signal} will awaken any threads. This method is designed
     * primarily for use in monitoring of the system state.
     *
     * @param condition the condition
     * @return {@code true} if there are any waiting threads
     * @throws IllegalMonitorStateException if this lock is not held
     * @throws IllegalArgumentException if the given condition is
     *         not associated with this lock
     * @throws NullPointerException if the condition is null
     */
    public boolean hasWaiters(Condition condition) {
        if (condition == null)
            throw new NullPointerException();
        if (!(condition instanceof AbstractQueuedSynchronizer.ConditionObject))
            throw new IllegalArgumentException("not owner");
        return sync.hasWaiters((AbstractQueuedSynchronizer.ConditionObject)condition);
    }

    /**
     * Returns an estimate of the number of threads waiting on the given
     * condition associated with this lock. Note that because timeouts and
     * interrupts may occur at any time, the estimate serves only as an upper
     * bound on the actual number of waiters. This method is designed for use
     * in monitoring of the system state, not for synchronization control.
     *
     * @param condition the condition
     * @return the estimated number of waiting threads
     * @throws IllegalMonitorStateException if this lock is not held
     * @throws IllegalArgumentException if the given condition is
     *         not associated with this lock
     * @throws NullPointerException if the condition is null
     */
    public int getWaitQueueLength(Condition condition) {
        if (condition == null)
            throw new NullPointerException();
        if (!(condition instanceof AbstractQueuedSynchronizer.ConditionObject))
            throw new IllegalArgumentException("not owner");
        return sync.getWaitQueueLength((AbstractQueuedSynchronizer.ConditionObject)condition);
    }

    /**
     * Returns a collection containing those threads that may be waiting on
     * the given condition associated with this lock. Because the actual set
     * of threads may change dynamically while constructing this result, the
     * returned collection is only a best-effort estimate, in no particular
     * order. Designed to facilitate subclasses providing more extensive
     * condition monitoring facilities.
     *
     * @param condition the condition
     * @return the collection of threads
     * @throws IllegalMonitorStateException if this lock is not held
     * @throws IllegalArgumentException if the given condition is
     *         not associated with this lock
     * @throws NullPointerException if the condition is null
     */
    protected Collection<Thread> getWaitingThreads(Condition condition) {
        if (condition == null)
            throw new NullPointerException();
        if (!(condition instanceof AbstractQueuedSynchronizer.ConditionObject))
            throw new IllegalArgumentException("not owner");
        return sync.getWaitingThreads((AbstractQueuedSynchronizer.ConditionObject)condition);
    }

    /**
     * Returns a string identifying this lock, as well as its lock state.
     * The state, in brackets, includes either the String {@code "Unlocked"}
     * or the String {@code "Locked by"} followed by the
     * {@linkplain Thread#getName name} of the owning thread.
     *
     * @return a string identifying this lock, as well as its lock state
     */
    public String toString() {
        Thread o = sync.getOwner();
        return super.toString() + ((o == null) ?
                                   "[Unlocked]" :
                                   "[Locked by thread " + o.getName() + "]");
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.script.ScriptService; import java.util.Locale; import java.util.Set; import static java.util.Collections.unmodifiableSet; import static org.elasticsearch.common.util.set.Sets.newHashSet; /** * This service is responsible for upgrading legacy index metadata to the current version * <p> * Every time an existing index is introduced into cluster this service should be used * to upgrade the existing index metadata to the latest version of the cluster. 
It typically * occurs during cluster upgrade, when dangling indices are imported into the cluster or indices * are restored from a repository. */ public class MetaDataIndexUpgradeService extends AbstractComponent { private final ScriptService scriptService; @Inject public MetaDataIndexUpgradeService(Settings settings, ScriptService scriptService) { super(settings); this.scriptService = scriptService; } /** * Checks that the index can be upgraded to the current version of the master node. * * <p> * If the index does not need upgrade it returns the index metadata unchanged, otherwise it returns a modified index metadata. If index * cannot be updated the method throws an exception. */ public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) { // Throws an exception if there are too-old segments: if (isUpgraded(indexMetaData)) { return indexMetaData; } checkSupportedVersion(indexMetaData); IndexMetaData newMetaData = indexMetaData; newMetaData = addDefaultUnitsIfNeeded(newMetaData); checkMappingsCompatibility(newMetaData); newMetaData = markAsUpgraded(newMetaData); return newMetaData; } /** * Checks if the index was already opened by this version of Elasticsearch and doesn't require any additional checks. */ private boolean isUpgraded(IndexMetaData indexMetaData) { return indexMetaData.upgradeVersion().onOrAfter(Version.V_3_0_0); } /** * Elasticsearch 3.0 no longer supports indices with pre Lucene v5.0 (Elasticsearch v2.0.0.beta1) segments. All indices * that were created before Elasticsearch v2.0.0.beta1 should be upgraded using upgrade API before they can * be open by this version of elasticsearch. */ private void checkSupportedVersion(IndexMetaData indexMetaData) { if (indexMetaData.getState() == IndexMetaData.State.OPEN && isSupportedVersion(indexMetaData) == false) { throw new IllegalStateException("The index [" + indexMetaData.getIndex() + "] was created before v2.0.0.beta1 and wasn't upgraded." 
+ " This index should be open using a version before " + Version.CURRENT.minimumCompatibilityVersion() + " and upgraded using the upgrade API."); } } /* * Returns true if this index can be supported by the current version of elasticsearch */ private static boolean isSupportedVersion(IndexMetaData indexMetaData) { if (indexMetaData.creationVersion().onOrAfter(Version.V_2_0_0_beta1)) { // The index was created with elasticsearch that was using Lucene 5.2.1 return true; } if (indexMetaData.getMinimumCompatibleVersion() != null && indexMetaData.getMinimumCompatibleVersion().onOrAfter(org.apache.lucene.util.Version.LUCENE_5_0_0)) { //The index was upgraded we can work with it return true; } return false; } /** All known byte-sized settings for an index. */ public static final Set<String> INDEX_BYTES_SIZE_SETTINGS = unmodifiableSet(newHashSet( "index.merge.policy.floor_segment", "index.merge.policy.max_merged_segment", "index.merge.policy.max_merge_size", "index.merge.policy.min_merge_size", "index.shard.recovery.file_chunk_size", "index.shard.recovery.translog_size", "index.store.throttle.max_bytes_per_sec", "index.translog.flush_threshold_size", "index.translog.fs.buffer_size", "index.version_map_size")); /** All known time settings for an index. 
*/ public static final Set<String> INDEX_TIME_SETTINGS = unmodifiableSet(newHashSet( "index.gateway.wait_for_mapping_update_post_recovery", "index.shard.wait_for_mapping_update_post_recovery", "index.gc_deletes", "index.indexing.slowlog.threshold.index.debug", "index.indexing.slowlog.threshold.index.info", "index.indexing.slowlog.threshold.index.trace", "index.indexing.slowlog.threshold.index.warn", "index.refresh_interval", "index.search.slowlog.threshold.fetch.debug", "index.search.slowlog.threshold.fetch.info", "index.search.slowlog.threshold.fetch.trace", "index.search.slowlog.threshold.fetch.warn", "index.search.slowlog.threshold.query.debug", "index.search.slowlog.threshold.query.info", "index.search.slowlog.threshold.query.trace", "index.search.slowlog.threshold.query.warn", "index.shadow.wait_for_initial_commit", "index.store.stats_refresh_interval", "index.translog.flush_threshold_period", "index.translog.interval", "index.translog.sync_interval", UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING)); /** * Elasticsearch 2.0 requires units on byte/memory and time settings; this method adds the default unit to any such settings that are * missing units. */ private IndexMetaData addDefaultUnitsIfNeeded(IndexMetaData indexMetaData) { if (indexMetaData.getCreationVersion().before(Version.V_2_0_0_beta1)) { // TODO: can we somehow only do this *once* for a pre-2.0 index? Maybe we could stuff a "fake marker setting" here? Seems hackish... 
// Created lazily if we find any settings that are missing units: Settings settings = indexMetaData.settings(); Settings.Builder newSettings = null; for(String byteSizeSetting : INDEX_BYTES_SIZE_SETTINGS) { String value = settings.get(byteSizeSetting); if (value != null) { try { Long.parseLong(value); } catch (NumberFormatException nfe) { continue; } // It's a naked number that previously would be interpreted as default unit (bytes); now we add it: logger.warn("byte-sized index setting [{}] with value [{}] is missing units; assuming default units (b) but in future versions this will be a hard error", byteSizeSetting, value); if (newSettings == null) { newSettings = Settings.builder(); newSettings.put(settings); } newSettings.put(byteSizeSetting, value + "b"); } } for(String timeSetting : INDEX_TIME_SETTINGS) { String value = settings.get(timeSetting); if (value != null) { try { Long.parseLong(value); } catch (NumberFormatException nfe) { continue; } // It's a naked number that previously would be interpreted as default unit (ms); now we add it: logger.warn("time index setting [{}] with value [{}] is missing units; assuming default units (ms) but in future versions this will be a hard error", timeSetting, value); if (newSettings == null) { newSettings = Settings.builder(); newSettings.put(settings); } newSettings.put(timeSetting, value + "ms"); } } if (newSettings != null) { // At least one setting was changed: return IndexMetaData.builder(indexMetaData) .version(indexMetaData.version()) .settings(newSettings.build()) .build(); } } // No changes: return indexMetaData; } /** * Checks the mappings for compatibility with the current version */ private void checkMappingsCompatibility(IndexMetaData indexMetaData) { Index index = new Index(indexMetaData.getIndex()); Settings settings = indexMetaData.settings(); try { SimilarityService similarityService = new SimilarityService(index, settings); // We cannot instantiate real analysis server at this point because the node 
might not have // been started yet. However, we don't really need real analyzers at this stage - so we can fake it try (AnalysisService analysisService = new FakeAnalysisService(index, settings)) { try (MapperService mapperService = new MapperService(index, settings, analysisService, similarityService, scriptService)) { for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) { MappingMetaData mappingMetaData = cursor.value; mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), false, false); } } } } catch (Exception ex) { // Wrap the inner exception so we have the index name in the exception message throw new IllegalStateException("unable to upgrade the mappings for the index [" + indexMetaData.getIndex() + "], reason: [" + ex.getMessage() + "]", ex); } } /** * Marks index as upgraded so we don't have to test it again */ private IndexMetaData markAsUpgraded(IndexMetaData indexMetaData) { Settings settings = Settings.builder().put(indexMetaData.settings()).put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.CURRENT).build(); return IndexMetaData.builder(indexMetaData).settings(settings).build(); } /** * A fake analysis server that returns the same keyword analyzer for all requests */ private static class FakeAnalysisService extends AnalysisService { private Analyzer fakeAnalyzer = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { throw new UnsupportedOperationException("shouldn't be here"); } }; public FakeAnalysisService(Index index, Settings indexSettings) { super(index, indexSettings); } @Override public NamedAnalyzer analyzer(String name) { return new NamedAnalyzer(name, fakeAnalyzer); } @Override public void close() { fakeAnalyzer.close(); super.close(); } } }
package org.get.oxicam.clinicalguide.xml.stats;

import java.util.ArrayList;

import org.get.oxicam.clinicalguide.ClinicalGuideActivity;
import org.get.oxicam.clinicalguide.xml.ParserHelper;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

/**
 * Factory that turns a {@code <question>} XML element into the matching
 * {@link AbstractStatsQuestion} subclass based on the element's "type" attribute.
 */
public class StatsQuestionFactory {

    // NOTE(review): the former field "private static int typeInt" was removed.
    // It was only ever written and read inside createQuestion(), so keeping it
    // as mutable static state served no purpose and was not thread-safe.

    /**
     * Creates the specific question object depending on the type attribute of
     * the question
     *
     * @param e Element with a tagname "question"
     * @param mActivity the hosting activity, passed through to the created question
     * @return A child class of AbstractStatsQuestion depending on the attributes and childnodes.
     * @throws IllegalArgumentException if the element is not a question tag or has an unknown type
     */
    public static AbstractStatsQuestion createQuestion(Element e, ClinicalGuideActivity mActivity) {
        if (!e.getTagName().equalsIgnoreCase("question")) {
            throw new IllegalArgumentException("Not a question tag");
        }
        String type = ParserHelper.requiredAttributeGetter(e, "type");
        int typeInt = AbstractStatsQuestion.getQuestionTypeInt(type);
        switch (typeInt) {
            case AbstractStatsQuestion.QUESTION_TYPE_COUNT:
                return createCountQuestion(e, mActivity);
            case AbstractStatsQuestion.QUESTION_TYPE_LIST:
                return createListQuestion(e, mActivity);
            case AbstractStatsQuestion.QUESTION_TYPE_MIN:
            case AbstractStatsQuestion.QUESTION_TYPE_MAX:
                return createExtremaQuestion(e, mActivity);
            case AbstractStatsQuestion.QUESTION_TYPE_PERCENTAGE:
                return createPercentageQuestion(e, mActivity);
            case AbstractStatsQuestion.QUESTION_TYPE_RATIO:
                return createRatioQuestion(e, mActivity);
            case AbstractStatsQuestion.QUESTION_TYPE_AVERAGE:
                return createAverageQuestion(e, mActivity);
            default:
                throw new IllegalArgumentException("Question type not existing");
        }
    }

    /** Parses every element of {@code constraints} into a StatsConstraint. */
    private static ArrayList<StatsConstraint> parseConstraints(NodeList constraints) {
        ArrayList<StatsConstraint> result = new ArrayList<StatsConstraint>();
        for (int i = 0; i < constraints.getLength(); i++) {
            result.add(ParserHelper.getStatsConstraintDetails((Element) constraints.item(i)));
        }
        return result;
    }

    /** Returns the compare constraint if exactly one is present, otherwise null. */
    private static StatsCompareConstraint parseCompareConstraint(NodeList compareConstraintNodeList) {
        if (compareConstraintNodeList.getLength() == 1) {
            return ParserHelper.getCompareConstraintDetails((Element) compareConstraintNodeList.item(0));
        }
        return null;
    }

    /** Returns the timespan if exactly one is present, otherwise null. */
    private static StatsTimespan parseTimespan(NodeList timespan) {
        if (timespan.getLength() == 1) {
            return ParserHelper.getTimespanDetails((Element) timespan.item(0));
        }
        return null;
    }

    /** Builds a list question: one or more subjects, optional constraints/timespan/compare constraint. */
    private static AbstractStatsQuestion createListQuestion(Element e, ClinicalGuideActivity mActivity) {
        NodeList subjects = e.getElementsByTagName("subject");
        NodeList constraints = e.getElementsByTagName("statsconstraint");
        NodeList compareConstraintNodeList = e.getElementsByTagName("compareconstraint");
        NodeList timespan = e.getElementsByTagName("timespan");

        if (timespan.getLength() > 1 || compareConstraintNodeList.getLength() > 1) {
            throw new IllegalArgumentException("There can only be one timespan or 1 comparecontraint in a question");
        }

        ArrayList<StatsSubject> arrSubj = new ArrayList<StatsSubject>();
        for (int i = 0; i < subjects.getLength(); i++) {
            arrSubj.add(ParserHelper.getStatsSubjectDetails((Element) subjects.item(i)));
        }

        return new StatsQuestionList(mActivity, arrSubj,
                e.getAttribute("distinct").equalsIgnoreCase("true"),
                parseConstraints(constraints),
                parseTimespan(timespan),
                parseCompareConstraint(compareConstraintNodeList));
    }

    /** Builds a ratio question from nested {@code <count>} sub-questions. */
    private static AbstractStatsQuestion createRatioQuestion(Element e, ClinicalGuideActivity mActivity) {
        NodeList counts = e.getElementsByTagName("count");
        NodeList constraints = e.getElementsByTagName("statsconstraint");
        NodeList timespan = e.getElementsByTagName("timespan");
        NodeList compareConstraintNodeList = e.getElementsByTagName("compareconstraint");

        // NOTE(review): unlike the list/count questions, multiple compare constraints are
        // not rejected here; extra ones are silently ignored. Kept as-is to preserve behavior.
        if (timespan.getLength() > 1) {
            throw new IllegalArgumentException("There can only be one timespan in a question");
        }

        ArrayList<StatsQuestionCount> arrCounts = new ArrayList<StatsQuestionCount>();
        for (int i = 0; i < counts.getLength(); i++) {
            arrCounts.add(createSubCountQuestion((Element) counts.item(i), mActivity));
        }

        return new StatsQuestionRatio(mActivity, arrCounts,
                parseConstraints(constraints),
                parseTimespan(timespan),
                parseCompareConstraint(compareConstraintNodeList));
    }

    /** Builds a percentage question: exactly one {@code <target>} plus optional {@code <others>}. */
    private static StatsQuestionPercentage createPercentageQuestion(Element e, ClinicalGuideActivity mActivity) {
        NodeList mainTargetList = e.getElementsByTagName("target");
        NodeList otherList = e.getElementsByTagName("others");
        NodeList constraints = e.getElementsByTagName("statsconstraint");
        NodeList timespan = e.getElementsByTagName("timespan");
        NodeList compareConstraintNodeList = e.getElementsByTagName("compareconstraint");

        if (timespan.getLength() > 1) {
            throw new IllegalArgumentException("There can only be one timespan in a question");
        }
        if (mainTargetList.getLength() != 1) {
            throw new IllegalArgumentException("There should be one target in a percentage question");
        }

        // The main target is both the percentage numerator and the first entry of the full list.
        StatsQuestionCount mainTarget = createSubCountQuestion((Element) mainTargetList.item(0), mActivity);
        ArrayList<StatsQuestionCount> arrCounts = new ArrayList<StatsQuestionCount>();
        arrCounts.add(mainTarget);
        for (int i = 0; i < otherList.getLength(); i++) {
            arrCounts.add(createSubCountQuestion((Element) otherList.item(i), mActivity));
        }

        return new StatsQuestionPercentage(mActivity, mainTarget, arrCounts,
                parseConstraints(constraints),
                parseTimespan(timespan),
                parseCompareConstraint(compareConstraintNodeList));
    }

    /** Builds a top-level count question ("distinct"/"statsconstraint"/... child tags). */
    private static StatsQuestionCount createCountQuestion(Element e, ClinicalGuideActivity mActivity) {
        return buildCountQuestion(e, mActivity, "distinct", "statsconstraint", "compareconstraint", "timespan");
    }

    /** Builds a nested count question ("countdistinct"/"countconstraint"/... child tags). */
    private static StatsQuestionCount createSubCountQuestion(Element e, ClinicalGuideActivity mActivity) {
        return buildCountQuestion(e, mActivity, "countdistinct", "countconstraint", "countcompareconstraint", "counttimespan");
    }

    /**
     * Shared implementation for the two count-question flavours, which differ only in the
     * child tag names they read.
     */
    private static StatsQuestionCount buildCountQuestion(Element e, ClinicalGuideActivity mActivity,
                                                         String distinctTag, String constraintTag,
                                                         String compareTag, String timespanTag) {
        NodeList distinctNl = e.getElementsByTagName(distinctTag);
        NodeList constraints = e.getElementsByTagName(constraintTag);
        NodeList compareConstraintNodeList = e.getElementsByTagName(compareTag);
        NodeList timespan = e.getElementsByTagName(timespanTag);

        if (timespan.getLength() > 1 || compareConstraintNodeList.getLength() > 1 || distinctNl.getLength() > 1) {
            throw new IllegalArgumentException("There can only be one timespan or 1 comparecontraint in a question");
        }

        StatsSubject distinctSubj = null;
        if (distinctNl.getLength() == 1) {
            distinctSubj = ParserHelper.getStatsSubjectDetails((Element) distinctNl.item(0));
        }

        String label = e.getAttribute("label");
        return new StatsQuestionCount(mActivity, label, distinctSubj,
                parseConstraints(constraints),
                parseTimespan(timespan),
                parseCompareConstraint(compareConstraintNodeList));
    }

    /** Builds an average question over either one {@code <subject>} or one {@code <count>}. */
    private static AbstractStatsQuestion createAverageQuestion(Element e, ClinicalGuideActivity mActivity) {
        NodeList subjects = e.getElementsByTagName("subject");
        NodeList countQuestions = e.getElementsByTagName("count");
        NodeList constraints = e.getElementsByTagName("statsconstraint");
        NodeList compareConstraintNodeList = e.getElementsByTagName("compareconstraint");
        NodeList timespan = e.getElementsByTagName("timespan");

        int subjLen = subjects.getLength();
        int countLen = countQuestions.getLength();

        if (timespan.getLength() > 1 || compareConstraintNodeList.getLength() > 1) {
            throw new IllegalArgumentException("There can only be one timespan or 1 comparecontraint in a question");
        }

        StatsCompareConstraint compareConstraint = parseCompareConstraint(compareConstraintNodeList);
        StatsTimespan qTimespan = parseTimespan(timespan);

        // NOTE(review): arrConst is intentionally passed to the constructor BEFORE it is
        // populated below, mirroring the original code. This only works because the
        // constructor is presumed to keep a reference to the list — TODO confirm.
        ArrayList<StatsConstraint> arrConst = new ArrayList<StatsConstraint>();

        AbstractStatsQuestion retVal;
        if (subjLen == 1) {
            StatsSubject subj = ParserHelper.getStatsSubjectDetails((Element) subjects.item(0));
            retVal = new StatsQuestionAverage(mActivity, subj, arrConst, qTimespan, compareConstraint);
        } else if (countLen == 1) {
            StatsQuestionCount countQuestion = createSubCountQuestion((Element) countQuestions.item(0), mActivity);
            retVal = new StatsQuestionAverage(mActivity, countQuestion, arrConst, qTimespan, compareConstraint);
        } else {
            throw new IllegalArgumentException("Average question can only have either 1 <subject> or 1 <count> tag");
        }

        // NOTE(review): this loop's result is discarded; kept only because
        // getStatsSubjectDetails may validate (and throw on) malformed subjects.
        for (int i = 0; i < subjLen; i++) {
            ParserHelper.getStatsSubjectDetails((Element) subjects.item(i));
        }
        for (int i = 0; i < constraints.getLength(); i++) {
            arrConst.add(ParserHelper.getStatsConstraintDetails((Element) constraints.item(i)));
        }
        return retVal;
    }

    /** Builds a min/max question over either one {@code <subject>} or one {@code <count>}. */
    private static StatsQuestionExtrema createExtremaQuestion(Element e, ClinicalGuideActivity mActivity) {
        NodeList subjects = e.getElementsByTagName("subject");
        NodeList countQuestions = e.getElementsByTagName("count");
        NodeList constraints = e.getElementsByTagName("statsconstraint");
        NodeList compareConstraintNodeList = e.getElementsByTagName("compareconstraint");
        NodeList timespan = e.getElementsByTagName("timespan");

        // Re-reads the "type" attribute; createQuestion only routes "min"/"max" here,
        // but any other value would leave extremaType at -1 — TODO confirm downstream handling.
        String type = ParserHelper.requiredAttributeGetter(e, "type").trim();
        int extremaType = -1;
        if (type.equalsIgnoreCase("max")) {
            extremaType = StatsQuestionExtrema.MAX;
        } else if (type.equalsIgnoreCase("min")) {
            extremaType = StatsQuestionExtrema.MIN;
        }

        int subjLen = subjects.getLength();
        int countLen = countQuestions.getLength();

        if (timespan.getLength() > 1 || compareConstraintNodeList.getLength() > 1) {
            throw new IllegalArgumentException("There can only be one timespan or 1 comparecontraint in a question");
        }

        StatsCompareConstraint compareConstraint = parseCompareConstraint(compareConstraintNodeList);
        StatsTimespan qTimespan = parseTimespan(timespan);

        // NOTE(review): as in createAverageQuestion, arrConst is shared with the
        // constructed question and filled afterwards — preserved from the original.
        ArrayList<StatsConstraint> arrConst = new ArrayList<StatsConstraint>();

        StatsQuestionExtrema retVal;
        if (subjLen == 1) {
            StatsSubject subj = ParserHelper.getStatsSubjectDetails((Element) subjects.item(0));
            retVal = new StatsQuestionExtrema(mActivity, extremaType, subj, arrConst, qTimespan, compareConstraint);
        } else if (countLen == 1) {
            StatsQuestionCount countQuestion = createSubCountQuestion((Element) countQuestions.item(0), mActivity);
            retVal = new StatsQuestionExtrema(mActivity, extremaType, countQuestion, arrConst, qTimespan, compareConstraint);
        } else {
            throw new IllegalArgumentException("Extrema question can only have either 1 <subject> or 1 <count> tag");
        }

        // NOTE(review): discarded results, kept for their potential validation side effects.
        for (int i = 0; i < subjLen; i++) {
            ParserHelper.getStatsSubjectDetails((Element) subjects.item(i));
        }
        for (int i = 0; i < constraints.getLength(); i++) {
            arrConst.add(ParserHelper.getStatsConstraintDetails((Element) constraints.item(i)));
        }
        return retVal;
    }
}
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.rest.history; import static io.restassured.RestAssured.expect; import static io.restassured.RestAssured.given; import static io.restassured.path.json.JsonPath.from; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_JOB_DEFINITION_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_JOB_DEFINITION_ID; import static org.assertj.core.api.Assertions.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import java.util.Arrays; import java.util.Date; import java.util.List; import javax.ws.rs.core.Response.Status; import org.camunda.bpm.engine.history.HistoricIncident; import org.camunda.bpm.engine.history.HistoricIncidentQuery; import org.camunda.bpm.engine.impl.calendar.DateTimeUtil; import org.camunda.bpm.engine.rest.AbstractRestServiceTest; import org.camunda.bpm.engine.rest.exception.InvalidRequestException; import 
org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;

import io.restassured.http.ContentType;
import io.restassured.response.Response;

/**
 * REST-layer query tests for the {@code /history/incident} endpoint, driven
 * against a Mockito-mocked {@link HistoricIncidentQuery}.
 *
 * @author Roman Smirnov
 */
public class HistoricIncidentRestServiceQueryTest extends AbstractRestServiceTest {

  @ClassRule
  public static TestContainerRule rule = new TestContainerRule();

  protected static final String HISTORY_INCIDENT_QUERY_URL = TEST_RESOURCE_ROOT_PATH + "/history/incident";
  protected static final String HISTORY_INCIDENT_COUNT_QUERY_URL = HISTORY_INCIDENT_QUERY_URL + "/count";

  // Recreated before every test by setUpRuntimeData(); all verifications run against it.
  private HistoricIncidentQuery mockedQuery;

  @Before
  public void setUpRuntimeData() {
    mockedQuery = setUpMockHistoricIncidentQuery(MockProvider.createMockHistoricIncidents());
  }

  // Builds a mock query whose list()/count() return the given incidents, and wires it
  // into the (mocked) history service so the REST resource under test picks it up.
  private HistoricIncidentQuery setUpMockHistoricIncidentQuery(List<HistoricIncident> mockedHistoricIncidents) {
    HistoricIncidentQuery mockedHistoricIncidentQuery = mock(HistoricIncidentQuery.class);
    when(mockedHistoricIncidentQuery.list()).thenReturn(mockedHistoricIncidents);
    when(mockedHistoricIncidentQuery.count()).thenReturn((long) mockedHistoricIncidents.size());

    when(processEngine.getHistoryService().createHistoricIncidentQuery()).thenReturn(mockedHistoricIncidentQuery);

    return mockedHistoricIncidentQuery;
  }

  // An empty filter value must still yield a 200 response.
  @Test
  public void testEmptyQuery() {
    String queryKey = "";
    given()
      .queryParam("processInstanceId", queryKey)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORY_INCIDENT_QUERY_URL);
  }

  // A request without parameters lists everything and touches the query only via list().
  @Test
  public void testNoParametersQuery() {
    expect()
      .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).list();
    verifyNoMoreInteractions(mockedQuery);
  }

  // Unknown sortBy values and invalid sortOrder values are both rejected with 400.
  @Test
  public void testInvalidSortingOptions() {
    executeAndVerifySorting("anInvalidSortByOption", "asc", Status.BAD_REQUEST);
executeAndVerifySorting("processInstanceId", "anInvalidSortOrderOption", Status.BAD_REQUEST); } protected void executeAndVerifySorting(String sortBy, String sortOrder, Status expectedStatus) { given() .queryParam("sortBy", sortBy) .queryParam("sortOrder", sortOrder) .then() .expect() .statusCode(expectedStatus.getStatusCode()) .when() .get(HISTORY_INCIDENT_QUERY_URL); } @Test public void testSortOrderParameterOnly() { given() .queryParam("sortOrder", "asc") .then() .expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .contentType(ContentType.JSON) .body("type", equalTo(InvalidRequestException.class.getSimpleName())) .body("message", containsString("Only a single sorting parameter specified. sortBy and sortOrder required")) .when() .get(HISTORY_INCIDENT_QUERY_URL); } @Test public void testSortingParameters() { InOrder inOrder = Mockito.inOrder(mockedQuery); executeAndVerifySorting("incidentId", "asc", Status.OK); inOrder.verify(mockedQuery).orderByIncidentId(); inOrder.verify(mockedQuery).asc(); inOrder = Mockito.inOrder(mockedQuery); executeAndVerifySorting("incidentId", "desc", Status.OK); inOrder.verify(mockedQuery).orderByIncidentId(); inOrder.verify(mockedQuery).desc(); inOrder = Mockito.inOrder(mockedQuery); executeAndVerifySorting("incidentMessage", "asc", Status.OK); inOrder.verify(mockedQuery).orderByIncidentId(); inOrder.verify(mockedQuery).asc(); inOrder = Mockito.inOrder(mockedQuery); executeAndVerifySorting("incidentMessage", "desc", Status.OK); inOrder.verify(mockedQuery).orderByIncidentId(); inOrder.verify(mockedQuery).desc(); inOrder = Mockito.inOrder(mockedQuery); executeAndVerifySorting("createTime", "asc", Status.OK); inOrder.verify(mockedQuery).orderByCreateTime(); inOrder.verify(mockedQuery).asc(); inOrder = Mockito.inOrder(mockedQuery); executeAndVerifySorting("createTime", "desc", Status.OK); inOrder.verify(mockedQuery).orderByCreateTime(); inOrder.verify(mockedQuery).desc(); inOrder = Mockito.inOrder(mockedQuery); 
    // --- continuation of the sortBy/sortOrder verification sequence started above ---
    // Each case issues a GET with the given sortBy/sortOrder and verifies, in order,
    // that the expected orderBy* method and direction were invoked on the mocked query.

    // NOTE(review): this "endTime"/"asc" case verifies orderByCreateTime(), while the
    // "endTime"/"desc" case below verifies orderByEndTime() — looks like a copy/paste
    // slip; confirm which orderBy method is actually intended here.
    executeAndVerifySorting("endTime", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByCreateTime();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("endTime", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByEndTime();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("incidentType", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByIncidentType();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("incidentType", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByIncidentType();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("executionId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByExecutionId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("executionId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByExecutionId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("activityId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByActivityId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("activityId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByActivityId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("processInstanceId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessInstanceId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("processInstanceId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessInstanceId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("processDefinitionId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessDefinitionId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("processDefinitionId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessDefinitionId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("causeIncidentId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByCauseIncidentId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("causeIncidentId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByCauseIncidentId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("rootCauseIncidentId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByRootCauseIncidentId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("rootCauseIncidentId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByRootCauseIncidentId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("configuration", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByConfiguration();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("configuration", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByConfiguration();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("tenantId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByTenantId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("tenantId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByTenantId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("incidentState", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByIncidentState();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("incidentState", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByIncidentState();
    inOrder.verify(mockedQuery).desc();
  }

  // Both paging parameters supplied: expect listPage(firstResult, maxResults).
  @Test
  public void testSuccessfulPagination() {
    int firstResult = 0;
    int maxResults = 10;

    given()
      .queryParam("firstResult", firstResult)
      .queryParam("maxResults", maxResults)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).listPage(firstResult, maxResults);
  }

  // Missing firstResult defaults to 0.
  @Test
  public void testMissingFirstResultParameter() {
    int maxResults = 10;

    given()
      .queryParam("maxResults", maxResults)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).listPage(0, maxResults);
  }

  // Missing maxResults defaults to Integer.MAX_VALUE (i.e. "no limit").
  @Test
  public void testMissingMaxResultsParameter() {
    int firstResult = 10;

    given()
      .queryParam("firstResult", firstResult)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).listPage(firstResult, Integer.MAX_VALUE);
  }

  // The /count endpoint returns the mocked count of 1 and delegates to count().
  @Test
  public void testQueryCount() {
    expect()
      .statusCode(Status.OK.getStatusCode())
      .body("count", equalTo(1))
      .when()
        .get(HISTORY_INCIDENT_COUNT_QUERY_URL);

    verify(mockedQuery).count();
  }

  // Plain query without parameters: one mocked incident comes back and every
  // JSON property of the response is compared against the MockProvider fixture.
  @Test
  public void testSimpleHistoricTaskInstanceQuery() {
    Response response = given()
        .then()
          .expect()
            .statusCode(Status.OK.getStatusCode())
          .when()
            .get(HISTORY_INCIDENT_QUERY_URL);

    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).list();

    String content = response.asString();
    List<String> incidents = from(content).getList("");
    Assert.assertEquals("There should be one incident returned.", 1, incidents.size());
    Assert.assertNotNull("The returned incident should not be null.", incidents.get(0));

    // Extract every serialized property of the single returned incident.
    String returnedId = from(content).getString("[0].id");
    String returnedProcessDefinitionKey = from(content).getString("[0].processDefinitionKey");
    String returnedProcessDefinitionId = from(content).getString("[0].processDefinitionId");
    String returnedProcessInstanceId = from(content).getString("[0].processInstanceId");
    String returnedExecutionId = from(content).getString("[0].executionId");
    Date returnedCreateTime = DateTimeUtil.parseDate(from(content).getString("[0].createTime"));
    Date returnedEndTime = DateTimeUtil.parseDate(from(content).getString("[0].endTime"));
    String returnedIncidentType = from(content).getString("[0].incidentType");
    String returnedActivityId = from(content).getString("[0].activityId");
    String returnedCauseIncidentId = from(content).getString("[0].causeIncidentId");
    String returnedRootCauseIncidentId = from(content).getString("[0].rootCauseIncidentId");
    String returnedConfiguration = from(content).getString("[0].configuration");
    String returnedIncidentMessage = from(content).getString("[0].incidentMessage");
    Boolean returnedIncidentOpen = from(content).getBoolean("[0].open");
    Boolean returnedIncidentDeleted = from(content).getBoolean("[0].deleted");
    Boolean returnedIncidentResolved = from(content).getBoolean("[0].resolved");
    String returnedTenantId = from(content).getString("[0].tenantId");
    String returnedJobDefinitionId = from(content).getString("[0].jobDefinitionId");
    Date returnedRemovalTime = DateTimeUtil.parseDate(from(content).getString("[0].removalTime"));
    String returnedRootProcessInstanceId = from(content).getString("[0].rootProcessInstanceId");

    // Compare against the MockProvider constants backing the mocked incident.
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_ID, returnedId);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_PROC_INST_ID, returnedProcessInstanceId);
    Assert.assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HIST_INCIDENT_CREATE_TIME), returnedCreateTime);
    Assert.assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HIST_INCIDENT_END_TIME), returnedEndTime);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_EXECUTION_ID, returnedExecutionId);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_PROC_DEF_ID, returnedProcessDefinitionId);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_PROC_DEF_KEY, returnedProcessDefinitionKey);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_TYPE, returnedIncidentType);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_ACTIVITY_ID, returnedActivityId);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_CAUSE_INCIDENT_ID, returnedCauseIncidentId);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_ROOT_CAUSE_INCIDENT_ID, returnedRootCauseIncidentId);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_CONFIGURATION, returnedConfiguration);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_MESSAGE, returnedIncidentMessage);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_STATE_OPEN, returnedIncidentOpen);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_STATE_DELETED, returnedIncidentDeleted);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_STATE_RESOLVED, returnedIncidentResolved);
    Assert.assertEquals(MockProvider.EXAMPLE_TENANT_ID, returnedTenantId);
    Assert.assertEquals(EXAMPLE_JOB_DEFINITION_ID, returnedJobDefinitionId);
    Assert.assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HIST_INCIDENT_REMOVAL_TIME), returnedRemovalTime);
    Assert.assertEquals(MockProvider.EXAMPLE_HIST_INCIDENT_ROOT_PROC_INST_ID, returnedRootProcessInstanceId);
  }

  // Each of the following tests checks that a single filter query parameter is
  // forwarded to the matching method on the mocked query object.

  @Test
  public void testQueryByIncidentId() {
    String incidentId = MockProvider.EXAMPLE_HIST_INCIDENT_ID;

    given()
      .queryParam("incidentId", incidentId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).incidentId(incidentId);
  }

  @Test
  public void testQueryByIncidentType() {
    String incidentType = MockProvider.EXAMPLE_HIST_INCIDENT_TYPE;

    given()
      .queryParam("incidentType", incidentType)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).incidentType(incidentType);
  }

  @Test
  public void testQueryByIncidentMessage() {
    String incidentMessage = MockProvider.EXAMPLE_HIST_INCIDENT_MESSAGE;

    given()
      .queryParam("incidentMessage", incidentMessage)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).incidentMessage(incidentMessage);
  }

  @Test
  public void testQueryByProcessDefinitionId() {
    String processDefinitionId = MockProvider.EXAMPLE_HIST_INCIDENT_PROC_DEF_ID;

    given()
      .queryParam("processDefinitionId", processDefinitionId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).processDefinitionId(processDefinitionId);
  }

  @Test
  public void testQueryByProcessInstanceId() {
    String processInstanceId = MockProvider.EXAMPLE_HIST_INCIDENT_PROC_INST_ID;

    given()
      .queryParam("processInstanceId", processInstanceId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).processInstanceId(processInstanceId);
  }

  @Test
  public void testQueryByExecutionId() {
    String executionId = MockProvider.EXAMPLE_HIST_INCIDENT_EXECUTION_ID;

    given()
      .queryParam("executionId", executionId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).executionId(executionId);
  }

  @Test
  public void testQueryByActivityId() {
    String activityId = MockProvider.EXAMPLE_HIST_INCIDENT_ACTIVITY_ID;

    given()
      .queryParam("activityId", activityId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).activityId(activityId);
  }

  @Test
  public void testQueryByCauseIncidentId() {
    String causeIncidentId = MockProvider.EXAMPLE_HIST_INCIDENT_CAUSE_INCIDENT_ID;

    given()
      .queryParam("causeIncidentId", causeIncidentId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).causeIncidentId(causeIncidentId);
  }

  @Test
  public void testQueryByRootCauseIncidentId() {
    String rootCauseIncidentId = MockProvider.EXAMPLE_HIST_INCIDENT_ROOT_CAUSE_INCIDENT_ID;

    given()
      .queryParam("rootCauseIncidentId", rootCauseIncidentId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).rootCauseIncidentId(rootCauseIncidentId);
  }

  @Test
  public void testQueryByConfiguration() {
    String configuration = MockProvider.EXAMPLE_HIST_INCIDENT_CONFIGURATION;

    given()
      .queryParam("configuration", configuration)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).configuration(configuration);
  }

  @Test
  public void testQueryByHistoryConfiguration() {
    String historyConfiguration = MockProvider.EXAMPLE_HIST_INCIDENT_HISTORY_CONFIGURATION;

    given()
      .queryParam("historyConfiguration", historyConfiguration)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).historyConfiguration(historyConfiguration);
  }

  // Boolean state filters map to parameterless methods on the query.

  @Test
  public void testQueryByOpen() {
    given()
      .queryParam("open", true)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).open();
  }

  @Test
  public void testQueryByResolved() {
    given()
      .queryParam("resolved", true)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).resolved();
  }

  @Test
  public void testQueryByDeleted() {
    given()
      .queryParam("deleted", true)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).deleted();
  }

  // tenantIdIn: the mock is re-stubbed with two incidents (one per tenant) and
  // the response order/content is asserted per element.
  @Test
  public void testQueryByTenantIds() {
    mockedQuery = setUpMockHistoricIncidentQuery(Arrays.asList(
        MockProvider.createMockHistoricIncident(MockProvider.EXAMPLE_TENANT_ID),
        MockProvider.createMockHistoricIncident(MockProvider.ANOTHER_EXAMPLE_TENANT_ID)));

    Response response = given()
      .queryParam("tenantIdIn", MockProvider.EXAMPLE_TENANT_ID_LIST)
      .then().expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).tenantIdIn(MockProvider.EXAMPLE_TENANT_ID, MockProvider.ANOTHER_EXAMPLE_TENANT_ID);
    verify(mockedQuery).list();

    String content = response.asString();
    List<String> incidents = from(content).getList("");
    assertThat(incidents).hasSize(2);

    String returnedTenantId1 = from(content).getString("[0].tenantId");
    String returnedTenantId2 = from(content).getString("[1].tenantId");

    assertThat(returnedTenantId1).isEqualTo(MockProvider.EXAMPLE_TENANT_ID);
    assertThat(returnedTenantId2).isEqualTo(MockProvider.ANOTHER_EXAMPLE_TENANT_ID);
  }

  // jobDefinitionIdIn: a comma-separated list is split and forwarded as varargs.
  @Test
  public void testQueryByJobDefinitionIds() {
    String jobDefinitionIds = EXAMPLE_JOB_DEFINITION_ID + "," + NON_EXISTING_JOB_DEFINITION_ID;

    given()
      .queryParam("jobDefinitionIdIn", jobDefinitionIds)
      .then().expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORY_INCIDENT_QUERY_URL);

    verify(mockedQuery).jobDefinitionIdIn(EXAMPLE_JOB_DEFINITION_ID, NON_EXISTING_JOB_DEFINITION_ID);
    verify(mockedQuery).list();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.store.easy.json.loader;

import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
import static org.apache.drill.test.rowSet.RowSetUtilities.mapValue;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import org.apache.drill.categories.JsonTest;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.physical.rowSet.RowSet;
import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.test.rowSet.RowSetUtilities;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Tests the JSON structure parser's handling of JSON objects (Drill maps):
 * schema inference, provided schemas, JSON-literal mode, map arrays, and
 * object-vs-scalar/array type-conflict errors.
 */
@Category(JsonTest.class)
public class TestObjects extends BaseJsonLoaderTest {

  // Inferred map schema: missing members, empty maps and null maps all
  // materialize as null member values.
  @Test
  public void testMap() {
    String json =
        "{a: 1, m: {b: 10, c: 20}}\n" +
        "{a: 2, m: {b: 110}}\n" +
        "{a: 3, m: {c: 220}}\n" +
        "{a: 4, m: {}}\n" +
        "{a: 5, m: null}\n" +
        "{a: 6}\n" +
        "{a: 7, m: {b: 710, c: 720}}";
    JsonLoaderFixture loader = new JsonLoaderFixture();
    loader.open(json);
    RowSet results = loader.next();
    assertNotNull(results);

    TupleMetadata expectedSchema = new SchemaBuilder()
        .addNullable("a", MinorType.BIGINT)
        .addMap("m")
          .addNullable("b", MinorType.BIGINT)
          .addNullable("c", MinorType.BIGINT)
          .resumeSchema()
        .build();
    RowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(1L, mapValue(10L, 20L))
        .addRow(2L, mapValue(110L, null))
        .addRow(3L, mapValue(null, 220L))
        .addRow(4L, mapValue(null, null))
        .addRow(5L, mapValue(null, null))
        .addRow(6L, mapValue(null, null))
        .addRow(7L, mapValue(710L, 720L))
        .build();
    RowSetUtilities.verify(expected, results);
    assertNull(loader.next());
    loader.close();
  }

  /**
   * Without a schema, leading nulls or empty maps can be ambiguous.
   * With a schema, the meaning is clear.
   */
  @Test
  public void testMapWithSchema() {
    // Same data as testMap, but ambiguous rows (missing/null/empty map) first.
    String json =
        "{a: 6}\n" +
        "{a: 5, m: null}\n" +
        "{a: 4, m: {}}\n" +
        "{a: 2, m: {b: 110}}\n" +
        "{a: 3, m: {c: 220}}\n" +
        "{a: 1, m: {b: 10, c: 20}}\n" +
        "{a: 7, m: {b: 710, c: 720}}";
    TupleMetadata schema = new SchemaBuilder()
        .addNullable("a", MinorType.BIGINT)
        .addMap("m")
          .addNullable("b", MinorType.BIGINT)
          .addNullable("c", MinorType.BIGINT)
          .resumeSchema()
        .build();
    JsonLoaderFixture loader = new JsonLoaderFixture();
    loader.builder.providedSchema(schema);
    loader.open(json);
    RowSet results = loader.next();
    assertNotNull(results);

    RowSet expected = fixture.rowSetBuilder(schema)
        .addRow(6L, mapValue(null, null))
        .addRow(5L, mapValue(null, null))
        .addRow(4L, mapValue(null, null))
        .addRow(2L, mapValue(110L, null))
        .addRow(3L, mapValue(null, 220L))
        .addRow(1L, mapValue(10L, 20L))
        .addRow(7L, mapValue(710L, 720L))
        .build();
    RowSetUtilities.verify(expected, results);
    assertNull(loader.next());
    loader.close();
  }

  // JSON-literal mode: column "m" is captured as raw JSON text in a VARCHAR
  // column instead of being parsed into a map.
  @Test
  public void testMapAsJson() {
    String json =
        "{a: 6}\n" +
        "{a: 5, m: null}\n" +
        "{a: 4, m: {}}\n" +
        "{a: 2, m: {b: 110}}\n" +
        "{a: 3, m: {c: 220}}\n" +
        "{a: 1, m: {b: 10, c: 20}}\n" +
        "{a: 7, m: {b: 710, c: 720}}";
    TupleMetadata schema = new SchemaBuilder()
        .addNullable("a", MinorType.BIGINT)
        .addNullable("m", MinorType.VARCHAR)
        .build();
    // Mark "m" to be read in JSON-literal mode.
    ColumnMetadata m = schema.metadata("m");
    m.setProperty(JsonLoader.JSON_MODE, JsonLoader.JSON_LITERAL_MODE);
    JsonLoaderFixture loader = new JsonLoaderFixture();
    loader.builder.providedSchema(schema);
    loader.open(json);
    RowSet results = loader.next();
    assertNotNull(results);

    // Note: a missing field yields SQL NULL, an explicit JSON null yields "null".
    RowSet expected = fixture.rowSetBuilder(schema)
        .addRow(6L, null)
        .addRow(5L, "null")
        .addRow(4L, "{}")
        .addRow(2L, "{\"b\": 110}")
        .addRow(3L, "{\"c\": 220}")
        .addRow(1L, "{\"b\": 10, \"c\": 20}")
        .addRow(7L, "{\"b\": 710, \"c\": 720}")
        .build();
    RowSetUtilities.verify(expected, results);
    assertNull(loader.next());
    loader.close();
  }

  // Inferred repeated-map schema: [null] becomes one all-null map entry;
  // [], null and a missing field all become an empty array.
  @Test
  public void testMapArray() {
    String json =
        "{a: 1, m: [{b: 10, c: 20}, {b: 11, c: 21}]}\n" +
        "{a: 2, m: [{b: 110}]}\n" +
        "{a: 3, m: [{c: 220}]}\n" +
        "{a: 4, m: [{}]}\n" +
        "{a: 5, m: [null]}\n" +
        "{a: 6, m: []}\n" +
        "{a: 7, m: null}\n" +
        "{a: 8}\n" +
        "{a: 9, m: [{b: 710, c: 720}, {b: 711, c: 721}]}";
    JsonLoaderFixture loader = new JsonLoaderFixture();
    loader.open(json);
    RowSet results = loader.next();
    assertNotNull(results);

    TupleMetadata expectedSchema = new SchemaBuilder()
        .addNullable("a", MinorType.BIGINT)
        .addMapArray("m")
          .addNullable("b", MinorType.BIGINT)
          .addNullable("c", MinorType.BIGINT)
          .resumeSchema()
        .build();
    RowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(1L, mapArray(mapValue(10L, 20L), mapValue(11L, 21L)))
        .addRow(2L, mapArray(mapValue(110L, null)))
        .addRow(3L, mapArray(mapValue(null, 220L)))
        .addRow(4L, mapArray(mapValue(null, null)))
        .addRow(5L, mapArray(mapValue(null, null)))
        .addRow(6L, mapArray())
        .addRow(7L, mapArray())
        .addRow(8L, mapArray())
        .addRow(9L, mapArray(mapValue(710L, 720L), mapValue(711L, 721L)))
        .build();
    RowSetUtilities.verify(expected, results);
    assertNull(loader.next());
    loader.close();
  }

  /**
   * With a schema we don't have to infer the type of the map or its members.
   * Instead, we can tolerate extreme ambiguity in both.
   */
  @Test
  public void testMapArrayWithSchema() {
    String json =
        "{a: 8}\n" +
        "{a: 7, m: null}\n" +
        "{a: 6, m: []}\n" +
        "{a: 5, m: [null]}\n" +
        "{a: 4, m: [{}]}\n" +
        "{a: 10, m: [{b: null}]}\n" +
        "{a: 11, m: [{c: null}]}\n" +
        "{a: 12, m: [{b: null}, {c: null}]}\n" +
        "{a: 2, m: [{b: 110}]}\n" +
        "{a: 3, m: [{c: 220}]}\n" +
        "{a: 1, m: [{b: 10, c: 20}, {b: 11, c: 21}]}\n" +
        "{a: 9, m: [{b: 710, c: 720}, {b: 711, c: 721}]}";
    TupleMetadata schema = new SchemaBuilder()
        .addNullable("a", MinorType.BIGINT)
        .addMapArray("m")
          .addNullable("b", MinorType.BIGINT)
          .addNullable("c", MinorType.BIGINT)
          .resumeSchema()
        .build();
    JsonLoaderFixture loader = new JsonLoaderFixture();
    loader.builder.providedSchema(schema);
    loader.open(json);
    RowSet results = loader.next();
    assertNotNull(results);

    RowSet expected = fixture.rowSetBuilder(schema)
        .addRow( 8L, mapArray())
        .addRow( 7L, mapArray())
        .addRow( 6L, mapArray())
        .addRow( 5L, mapArray(mapValue(null, null)))
        .addRow( 4L, mapArray(mapValue(null, null)))
        .addRow(10L, mapArray(mapValue(null, null)))
        .addRow(11L, mapArray(mapValue(null, null)))
        .addRow(12L, mapArray(mapValue(null, null), mapValue(null, null)))
        .addRow( 2L, mapArray(mapValue(110L, null)))
        .addRow( 3L, mapArray(mapValue(null, 220L)))
        .addRow( 1L, mapArray(mapValue(10L, 20L), mapValue(11L, 21L)))
        .addRow( 9L, mapArray(mapValue(710L, 720L), mapValue(711L, 721L)))
        .build();
    RowSetUtilities.verify(expected, results);
    assertNull(loader.next());
    loader.close();
  }

  /**
   * The structure parser feels its way along looking ahead at tokens
   * to guess types. Test the case where the member is an array containing
   * null (so the parser does not know the type). Given a schema, we know
   * it is a map.
   */
  @Test
  public void testMapArrayWithSchemaInitialNullMember() {
    String json =
        "{a: 5, m: [null]}\n" +
        "{a: 1, m: [{b: 10, c: 20}, {b: 11, c: 21}]}\n";
    TupleMetadata schema = new SchemaBuilder()
        .addNullable("a", MinorType.BIGINT)
        .addMapArray("m")
          .addNullable("b", MinorType.BIGINT)
          .addNullable("c", MinorType.BIGINT)
          .resumeSchema()
        .build();
    JsonLoaderFixture loader = new JsonLoaderFixture();
    loader.builder.providedSchema(schema);
    loader.open(json);
    RowSet results = loader.next();
    assertNotNull(results);

    RowSet expected = fixture.rowSetBuilder(schema)
        .addRow( 5L, mapArray(mapValue(null, null)))
        .addRow( 1L, mapArray(mapValue(10L, 20L), mapValue(11L, 21L)))
        .build();
    RowSetUtilities.verify(expected, results);
    assertNull(loader.next());
    loader.close();
  }

  // A column that starts as an object cannot later hold a scalar.
  @Test
  public void testObjectToScalar() {
    String json = "{a: {b: 10}} {a: 10}";
    JsonLoaderFixture loader = new JsonLoaderFixture();
    loader.open(json);
    try {
      loader.next();
      fail();
    } catch (UserException e) {
      assertTrue(e.getMessage().contains("JSON object expected"));
    }
    loader.close();
  }

  // A column that starts as an object cannot later hold an array.
  @Test
  public void testObjectToArray() {
    String json = "{a: {b: 10}} {a: [10]}";
    JsonLoaderFixture loader = new JsonLoaderFixture();
    loader.open(json);
    try {
      loader.next();
      fail();
    } catch (UserException e) {
      assertTrue(e.getMessage().contains("JSON object expected"));
    }
    loader.close();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.util; import java.awt.Color; import java.awt.color.ColorSpace; import java.util.Arrays; /** * Color helper class. * <p> * This class extends java.awt.Color class keeping track of the original color * property values specified by the fo user in a rgb-icc call. 
* @deprecated Replaced by {@link org.apache.xmlgraphics.java2d.color.ColorWithAlternatives} */ @Deprecated public final class ColorExt extends Color { // private static final long serialVersionUID = 1L; // Values of fop-rgb-icc arguments private float rgbReplacementRed; private float rgbReplacementGreen; private float rgbReplacementBlue; private String iccProfileName; private String iccProfileSrc; private ColorSpace colorSpace; private float[] colorValues; /* * Helper for createFromFoRgbIcc */ private ColorExt(ColorSpace colorSpace, float[] colorValues, float opacity) { super(colorSpace, colorValues, opacity); } /* * Helper for createFromSvgIccColor */ private ColorExt(float red, float green, float blue, float opacity) { super(red, green, blue, opacity); } /** * Create ColorExt object backup up FO's rgb-icc color function * * @param redReplacement * Red part of RGB replacement color that will be used when ICC * profile can not be loaded * @param greenReplacement * Green part of RGB replacement color that will be used when ICC * profile can not be loaded * @param blueReplacement * Blue part of RGB replacement color that will be used when ICC * profile can not be loaded * @param profileName * Name of ICC profile * @param profileSrc * Source of ICC profile * @param colorSpace * ICC ColorSpace for the ICC profile * @param iccValues * color values * @return the requested color object */ public static ColorExt createFromFoRgbIcc(float redReplacement, float greenReplacement, float blueReplacement, String profileName, String profileSrc, ColorSpace colorSpace, float[] iccValues) { ColorExt ce = new ColorExt(colorSpace, iccValues, 1.0f); ce.rgbReplacementRed = redReplacement; ce.rgbReplacementGreen = greenReplacement; ce.rgbReplacementBlue = blueReplacement; ce.iccProfileName = profileName; ce.iccProfileSrc = profileSrc; ce.colorSpace = colorSpace; ce.colorValues = iccValues; return ce; } /** * Create ColorExt object backing up SVG's icc-color function. 
* * @param red * Red value resulting from the conversion from the user provided * (icc) color values to the batik (rgb) color space * @param green * Green value resulting from the conversion from the user * provided (icc) color values to the batik (rgb) color space * @param blue * Blue value resulting from the conversion from the user * provided (icc) color values to the batik (rgb) color space * @param opacity * Opacity * @param profileName * ICC profile name * @param profileHref * the URI to the color profile * @param profileCS * ICC ColorSpace profile * @param colorValues * ICC color values * @return the requested color object */ public static ColorExt createFromSvgIccColor(float red, float green, float blue, float opacity, String profileName, String profileHref, ColorSpace profileCS, float[] colorValues) { //TODO this method is not referenced by FOP, can it be deleted? ColorExt ce = new ColorExt(red, green, blue, opacity); ce.rgbReplacementRed = -1; ce.rgbReplacementGreen = -1; ce.rgbReplacementBlue = -1; ce.iccProfileName = profileName; ce.iccProfileSrc = profileHref; ce.colorSpace = profileCS; ce.colorValues = colorValues; return ce; } /** {@inheritDoc} */ @Override public int hashCode() { //implementation from the superclass should be good enough for our purposes return super.hashCode(); } /** {@inheritDoc} */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } ColorExt other = (ColorExt)obj; //TODO maybe use super.getColorComponents() instead if (!Arrays.equals(colorValues, other.colorValues)) { return false; } if (iccProfileName == null) { if (other.iccProfileName != null) { return false; } } else if (!iccProfileName.equals(other.iccProfileName)) { return false; } if (iccProfileSrc == null) { if (other.iccProfileSrc != null) { return false; } } else if (!iccProfileSrc.equals(other.iccProfileSrc)) { return false; } if 
(Float.floatToIntBits(rgbReplacementBlue) != Float.floatToIntBits(other.rgbReplacementBlue)) { return false; } if (Float.floatToIntBits(rgbReplacementGreen) != Float.floatToIntBits(other.rgbReplacementGreen)) { return false; } if (Float.floatToIntBits(rgbReplacementRed) != Float.floatToIntBits(other.rgbReplacementRed)) { return false; } return true; } /** * Get ICC profile name * * @return ICC profile name */ public String getIccProfileName() { return this.iccProfileName; } /** * Get ICC profile source * * @return ICC profile source */ public String getIccProfileSrc() { return this.iccProfileSrc; } /** * @return the original ColorSpace */ public ColorSpace getOrigColorSpace() { //TODO this method is probably unnecessary due to super.cs and getColorSpace() return this.colorSpace; } /** * Returns the original color values. * @return the original color values */ public float[] getOriginalColorComponents() { //TODO this method is probably unnecessary due to super.fvalue and getColorComponents() float[] copy = new float[this.colorValues.length]; System.arraycopy(this.colorValues, 0, copy, 0, copy.length); return copy; } /** * Create string representation of fop-rgb-icc function call to map this * ColorExt settings * @return the string representing the internal fop-rgb-icc() function call */ public String toFunctionCall() { StringBuffer sb = new StringBuffer(40); sb.append("fop-rgb-icc("); sb.append(this.rgbReplacementRed + ","); sb.append(this.rgbReplacementGreen + ","); sb.append(this.rgbReplacementBlue + ","); sb.append(this.iccProfileName + ","); if (this.iccProfileSrc != null) { sb.append("\"" + this.iccProfileSrc + "\""); } float[] colorComponents = this.getColorComponents(null); for (int ix = 0; ix < colorComponents.length; ix++) { sb.append(","); sb.append(colorComponents[ix]); } sb.append(")"); return sb.toString(); } }
//*****************************************************************************
//*
//* (c) Copyright 2002. Glub Tech, Incorporated. All Rights Reserved.
//*
//* $Id: TransparentWindow.java 37 2009-05-11 22:46:15Z gary $
//*
//*****************************************************************************

// found on java forums. made some improvements. - gary

package com.glub.gui;

import com.glub.util.*;

import java.awt.*;
import java.awt.event.*;
import java.util.*;
import javax.swing.*;

/**
 * A borderless window that fakes transparency: it screenshots the desktop
 * with {@link java.awt.Robot}, then paints that capture behind the given
 * image (plus optional text overlays) so the window appears see-through.
 */
public class TransparentWindow extends JWindow {
  protected static final long serialVersionUID = 1L;

  // Text overlays (StringOverlay instances) drawn on top of the image.
  private ArrayList stringOverlay = new ArrayList();
  // Graphics of the off-screen double buffer (tim).
  private Graphics2D tig;
  // Full-screen background capture.
  private Image img;
  // Off-screen buffer composited in paintP() and blitted to screen.
  private Image tim;
  // Used to grab the desktop; may stay null if Robot creation fails
  // (e.g. headless environment) — later capture() calls would then NPE.
  private Robot r;
  // Foreground image shown by the window.
  private ImageIcon imageIcon;

  final Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();

  /** Creates an ownerless, non-draggable transparent window. */
  public TransparentWindow( final ImageIcon image ) {
    this( null, image, false );
  }

  /** Creates a non-draggable transparent window with the given owner frame. */
  public TransparentWindow( Frame owner, final ImageIcon image ) {
    this( owner, image, false );
  }

  /**
   * Full constructor.
   *
   * @param owner     owning frame (may be null)
   * @param image     image to display
   * @param draggable when true, mouse drags move the window
   */
  public TransparentWindow( Frame owner, final ImageIcon image,
                            boolean draggable ) {
    super( owner);

    imageIcon = image;

    try {
      r = new Robot();
    }
    catch (AWTException awe) {
      // Best-effort: without a Robot the transparency effect cannot work.
      //System.out.println("robot excepton occurred");
    }

    if ( draggable ) {
      WindowDragger dragger = new WindowDragger();
      addMouseMotionListener(dragger);
      addMouseListener(dragger);
    }

    // On focus gain: shrink to 0x0 so the window itself is not captured,
    // re-grab the desktop, then restore size and re-center.
    addFocusListener(new FocusAdapter() {
      public void focusGained(FocusEvent e) {
        setSize(0, 0);
        capture();
        setSize(imageIcon.getIconWidth(), imageIcon.getIconHeight());
        setBounds( (screenSize.width / 2) - (getSize().width / 2),
                   (screenSize.height / 2) - (getSize().height / 2),
                   imageIcon.getIconWidth(), imageIcon.getIconHeight() );
      }
    });

    // Same capture-then-show dance at construction time.
    setSize( 0, 0 );
    capture();
    setSize( imageIcon.getIconWidth(), imageIcon.getIconHeight() );
    setLocation( (screenSize.width / 2) - (getSize().width / 2),
                 (screenSize.height / 2) - (getSize().height / 2) );
  }

  /** Grabs the entire desktop into {@code img}. */
  public void capture() {
    img = r.createScreenCapture(new Rectangle(0, 0, screenSize.width,
                                screenSize.height));
  }

  /**
   * Re-captures only the region under this window: hides the window,
   * grabs that rectangle, patches it into the cached full-screen capture,
   * and shows the window again.
   */
  public void captureX() {
    Rectangle rect = getBounds();
    setVisible(false);
    Image xmg = r.createScreenCapture(rect);
    img.getGraphics().drawImage(xmg, rect.x, rect.y, rect.width, rect.height,
                                null);
    setVisible(true);
  }

  /**
   * Lazily creates the off-screen buffer, then either refreshes the
   * background patch (clip differs from full size) or composites and blits.
   */
  public void paint(Graphics g) {
    Rectangle rect = g.getClipBounds();

    if (tim == null) {
      tim = createImage(getWidth(), getHeight());
      tig = (Graphics2D)tim.getGraphics();
    }

    if (!rect.getSize().equals(getSize())) {
      captureX();
    }
    else {
      paintP(g);
    }
  }

  /**
   * Composites into the off-screen buffer: desktop capture region under the
   * window, then the foreground image, then the anti-aliased text overlays;
   * finally blits the buffer to {@code g}.
   */
  public void paintP(Graphics g) {
    tig.drawImage(img, 0, 0, getWidth(), getHeight(), getX(), getY(),
                  getX() + getWidth(), getY() + getHeight(), null);
    tig.drawImage(imageIcon.getImage(), 0, 0, null);

    Font defaultFont = new Font( "Verdana", Font.PLAIN, 10 );

    for( int i = 0; i < stringOverlay.size(); i++ ) {
      StringOverlay ol = (StringOverlay)stringOverlay.get(i);
      Font font = ol.getFont();
      if ( font == null ) {
        font = defaultFont;
      }
      tig.setFont( font );
      tig.setRenderingHint( RenderingHints.KEY_ANTIALIASING,
                            RenderingHints.VALUE_ANTIALIAS_ON );
      tig.drawString( ol.getString(), ol.getXPos(), ol.getYPos() );
    }

    g.drawImage(tim, 0, 0, null);
  }

  public void update(Graphics g) {
    // on the mac we flicker...
    if ( !Util.isMacOS() ) {
      this.paint(g);
    }
  }

  /** Adds a text overlay using the default font. */
  public void addStringOverlay( String str, int x, int y ) {
    addStringOverlay( str, x, y, null );
  }

  /** Adds a text overlay drawn at (x, y) with the given font (null = default). */
  public void addStringOverlay( String str, int x, int y, Font font ) {
    stringOverlay.add( new StringOverlay(str, x, y, font) );
  }

/*
  private class BackgroundRefresher extends FocusAdapter {
    public void focusGained(FocusEvent e) {
      setSize(0, 0);
      capture();
      setSize(imageIcon.getIconWidth(), imageIcon.getIconHeight());
    }
  }
*/

  /** Moves the window with the mouse while a button is held down. */
  private class WindowDragger implements MouseListener, MouseMotionListener {
    // Press location within the window; null when no drag is in progress.
    private Point mp;

    public void mouseClicked(MouseEvent e) {}

    public void mouseDragged(MouseEvent e) {
      if (mp == null) {
        return;
      }
      Point p = e.getPoint();
      int x = (getX() + p.x) - mp.x;
      int y = (getY() + p.y) - mp.y;
      setLocation(x, y);
      // Repaint directly so the background follows the drag.
      paintP(getGraphics());
    }

    public void mouseEntered(MouseEvent e) {}

    public void mouseExited(MouseEvent e) {}

    public void mouseMoved(MouseEvent e) {}

    public void mousePressed(MouseEvent e) {
      mp = e.getPoint();
    }

    public void mouseReleased(MouseEvent e) {
      mp = null;
    }
  }
}

/** Immutable description of a text overlay: string, position and optional font. */
class StringOverlay {
  private String str = null;
  private int xPos = 0;
  private int yPos = 0;
  private Font font = null;

  public StringOverlay( String str, int x, int y ) {
    this( str, x, y, null );
  }

  public StringOverlay( String str, int x, int y, Font font ) {
    this.str = str;
    this.xPos = x;
    this.yPos = y;
    this.font = font;
  }

  public String getString() { return str; }

  public int getXPos() { return xPos; }

  public int getYPos() { return yPos; }

  public Font getFont() { return font; }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.cache.store.jdbc.dialect;

import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;

import java.util.*;

/**
 * Represents a dialect of SQL implemented by a particular RDBMS.
 * This basic implementation builds ANSI-ish SQL by plain string assembly;
 * vendor-specific dialects override the pieces that differ.
 */
public class BasicJdbcDialect implements JdbcDialect {
    /** Default max query parameters count. */
    protected static final int DFLT_MAX_PARAMS_CNT = 2000;

    /** Max query parameters count. */
    protected int maxParamsCnt = DFLT_MAX_PARAMS_CNT;

    /**
     * Concatenates elements using provided separator.
     *
     * @param elems Concatenated elements.
     * @param f closure used for transform element.
     * @param start Start string.
     * @param sep Separator.
     * @param end End string.
     * @return Concatenated string.
     */
    protected static <T> String mkString(Iterable<T> elems, C1<T, String> f, String start, String sep, String end) {
        SB sb = new SB(start);

        boolean first = true;

        for (T elem : elems) {
            if (!first)
                sb.a(sep);

            sb.a(f.apply(elem));

            first = false;
        }

        return sb.a(end).toString();
    }

    /**
     * Concatenates elements using provided separator.
     *
     * @param strs Concatenated string.
     * @param start Start string.
     * @param sep Delimiter.
     * @param end End string.
     * @return Concatenated string.
     */
    protected static String mkString(Iterable<String> strs, String start, String sep, String end) {
        // Identity transform: join the strings as-is.
        return mkString(strs, new C1<String, String>() {
            @Override public String apply(String s) {
                return s;
            }
        }, start, sep, end);
    }

    /**
     * Concatenates strings using provided separator.
     *
     * @param strs Concatenated string.
     * @param sep Separator.
     * @return Concatenated string.
     */
    protected static String mkString(Iterable<String> strs, String sep) {
        return mkString(strs, new C1<String, String>() {
            @Override public String apply(String s) {
                return s;
            }
        }, "", sep, "");
    }

    /**
     * Concatenates elements using provided delimiter.
     *
     * @param str Repeated string.
     * @param cnt Repeat count.
     * @param start Start string.
     * @param sep Separator.
     * @param end End string.
     * @return {@code start + str + sep + str + ... + str + end} with {@code cnt} copies of {@code str}.
     */
    protected static String repeat(String str, int cnt, String start, String sep, String end) {
        // Pre-size the buffer for the exact output length.
        SB sb = new SB(str.length() * cnt + sep.length() * (cnt - 1) + start.length() + end.length());

        sb.a(start);

        for (int i = 0; i < cnt; i++) {
            if (i > 0)
                sb.a(sep);

            sb.a(str);
        }

        return sb.a(end).toString();
    }

    /**
     * Construct where part of query.
     *
     * @param keyCols Database key columns.
     * @param keyCnt Key count.
     * @return WHERE clause body: {@code col IN (?,...)} for a single key column,
     *     or {@code (c1=? AND c2=?) OR (c1=? AND c2=?) ...} for composite keys.
     */
    private static String where(Collection<String> keyCols, int keyCnt) {
        SB sb = new SB();

        if (keyCols.size() == 1) {
            String keyCol = keyCols.iterator().next();

            if (keyCnt == 1)
                sb.a(keyCol + "=?");
            else
                sb.a(repeat("?", keyCnt, keyCol + " IN (", ",", ")"));
        }
        else {
            // Composite key: one "(c1=? AND c2=? ...)" group per key, OR-ed together.
            String keyParams = mkString(keyCols, new C1<String, String>() {
                @Override public String apply(String s) {
                    return s + "=?";
                }
            }, "(", " AND ", ")");

            sb.a(repeat(keyParams, keyCnt, "", " OR ", ""));
        }

        return sb.toString();
    }

    /** {@inheritDoc} */
    @Override public String loadCacheSelectRangeQuery(String fullTblName, Collection<String> keyCols) {
        String cols = mkString(keyCols, ",");

        // Every ?-th row (by ROWNUM) is selected as a range-split boundary.
        return String.format("SELECT %s FROM (SELECT %s, ROWNUM() AS rn FROM %s ORDER BY %s) WHERE mod(rn, ?) = 0",
            cols, cols, fullTblName, cols);
    }

    /** {@inheritDoc} */
    @Override public String loadCacheRangeQuery(String fullTblName, Collection<String> keyCols,
        Iterable<String> uniqCols, boolean appendLowerBound, boolean appendUpperBound) {
        assert appendLowerBound || appendUpperBound;

        SB sb = new SB();

        String[] cols = keyCols.toArray(new String[keyCols.size()]);

        if (appendLowerBound) {
            sb.a("(");

            // Lexicographic "greater than lower bound" over the composite key:
            // (c1=? AND c2=? AND c3>?) OR (c1=? AND c2>?) OR (c1>?)
            for (int cnt = keyCols.size(); cnt > 0; cnt--) {
                for (int j = 0; j < cnt; j++)
                    if (j == cnt - 1)
                        sb.a(cols[j]).a(" > ? ");
                    else
                        sb.a(cols[j]).a(" = ? AND ");

                if (cnt != 1)
                    sb.a("OR ");
            }

            sb.a(")");
        }

        if (appendLowerBound && appendUpperBound)
            sb.a(" AND ");

        if (appendUpperBound) {
            sb.a("(");

            // Same shape as above, but "<= upper bound" (inclusive).
            for (int cnt = keyCols.size(); cnt > 0; cnt--) {
                for (int j = 0; j < cnt; j++)
                    if (j == cnt - 1)
                        sb.a(cols[j]).a(" <= ? ");
                    else
                        sb.a(cols[j]).a(" = ? AND ");

                if (cnt != 1)
                    sb.a(" OR ");
            }

            sb.a(")");
        }

        return String.format("SELECT %s FROM %s WHERE %s", mkString(uniqCols, ","), fullTblName, sb.toString());
    }

    /** {@inheritDoc} */
    @Override public String loadCacheQuery(String fullTblName, Iterable<String> uniqCols) {
        return String.format("SELECT %s FROM %s", mkString(uniqCols, ","), fullTblName);
    }

    /** {@inheritDoc} */
    @Override public String loadQuery(String fullTblName, Collection<String> keyCols, Iterable<String> cols,
        int keyCnt) {
        assert !keyCols.isEmpty();

        String params = where(keyCols, keyCnt);

        return String.format("SELECT %s FROM %s WHERE %s", mkString(cols, ","), fullTblName, params);
    }

    /** {@inheritDoc} */
    @Override public String insertQuery(String fullTblName, Collection<String> keyCols, Collection<String> valCols) {
        Collection<String> cols = F.concat(false, keyCols, valCols);

        return String.format("INSERT INTO %s(%s) VALUES(%s)", fullTblName, mkString(cols, ","),
            repeat("?", cols.size(), "", ",", ""));
    }

    /** {@inheritDoc} */
    @Override public String updateQuery(String fullTblName, Collection<String> keyCols, Iterable<String> valCols) {
        String params = mkString(valCols, new C1<String, String>() {
            @Override public String apply(String s) {
                return s + "=?";
            }
        }, "", ",", "");

        return String.format("UPDATE %s SET %s WHERE %s", fullTblName, params, where(keyCols, 1));
    }

    /** {@inheritDoc} */
    @Override public boolean hasMerge() {
        // Plain ANSI SQL has no MERGE/UPSERT; vendor dialects override this.
        return false;
    }

    /** {@inheritDoc} */
    @Override public String mergeQuery(String fullTblName, Collection<String> keyCols, Collection<String> uniqCols) {
        return "";
    }

    /** {@inheritDoc} */
    @Override public String removeQuery(String fullTblName, Iterable<String> keyCols) {
        String whereParams = mkString(keyCols, new C1<String, String>() {
            @Override public String apply(String s) {
                return s + "=?";
            }
        }, "", " AND ", "");

        return String.format("DELETE FROM %s WHERE %s", fullTblName, whereParams);
    }

    /** {@inheritDoc} */
    @Override public int getMaxParamsCnt() {
        return maxParamsCnt;
    }

    /**
     * Set max query parameters count.
     *
     * @param maxParamsCnt Max query parameters count.
     */
    public void setMaxParamsCnt(int maxParamsCnt) {
        this.maxParamsCnt = maxParamsCnt;
    }
}
package com.felhr.usbserial;

import java.util.Arrays;

import android.hardware.usb.UsbConstants;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbEndpoint;
import android.hardware.usb.UsbInterface;
import android.hardware.usb.UsbRequest;
import android.util.Log;

import com.felhr.utils.SafeUsbRequest;

/**
 * USB serial driver for FTDI chips (FT232 family). FTDI devices use vendor
 * control requests for configuration and prepend a 2-byte modem-status header
 * to every 64-byte IN packet, which {@link FTDIUtilities} strips out.
 */
public class FTDISerialDevice extends UsbSerialDevice
{
    private static final String CLASS_ID = FTDISerialDevice.class.getSimpleName();

    // FTDI vendor request codes (bRequest).
    private static final int FTDI_SIO_RESET = 0;
    private static final int FTDI_SIO_MODEM_CTRL = 1;
    private static final int FTDI_SIO_SET_FLOW_CTRL = 2;
    private static final int FTDI_SIO_SET_BAUD_RATE = 3;
    private static final int FTDI_SIO_SET_DATA = 4;

    // Vendor | host-to-device request type.
    private static final int FTDI_REQTYPE_HOST2DEVICE = 0x40;

    /**
     * RTS and DTR values obtained from FreeBSD FTDI driver
     * https://github.com/freebsd/freebsd/blob/70b396ca9c54a94c3fad73c3ceb0a76dffbde635/sys/dev/usb/serial/uftdi_reg.h
     * High byte = enable mask, low byte = new line state.
     */
    private static final int FTDI_SIO_SET_DTR_MASK = 0x1;
    private static final int FTDI_SIO_SET_DTR_HIGH = (1 | (FTDI_SIO_SET_DTR_MASK << 8));
    private static final int FTDI_SIO_SET_DTR_LOW = (0 | (FTDI_SIO_SET_DTR_MASK << 8));
    private static final int FTDI_SIO_SET_RTS_MASK = 0x2;
    private static final int FTDI_SIO_SET_RTS_HIGH = (2 | (FTDI_SIO_SET_RTS_MASK << 8));
    private static final int FTDI_SIO_SET_RTS_LOW = (0 | (FTDI_SIO_SET_RTS_MASK << 8));

    // Precomputed FTDI baud-rate divisor values (wValue for FTDI_SIO_SET_BAUD_RATE).
    public static final int FTDI_BAUDRATE_300 = 0x2710;
    public static final int FTDI_BAUDRATE_600 = 0x1388;
    public static final int FTDI_BAUDRATE_1200 = 0x09c4;
    public static final int FTDI_BAUDRATE_2400 = 0x04e2;
    public static final int FTDI_BAUDRATE_4800 = 0x0271;
    public static final int FTDI_BAUDRATE_9600 = 0x4138;
    public static final int FTDI_BAUDRATE_19200 = 0x809c;
    public static final int FTDI_BAUDRATE_38400 = 0xc04e;
    public static final int FTDI_BAUDRATE_57600 = 0x0034;
    public static final int FTDI_BAUDRATE_115200 = 0x001a;
    public static final int FTDI_BAUDRATE_230400 = 0x000d;
    public static final int FTDI_BAUDRATE_460800 = 0x4006;
    public static final int FTDI_BAUDRATE_921600 = 0x8003;

    /***
     *  Default Serial Configuration
     *  Baud rate: 9600
     *  Data bits: 8
     *  Stop bits: 1
     *  Parity: None
     *  Flow Control: Off
     */
    private static final int FTDI_SET_DATA_DEFAULT = 0x0008;
    private static final int FTDI_SET_MODEM_CTRL_DEFAULT1 = 0x0101;
    private static final int FTDI_SET_MODEM_CTRL_DEFAULT2 = 0x0202;
    private static final int FTDI_SET_MODEM_CTRL_DEFAULT3 = 0x0100;
    private static final int FTDI_SET_MODEM_CTRL_DEFAULT4 = 0x0200;
    private static final int FTDI_SET_FLOW_CTRL_DEFAULT = 0x0000;

    // Accumulated wValue for FTDI_SIO_SET_DATA: data bits (low nibble),
    // parity (bits 8-10) and stop bits (bits 11-13) are OR-ed in.
    private int currentSioSetData = 0x0000;

    /**
     * Flow control variables
     */
    private boolean rtsCtsEnabled;
    private boolean dtrDsrEnabled;

    private boolean ctsState;
    private boolean dsrState;
    private boolean firstTime; // with this flag we set the CTS and DSR state to the first value received from the FTDI device

    private UsbCTSCallback ctsCallback;
    private UsbDSRCallback dsrCallback;

    private UsbInterface mInterface;
    private UsbEndpoint inEndpoint;
    private UsbEndpoint outEndpoint;
    private UsbRequest requestIN;

    public FTDIUtilities ftdiUtilities;

    private UsbSerialInterface.UsbParityCallback parityCallback;
    private UsbSerialInterface.UsbFrameCallback frameCallback;
    private UsbSerialInterface.UsbOverrunCallback overrunCallback;
    private UsbSerialInterface.UsbBreakCallback breakCallback;

    public FTDISerialDevice(UsbDevice device, UsbDeviceConnection connection)
    {
        this(device, connection, -1);
    }

    public FTDISerialDevice(UsbDevice device, UsbDeviceConnection connection, int iface)
    {
        super(device, connection);
        ftdiUtilities = new FTDIUtilities();
        rtsCtsEnabled = false;
        dtrDsrEnabled = false;
        ctsState = true;
        dsrState = true;
        firstTime = true;
        // Negative iface means "use the first interface".
        mInterface = device.getInterface(iface >= 0 ? iface : 0);
    }

    /**
     * Opens the device in asynchronous mode: claims the interface, applies the
     * default 9600-8-N-1 setup and starts the read/write worker threads.
     *
     * @return true on success, false if the interface could not be claimed or
     *         any of the initial control transfers failed.
     */
    @Override
    public boolean open()
    {
        boolean ret = openFTDI();

        if(ret)
        {
            // Initialize UsbRequest
            requestIN = new SafeUsbRequest();
            requestIN.initialize(connection, inEndpoint);

            // Restart the working thread if it has been killed before and claim interface
            restartWorkingThread();
            restartWriteThread();

            // Pass references to the threads
            setThreadsParams(requestIN, outEndpoint);

            asyncMode = true;

            return true;
        }else
        {
            return false;
        }
    }

    /** Drops DTR/RTS, stops the worker threads and releases the interface. */
    @Override
    public void close()
    {
        setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SET_MODEM_CTRL_DEFAULT3, 0, null);
        setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SET_MODEM_CTRL_DEFAULT4, 0, null);
        currentSioSetData = 0x0000;
        killWorkingThread();
        killWriteThread();
        connection.releaseInterface(mInterface);
    }

    /**
     * Opens the device in synchronous (blocking) mode; use
     * {@link #syncRead(byte[], int)} to read.
     *
     * @return true on success.
     */
    @Override
    public boolean syncOpen()
    {
        boolean ret = openFTDI();
        if(ret)
        {
            setSyncParams(inEndpoint, outEndpoint);
            asyncMode = false;
            return true;
        }else
        {
            return false;
        }
    }

    /** Synchronous-mode counterpart of {@link #close()}. */
    @Override
    public void syncClose()
    {
        setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SET_MODEM_CTRL_DEFAULT3, 0, null);
        setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SET_MODEM_CTRL_DEFAULT4, 0, null);
        currentSioSetData = 0x0000;
        connection.releaseInterface(mInterface);
    }

    /**
     * Maps the requested baud rate to the nearest supported FTDI divisor
     * (rounding up to the next standard rate) and sends it to the device.
     */
    @Override
    public void setBaudRate(int baudRate)
    {
        int value = 0;
        if(baudRate >= 0 && baudRate <= 300 )
            value = FTDI_BAUDRATE_300;
        else if(baudRate > 300 && baudRate <= 600)
            value = FTDI_BAUDRATE_600;
        else if(baudRate > 600 && baudRate <= 1200)
            value = FTDI_BAUDRATE_1200;
        else if(baudRate > 1200 && baudRate <= 2400)
            value = FTDI_BAUDRATE_2400;
        else if(baudRate > 2400 && baudRate <= 4800)
            value = FTDI_BAUDRATE_4800;
        else if(baudRate > 4800 && baudRate <= 9600)
            value = FTDI_BAUDRATE_9600;
        else if(baudRate > 9600 && baudRate <= 19200)
            value = FTDI_BAUDRATE_19200;
        else if(baudRate > 19200 && baudRate <= 38400)
            value = FTDI_BAUDRATE_38400;
        // FIX: lower bound was "> 19200", which only worked because the previous
        // branch already consumed (19200, 38400]. Normalized for consistency.
        else if(baudRate > 38400 && baudRate <= 57600)
            value = FTDI_BAUDRATE_57600;
        else if(baudRate > 57600 && baudRate <= 115200)
            value = FTDI_BAUDRATE_115200;
        else if(baudRate > 115200 && baudRate <= 230400)
            value = FTDI_BAUDRATE_230400;
        else if(baudRate > 230400 && baudRate <= 460800)
            value = FTDI_BAUDRATE_460800;
        else if(baudRate > 460800 && baudRate <= 921600)
            value = FTDI_BAUDRATE_921600;
        else if(baudRate > 921600)
            value = FTDI_BAUDRATE_921600; // clamp to the fastest supported rate
        else
            value = FTDI_BAUDRATE_9600;   // negative input falls back to the default
        setControlCommand(FTDI_SIO_SET_BAUD_RATE, value, 0, null);
    }

    /**
     * Sets the data-bit count. The low nibble of the SIO_SET_DATA word encodes
     * the number of data bits (5..8); unknown values fall back to 8.
     */
    @Override
    public void setDataBits(int dataBits)
    {
        switch(dataBits)
        {
            case UsbSerialInterface.DATA_BITS_5:
                currentSioSetData |= 1;
                currentSioSetData &= ~(1 << 1);
                currentSioSetData |= (1 << 2);
                currentSioSetData &= ~(1 << 3);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.DATA_BITS_6:
                currentSioSetData &= ~1;
                currentSioSetData |= (1 << 1);
                currentSioSetData |= (1 << 2);
                currentSioSetData &= ~(1 << 3);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.DATA_BITS_7:
                currentSioSetData |= 1;
                currentSioSetData |= (1 << 1);
                currentSioSetData |= (1 << 2);
                currentSioSetData &= ~(1 << 3);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.DATA_BITS_8:
                currentSioSetData &= ~1;
                currentSioSetData &= ~(1 << 1);
                currentSioSetData &= ~(1 << 2);
                currentSioSetData |= (1 << 3);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            default:
                currentSioSetData &= ~1;
                currentSioSetData &= ~(1 << 1);
                currentSioSetData &= ~(1 << 2);
                currentSioSetData |= (1 << 3);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
        }
    }

    /**
     * Sets the stop-bit count. Bits 11-13 of the SIO_SET_DATA word encode the
     * stop bits; unknown values fall back to 1 stop bit.
     */
    @Override
    public void setStopBits(int stopBits)
    {
        switch(stopBits)
        {
            case UsbSerialInterface.STOP_BITS_1:
                currentSioSetData &= ~(1 << 11);
                currentSioSetData &= ~(1 << 12);
                currentSioSetData &= ~(1 << 13);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.STOP_BITS_15:
                currentSioSetData |= (1 << 11);
                currentSioSetData &= ~(1 << 12);
                currentSioSetData &= ~(1 << 13);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.STOP_BITS_2:
                currentSioSetData &= ~(1 << 11);
                currentSioSetData |= (1 << 12);
                currentSioSetData &= ~(1 << 13);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            default:
                currentSioSetData &= ~(1 << 11);
                currentSioSetData &= ~(1 << 12);
                currentSioSetData &= ~(1 << 13);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
        }
    }

    /**
     * Sets the parity mode. Bits 8-10 of the SIO_SET_DATA word encode parity;
     * unknown values fall back to none.
     */
    @Override
    public void setParity(int parity)
    {
        switch(parity)
        {
            case UsbSerialInterface.PARITY_NONE:
                currentSioSetData &= ~(1 << 8);
                currentSioSetData &= ~(1 << 9);
                currentSioSetData &= ~(1 << 10);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.PARITY_ODD:
                currentSioSetData |= (1 << 8);
                currentSioSetData &= ~(1 << 9);
                currentSioSetData &= ~(1 << 10);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.PARITY_EVEN:
                currentSioSetData &= ~(1 << 8);
                currentSioSetData |= (1 << 9);
                currentSioSetData &= ~(1 << 10);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.PARITY_MARK:
                currentSioSetData |= (1 << 8);
                currentSioSetData |= (1 << 9);
                currentSioSetData &= ~(1 << 10);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            case UsbSerialInterface.PARITY_SPACE:
                currentSioSetData &= ~(1 << 8);
                currentSioSetData &= ~(1 << 9);
                currentSioSetData |= (1 << 10);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
            default:
                currentSioSetData &= ~(1 << 8);
                currentSioSetData &= ~(1 << 9);
                currentSioSetData &= ~(1 << 10);
                setControlCommand(FTDI_SIO_SET_DATA, currentSioSetData, 0, null);
                break;
        }
    }

    /**
     * Configures flow control. The wIndex of SIO_SET_FLOW_CTRL selects the
     * mode (1 = RTS/CTS, 2 = DTR/DSR, 4 = XON/XOFF); for XON/XOFF the wValue
     * carries the XOFF (0x13) and XON (0x11) characters.
     */
    @Override
    public void setFlowControl(int flowControl)
    {
        switch(flowControl)
        {
            case UsbSerialInterface.FLOW_CONTROL_OFF:
                setControlCommand(FTDI_SIO_SET_FLOW_CTRL, FTDI_SET_FLOW_CTRL_DEFAULT, 0, null);
                rtsCtsEnabled = false;
                dtrDsrEnabled = false;
                break;
            case UsbSerialInterface.FLOW_CONTROL_RTS_CTS:
                rtsCtsEnabled = true;
                dtrDsrEnabled = false;
                int indexRTSCTS = 0x0001;
                setControlCommand(FTDI_SIO_SET_FLOW_CTRL, FTDI_SET_FLOW_CTRL_DEFAULT, indexRTSCTS, null);
                break;
            case UsbSerialInterface.FLOW_CONTROL_DSR_DTR:
                dtrDsrEnabled = true;
                rtsCtsEnabled = false;
                int indexDSRDTR = 0x0002;
                setControlCommand(FTDI_SIO_SET_FLOW_CTRL, FTDI_SET_FLOW_CTRL_DEFAULT, indexDSRDTR, null);
                break;
            case UsbSerialInterface.FLOW_CONTROL_XON_XOFF:
                int indexXONXOFF = 0x0004;
                int wValue = 0x1311; // XOFF = DC3 (0x13), XON = DC1 (0x11)
                setControlCommand(FTDI_SIO_SET_FLOW_CTRL, wValue, indexXONXOFF, null);
                break;
            default:
                setControlCommand(FTDI_SIO_SET_FLOW_CTRL, FTDI_SET_FLOW_CTRL_DEFAULT, 0, null);
                break;
        }
    }

    /** Drives the RTS line high or low. */
    @Override
    public void setRTS(boolean state)
    {
        if(state)
        {
            setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SIO_SET_RTS_HIGH, 0, null);
        }else
        {
            setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SIO_SET_RTS_LOW, 0, null);
        }
    }

    /** Drives the DTR line high or low. */
    @Override
    public void setDTR(boolean state)
    {
        if(state)
        {
            setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SIO_SET_DTR_HIGH, 0, null);
        }else
        {
            setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SIO_SET_DTR_LOW, 0, null);
        }
    }

    /** Registers a listener for CTS line changes (fired by checkModemStatus). */
    @Override
    public void getCTS(UsbCTSCallback ctsCallback)
    {
        this.ctsCallback = ctsCallback;
    }

    /** Registers a listener for DSR line changes. */
    @Override
    public void getDSR(UsbDSRCallback dsrCallback)
    {
        this.dsrCallback = dsrCallback;
    }

    /** Registers a listener for break-interrupt conditions. */
    @Override
    public void getBreak(UsbBreakCallback breakCallback)
    {
        this.breakCallback = breakCallback;
    }

    /** Registers a listener for framing errors. */
    @Override
    public void getFrame(UsbFrameCallback frameCallback)
    {
        this.frameCallback = frameCallback;
    }

    /** Registers a listener for receiver-overrun errors. */
    @Override
    public void getOverrun(UsbOverrunCallback overrunCallback)
    {
        this.overrunCallback = overrunCallback;
    }

    /** Registers a listener for parity errors. */
    @Override
    public void getParity(UsbParityCallback parityCallback)
    {
        this.parityCallback = parityCallback;
    }

    /**
     * Claims the interface, locates the bulk IN/OUT endpoints and applies the
     * default 9600-8-N-1, no-flow-control configuration.
     *
     * @return true if the interface was claimed and every setup transfer succeeded.
     */
    private boolean openFTDI()
    {
        if(connection.claimInterface(mInterface, true))
        {
            Log.i(CLASS_ID, "Interface succesfully claimed");
        }else
        {
            Log.i(CLASS_ID, "Interface could not be claimed");
            return false;
        }

        // Assign endpoints: the bulk-IN endpoint is detected explicitly,
        // everything else is treated as the OUT endpoint.
        int numberEndpoints = mInterface.getEndpointCount();
        for(int i=0;i<=numberEndpoints-1;i++)
        {
            UsbEndpoint endpoint = mInterface.getEndpoint(i);
            if(endpoint.getType() == UsbConstants.USB_ENDPOINT_XFER_BULK
                    && endpoint.getDirection() == UsbConstants.USB_DIR_IN)
            {
                inEndpoint = endpoint;
            }else
            {
                outEndpoint = endpoint;
            }
        }

        // Default Setup
        firstTime = true;
        if(setControlCommand(FTDI_SIO_RESET, 0x00, 0, null) < 0)
            return false;
        if(setControlCommand(FTDI_SIO_SET_DATA, FTDI_SET_DATA_DEFAULT, 0, null) < 0)
            return false;
        currentSioSetData = FTDI_SET_DATA_DEFAULT;
        if(setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SET_MODEM_CTRL_DEFAULT1, 0, null) < 0)
            return false;
        if(setControlCommand(FTDI_SIO_MODEM_CTRL, FTDI_SET_MODEM_CTRL_DEFAULT2, 0, null) < 0)
            return false;
        if(setControlCommand(FTDI_SIO_SET_FLOW_CTRL, FTDI_SET_FLOW_CTRL_DEFAULT, 0, null) < 0)
            return false;
        if(setControlCommand(FTDI_SIO_SET_BAUD_RATE, FTDI_BAUDRATE_9600, 0, null) < 0)
            return false;

        // Flow control disabled by default
        rtsCtsEnabled = false;
        dtrDsrEnabled = false;

        return true;
    }

    /**
     * Sends an FTDI vendor control request.
     *
     * @param request bRequest code (one of the FTDI_SIO_* constants).
     * @param value   wValue payload.
     * @param index   extra index bits OR-ed with the 1-based port number
     *                (FTDI ports are numbered from 1).
     * @param data    optional data stage, may be null.
     * @return number of bytes transferred, or a negative value on failure.
     */
    private int setControlCommand(int request, int value, int index, byte[] data)
    {
        int dataLength = 0;
        if(data != null)
        {
            dataLength = data.length;
        }
        int response = connection.controlTransfer(FTDI_REQTYPE_HOST2DEVICE, request, value,
                mInterface.getId() + 1 + index, data, dataLength, USB_TIMEOUT);
        Log.i(CLASS_ID,"Control Transfer Response: " + String.valueOf(response));
        return response;
    }

    /**
     * Helpers for the FTDI-specific framing: every 64-byte IN packet starts
     * with a 2-byte modem-status header that must be stripped before the
     * payload is handed to the user.
     */
    public class FTDIUtilities
    {
        // Special treatment needed to FTDI devices

        /**
         * Strips the per-packet 2-byte headers from a raw IN transfer.
         *
         * @param ftdiData raw bytes as received from the bulk-IN endpoint.
         * @return payload bytes only.
         */
        public byte[] adaptArray(byte[] ftdiData)
        {
            int length = ftdiData.length;
            if(length > 64)
            {
                int n = 1;
                int p = 64;
                // Precalculate length without FTDI headers
                while(p < length)
                {
                    n++;
                    p = n*64;
                }
                int realLength = length - n*2;
                byte[] data = new byte[realLength];
                copyData(ftdiData, data);
                return data;
            }else
            {
                // Single packet: just drop the leading 2-byte header.
                return Arrays.copyOfRange(ftdiData, 2, length);
            }
        }

        /**
         * Decodes the 2-byte modem-status header and fires the registered
         * CTS/DSR/parity/frame/overrun/break callbacks on state changes.
         */
        public void checkModemStatus(byte[] data)
        {
            if(data.length == 0) // Safeguard for zero length arrays
                return;

            boolean cts = (data[0] & 0x10) == 0x10;
            boolean dsr = (data[0] & 0x20) == 0x20;

            if(firstTime) // First modem status received
            {
                ctsState = cts;
                dsrState = dsr;
                if(rtsCtsEnabled && ctsCallback != null)
                    ctsCallback.onCTSChanged(ctsState);
                if(dtrDsrEnabled && dsrCallback != null)
                    dsrCallback.onDSRChanged(dsrState);
                firstTime = false;
                return;
            }

            if(rtsCtsEnabled && cts != ctsState && ctsCallback != null) //CTS
            {
                ctsState = !ctsState;
                ctsCallback.onCTSChanged(ctsState);
            }

            if(dtrDsrEnabled && dsr != dsrState && dsrCallback != null) //DSR
            {
                dsrState = !dsrState;
                dsrCallback.onDSRChanged(dsrState);
            }

            if(parityCallback != null) // Parity error checking
            {
                if((data[1] & 0x04) == 0x04)
                {
                    parityCallback.onParityError();
                }
            }

            if(frameCallback != null) // Frame error checking
            {
                if((data[1] & 0x08) == 0x08)
                {
                    frameCallback.onFramingError();
                }
            }

            if(overrunCallback != null) // Overrun error checking
            {
                if((data[1] & 0x02) == 0x02)
                {
                    overrunCallback.onOverrunError();
                }
            }

            if(breakCallback != null) // Break interrupt checking
            {
                if((data[1] & 0x10) == 0x10)
                {
                    breakCallback.onBreakInterrupt();
                }
            }
        }

        // Copy data without FTDI headers: skips bytes 0-1 and the 2 header
        // bytes at the start of each subsequent 64-byte packet.
        private void copyData(byte[] src, byte[] dst)
        {
            int i = 0; // src index
            int j = 0; // dst index
            while(i <= src.length-1)
            {
                if(i != 0 && i != 1)
                {
                    if(i % 64 == 0 && i >= 64)
                    {
                        i += 2; // skip the 2-byte header of this packet
                    }else
                    {
                        dst[j] = src[i];
                        i++;
                        j++;
                    }
                }else
                {
                    i++; // skip the header of the first packet
                }
            }
        }
    }

    /**
     * Blocking read for synchronous mode.
     *
     * @param buffer  destination for payload bytes; at most buffer.length bytes are written.
     * @param timeout total timeout in ms; 0 blocks until data arrives.
     * @return number of payload bytes read (may be 0 on timeout/error), or -1 if
     *         the device was opened in async mode.
     */
    @Override
    public int syncRead(byte[] buffer, int timeout)
    {
        long beginTime = System.currentTimeMillis();
        long stopTime = beginTime + timeout;

        if(asyncMode)
        {
            return -1;
        }

        if(buffer == null)
        {
            return 0;
        }

        // Over-allocate the transfer buffer so the 2-byte header of each
        // 62-byte payload chunk fits alongside buffer.length payload bytes.
        int n = buffer.length / 62;
        if(buffer.length % 62 != 0)
        {
            n++;
        }

        byte[] tempBuffer = new byte[buffer.length + n * 2];

        int readen = 0;

        do
        {
            int timeLeft = 0;
            if(timeout > 0)
            {
                timeLeft = (int) (stopTime - System.currentTimeMillis());
                if (timeLeft <= 0)
                {
                    break;
                }
            }

            int numberBytes = connection.bulkTransfer(inEndpoint, tempBuffer, tempBuffer.length, timeLeft);

            if(numberBytes < 0)
            {
                // FIX: transfer error. Previously this kept looping, which with
                // timeout == 0 became an infinite busy-spin on a failing endpoint.
                break;
            }

            if(numberBytes > 2) // Data received
            {
                byte[] newBuffer = this.ftdiUtilities.adaptArray(tempBuffer);
                // FIX: clamp the copy length. Copying buffer.length unconditionally
                // could throw ArrayIndexOutOfBoundsException when the adapted
                // array is shorter than the caller's buffer.
                System.arraycopy(newBuffer, 0, buffer, 0, Math.min(newBuffer.length, buffer.length));

                int p = numberBytes / 64;
                if(numberBytes % 64 != 0)
                {
                    p++;
                }
                readen = numberBytes - p * 2; // payload size = raw size minus per-packet headers
            }
        }while(readen <= 0);

        return readen;
    }
}
package rajawali.animation;

import android.os.SystemClock;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;

import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;

import rajawali.ATransformable3D;

/**
 * Base class for timer-driven 3D animations. A {@link Timer} fires
 * {@link UpdateTimeTask} at {@code mUpdateRate} intervals; each tick computes
 * the elapsed fraction of {@code mDuration}, runs it through the interpolator
 * and hands the result to {@link #applyTransformation(float)}, which
 * subclasses override to move/rotate/scale the target transformable.
 */
public class Animation3D {
	// Repeat-count sentinel: repeat forever.
	public static final int INFINITE = -1;
	// Repeat modes: RESTART replays forward each time, REVERSE ping-pongs.
	public static final int RESTART = 1;
	protected int mRepeatMode = RESTART;
	public static final int REVERSE = 2;

	// Total animation duration in ms.
	protected long mDuration;
	// Active window within [0, mDuration): transformation is only applied while
	// elapsed time is in (mStart, mStart + mLength). -1 means "not set yet";
	// setDuration() defaults them to 0 and mDuration respectively.
	protected long mStart = -1;
	protected long mLength = -1;
	protected Interpolator mInterpolator;
	protected int mRepeatCount;
	protected int mNumRepeats;
	// +1 = forward, -1 = backward (flipped on each repeat in REVERSE mode).
	protected int mDirection = 1;
	protected long mStartOffset;
	// Wall-clock (uptime) timestamp the current pass started at; rebased on
	// repeat and after a pause so elapsed time stays continuous.
	protected long mStartTime;
	// Delay in ms before the first timer tick.
	protected long mDelay;
	// Timer period in ms (~60 fps by default).
	protected long mUpdateRate = 1000 / 60;
	protected boolean mHasStarted;
	protected boolean mHasEnded;
	protected boolean mIsPaused;
	protected List<Animation3DListener> mAnimationListeners = new ArrayList<Animation3DListener>();
	protected Timer mTimer;
	protected ATransformable3D mTransformable3D;
	// Self-reference handed to listeners from the inner TimerTask.
	protected Animation3D mInstance;
	protected float mInterpolatedTime;

	public Animation3D() {
		mInstance = this;
	}

	/** Stops the animation by killing its timer (if one is running). */
	public void cancel() {
		if (mTimer != null) {
			TimerManager.getInstance().killTimer(mTimer);
		}
	}

	/** Rewinds the animation: resets the start timestamp and repeat counter. */
	public void reset() {
		mStartTime = SystemClock.uptimeMillis();
		mNumRepeats = 0;
	}

	/**
	 * Starts (or restarts) the animation. Falls back to a LinearInterpolator
	 * when none was set, then schedules the update task at fixed rate.
	 */
	public void start() {
		if (mInterpolator == null)
			mInterpolator = new LinearInterpolator();
		reset();
		if (mTimer == null)
			mTimer = TimerManager.getInstance().createNewTimer();
		try {
			mTimer.scheduleAtFixedRate(new UpdateTimeTask(), mDelay, mUpdateRate);
		} catch (IllegalStateException e) {
			// timer was cancelled
			mTimer = TimerManager.getInstance().createNewTimer();
			// try once more
			try {
				mTimer.scheduleAtFixedRate(new UpdateTimeTask(), mDelay, mUpdateRate);
			} catch (IllegalStateException ie) {
				// second failure is deliberately ignored: the animation simply
				// does not start rather than crashing the caller
			}
		}
		for (int i = 0, j = mAnimationListeners.size(); i < j; i++)
			mAnimationListeners.get(i).onAnimationStart(this);
	}

	/**
	 * Applies the current interpolated time. Base implementation only records
	 * the value; subclasses override to transform {@code mTransformable3D}.
	 */
	protected void applyTransformation(float interpolatedTime) {
		this.mInterpolatedTime = interpolatedTime;
	}

	/** @return the most recently applied interpolated time in [0, 1]. */
	public float getCurrentTime() {
		return this.mInterpolatedTime;
	}

	public ATransformable3D getTransformable3D() {
		return mTransformable3D;
	}

	public void setTransformable3D(ATransformable3D transformable3D) {
		mTransformable3D = transformable3D;
	}

	/** Replaces all registered listeners with the single given listener. */
	public void setAnimationListener(Animation3DListener animationListener) {
		mAnimationListeners.clear();
		mAnimationListeners.add(animationListener);
	}

	/** Adds a listener without removing the existing ones. */
	public void addAnimationListener(Animation3DListener animationListener) {
		mAnimationListeners.add(animationListener);
	}

	public long getDuration() {
		return mDuration;
	}

	/**
	 * Sets the total duration and, when not yet configured, defaults the
	 * active window to cover the whole animation (start=0, length=duration).
	 */
	public void setDuration(long duration) {
		mDuration = duration;
		if (mLength < 0) {
			mLength = mDuration;
		}
		if (mStart < 0) {
			mStart = 0;
		}
	}

	public long getStart() {
		return mStart;
	}

	public void setStart(long start) {
		mStart = start;
	}

	public long getLength() {
		return mLength;
	}

	public void setLength(long length) {
		mLength = length;
	}

	public Interpolator getInterpolator() {
		return mInterpolator;
	}

	/**
	 * AccelerateDecelerateInterpolator, AccelerateInterpolator,
	 * AnticipateInterpolator, AnticipateOvershootInterpolator,
	 * BounceInterpolator, CycleInterpolator, DecelerateInterpolator,
	 * LinearInterpolator, OvershootInterpolator
	 *
	 * @param interpolator
	 */
	public void setInterpolator(Interpolator interpolator) {
		mInterpolator = interpolator;
	}

	public int getRepeatCount() {
		return mRepeatCount;
	}

	public void setRepeatCount(int repeatCount) {
		mRepeatCount = repeatCount;
	}

	public int getRepeatMode() {
		return mRepeatMode;
	}

	public void setRepeatMode(int repeatMode) {
		mRepeatMode = repeatMode;
	}

	public boolean isHasStarted() {
		return mHasStarted;
	}

	public void setHasStarted(boolean hasStarted) {
		this.mHasStarted = hasStarted;
	}

	public boolean isHasEnded() {
		return mHasEnded;
	}

	public void setHasEnded(boolean hasEnded) {
		this.mHasEnded = hasEnded;
	}

	public boolean isPaused() {
		return mIsPaused;
	}

	/** Pausing freezes elapsed time; resuming rebases mStartTime (see run()). */
	public void setPaused(boolean doPause) {
		mIsPaused = doPause;
	}

	public long getDelay() {
		return mDelay;
	}

	public void setDelay(long delay) {
		mDelay = delay;
	}

	public long getUpdateRate() {
		return mUpdateRate;
	}

	public void setUpdateRate(long updateRate) {
		this.mUpdateRate = updateRate;
	}

	/**
	 * Per-tick worker: tracks pause state, elapsed time, repeats and direction,
	 * then applies the clamped, interpolated progress and notifies listeners.
	 */
	class UpdateTimeTask extends TimerTask {
		long millis;
		float interpolatedTime;
		long timeInPause;
		boolean wasPaused = false;
		boolean firstRun = true;
		int i, j;

		public void run() {
			if (firstRun) {
				// Rebase the start time on the first actual tick so the delay
				// before scheduling does not count as elapsed animation time.
				firstRun = false;
				mStartTime = SystemClock.uptimeMillis();
			}
			if (mIsPaused) {
				if (!wasPaused)
					timeInPause = SystemClock.uptimeMillis();
				wasPaused = true;
				return;
			} else {
				if (wasPaused)
					// Shift the start time forward by the paused interval so
					// elapsed time resumes where it left off.
					mStartTime += SystemClock.uptimeMillis() - timeInPause;
				wasPaused = false;
			}
			millis = SystemClock.uptimeMillis() - mStartTime;
			if (millis > mDuration) {
				if (mRepeatCount == mNumRepeats) {
					// Done: mark ended, stop the timer, notify listeners.
					setHasEnded(true);
					cancel();
					for (i = 0, j = mAnimationListeners.size(); i < j; i++)
						mAnimationListeners.get(i).onAnimationEnd(mInstance);
				} else {
					// Start the next pass (flip direction in REVERSE mode).
					if (mRepeatMode == REVERSE)
						mDirection *= -1;
					mStartTime = SystemClock.uptimeMillis();
					mNumRepeats++;
					for (i = 0, j = mAnimationListeners.size(); i < j; i++)
						mAnimationListeners.get(i).onAnimationRepeat(mInstance);
				}
				millis = mDuration;
			}
			if (mDirection == -1) {
				millis = mDuration - millis;
			}
			// Only apply within the configured active window (mStart, mStart + mLength).
			if (millis > mStart && millis < (mStart + mLength)) {
				float diff = (float) (millis - mStart);
				interpolatedTime = mInterpolator.getInterpolation(diff / (float) mLength);
				setHasStarted(true);
				// Clamp to [0, 1]: some interpolators over/undershoot.
				applyTransformation(interpolatedTime > 1 ? 1 : interpolatedTime < 0 ? 0 : interpolatedTime);
				for (i = 0, j = mAnimationListeners.size(); i < j; i++)
					mAnimationListeners.get(i).onAnimationUpdate(mInstance, interpolatedTime);
			}
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.iterate; import static org.apache.phoenix.coprocessor.BaseScannerRegionObserver.EXPECTED_UPPER_REGION_KEY; import static org.apache.phoenix.monitoring.PhoenixMetrics.CountMetric.FAILED_QUERY; import static org.apache.phoenix.monitoring.PhoenixMetrics.CountMetric.QUERY_TIMEOUT; import static org.apache.phoenix.util.ByteUtil.EMPTY_BYTE_ARRAY; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NavigableSet; import java.util.Queue; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import java.util.UUID; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; import org.apache.hadoop.hbase.filter.PageFilter; import org.apache.hadoop.hbase.util.Bytes; import 
org.apache.hadoop.hbase.util.Pair; import org.apache.phoenix.compile.QueryPlan; import org.apache.phoenix.compile.RowProjector; import org.apache.phoenix.compile.ScanRanges; import org.apache.phoenix.compile.StatementContext; import org.apache.phoenix.exception.SQLExceptionCode; import org.apache.phoenix.exception.SQLExceptionInfo; import org.apache.phoenix.filter.ColumnProjectionFilter; import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr; import org.apache.phoenix.parse.FilterableStatement; import org.apache.phoenix.parse.HintNode.Hint; import org.apache.phoenix.query.ConnectionQueryServices; import org.apache.phoenix.query.KeyRange; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; import org.apache.phoenix.schema.MetaDataClient; import org.apache.phoenix.schema.PColumnFamily; import org.apache.phoenix.schema.PTable; import org.apache.phoenix.schema.PTable.IndexType; import org.apache.phoenix.schema.PTable.ViewType; import org.apache.phoenix.schema.SaltingUtil; import org.apache.phoenix.schema.StaleRegionBoundaryCacheException; import org.apache.phoenix.schema.TableRef; import org.apache.phoenix.schema.stats.GuidePostsInfo; import org.apache.phoenix.schema.stats.PTableStats; import org.apache.phoenix.util.LogUtil; import org.apache.phoenix.util.SQLCloseables; import org.apache.phoenix.util.ScanUtil; import org.apache.phoenix.util.SchemaUtil; import org.apache.phoenix.util.ServerUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; /** * * Class that parallelizes the scan over a table using the ExecutorService provided. 
Each region of the table will be scanned in parallel with
 * the results accessible through {@link #getIterators()}
 *
 *
 * @since 0.1
 */
public abstract class BaseResultIterators extends ExplainTable implements ResultIterators {
    private static final Logger logger = LoggerFactory.getLogger(BaseResultIterators.class);
    // Sizing hint only; used to pre-size the splits list, not a hard limit.
    private static final int ESTIMATED_GUIDEPOSTS_PER_REGION = 20;

    // Outer list = groups of scans that must be merge-sorted relative to each other;
    // inner list = scans whose results may be concatenated directly.
    private final List<List<Scan>> scans;
    private final List<KeyRange> splits;
    private final PTableStats tableStats;
    private final byte[] physicalTableName;
    private final QueryPlan plan;
    // Shared UUID used to correlate log lines across all scans of one query.
    protected final String scanId;
    // TODO: too much nesting here - breakup into new classes.
    // Every batch of futures ever submitted (including resubmissions after stale
    // region-boundary recovery), kept so close() can cancel still-queued work.
    private final List<List<List<Pair<Scan,Future<PeekingResultIterator>>>>> allFutures;

    /** Maps a region location to the KeyRange [startKey, endKey) it covers. */
    static final Function<HRegionLocation, KeyRange> TO_KEY_RANGE = new Function<HRegionLocation, KeyRange>() {
        @Override
        public KeyRange apply(HRegionLocation region) {
            return KeyRange.getKeyRange(region.getRegionInfo().getStartKey(), region.getRegionInfo().getEndKey());
        }
    };

    private PTable getTable() {
        return plan.getTableRef().getTable();
    }

    /**
     * Whether guide-post statistics should be consulted for this query.
     */
    private boolean useStats() {
        Scan scan = context.getScan();
        boolean isPointLookup = context.getScanRanges().isPointLookup();
        /*
         * Don't use guide posts if:
         * 1) We're doing a point lookup, as HBase is fast enough at those
         *    to not need them to be further parallelized. TODO: perf test to verify
         * 2) We're collecting stats, as in this case we need to scan entire
         *    regions worth of data to track where to put the guide posts.
         */
        if (isPointLookup || ScanUtil.isAnalyzeTable(scan)) {
            return false;
        }
        return true;
    }

    /**
     * Prepares the shared Scan (projection, filters, page limit) and computes the
     * parallel scan groups and their split key ranges for the given plan.
     *
     * @param plan the compiled query plan to parallelize
     * @param perScanLimit if non-null, a PageFilter limit applied to each scan
     * @throws SQLException if region/stats lookup fails while computing scans
     */
    public BaseResultIterators(QueryPlan plan, Integer perScanLimit) throws SQLException {
        super(plan.getContext(), plan.getTableRef(), plan.getGroupBy(), plan.getOrderBy(), plan.getStatement().getHint(), plan.getLimit());
        this.plan = plan;
        StatementContext context = plan.getContext();
        TableRef tableRef = plan.getTableRef();
        PTable table = tableRef.getTable();
        FilterableStatement statement = plan.getStatement();
        RowProjector projector = plan.getProjector();
        physicalTableName = table.getPhysicalName().getBytes();
        tableStats = useStats() ? new MetaDataClient(context.getConnection()).getTableStats(table) : PTableStats.EMPTY_STATS;
        Scan scan = context.getScan();
        // Used to tie all the scans together during logging
        scanId = UUID.randomUUID().toString();

        // keyOnlyFilter must be decided BEFORE projection below may add families/columns.
        Map<byte [], NavigableSet<byte []>> familyMap = scan.getFamilyMap();
        boolean keyOnlyFilter = familyMap.isEmpty() && context.getWhereCoditionColumns().isEmpty();
        if (projector.isProjectEmptyKeyValue()) {
            // If nothing projected into scan and we only have one column family, just allow everything
            // to be projected and use a FirstKeyOnlyFilter to skip from row to row. This turns out to
            // be quite a bit faster.
            // Where condition columns also will get added into familyMap
            // When where conditions are present, we can not add FirstKeyOnlyFilter at beginning.
            if (familyMap.isEmpty() && context.getWhereCoditionColumns().isEmpty()
                    && table.getColumnFamilies().size() == 1) {
                // Project the one column family. We must project a column family since it's possible
                // that there are other non declared column families that we need to ignore.
                scan.addFamily(table.getColumnFamilies().get(0).getName().getBytes());
            } else {
                byte[] ecf = SchemaUtil.getEmptyColumnFamily(table);
                // Project empty key value unless the column family containing it has
                // been projected in its entirety.
                if (!familyMap.containsKey(ecf) || familyMap.get(ecf) != null) {
                    scan.addColumn(ecf, QueryConstants.EMPTY_COLUMN_BYTES);
                }
            }
        } else if (table.getViewType() == ViewType.MAPPED) {
            // Since we don't have the empty key value in MAPPED tables, we must select all CFs in HRS. But only the
            // selected column values are returned back to client
            for (PColumnFamily family : table.getColumnFamilies()) {
                scan.addFamily(family.getName().getBytes());
            }
        }
        // Add FirstKeyOnlyFilter if there are no references to key value columns
        if (keyOnlyFilter) {
            ScanUtil.andFilterAtBeginning(scan, new FirstKeyOnlyFilter());
        }

        // TODO adding all CFs here is not correct. It should be done only after ColumnProjectionOptimization.
        if (perScanLimit != null) {
            ScanUtil.andFilterAtEnd(scan, new PageFilter(perScanLimit));
        }

        doColumnProjectionOptimization(context, scan, table, statement);

        this.scans = getParallelScans();
        List<KeyRange> splitRanges = Lists.newArrayListWithExpectedSize(scans.size() * ESTIMATED_GUIDEPOSTS_PER_REGION);
        for (List<Scan> scanList : scans) {
            for (Scan aScan : scanList) {
                splitRanges.add(KeyRange.getKeyRange(aScan.getStartRow(), aScan.getStopRow()));
            }
        }
        this.splits = ImmutableList.copyOf(splitRanges);
        // If split detected, this will be more than one, but that's unlikely
        this.allFutures = Lists.newArrayListWithExpectedSize(1);
    }

    /**
     * Optionally rewrites the scan so that whole column families are fetched server-side
     * (avoiding HBase's ExplicitColumnTracker seeks) and a ColumnProjectionFilter trims
     * the result back down to the requested columns. Controlled by the SEEK_TO_COLUMN /
    * NO_SEEK_TO_COLUMN hints; by default only used when a single CF is referenced.
     */
    private void doColumnProjectionOptimization(StatementContext context, Scan scan, PTable table, FilterableStatement statement) {
        Map<byte[], NavigableSet<byte[]>> familyMap = scan.getFamilyMap();
        if (familyMap != null && !familyMap.isEmpty()) {
            // columnsTracker contain cf -> qualifiers which should get returned.
            Map<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> columnsTracker =
                    new TreeMap<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>>();
            Set<byte[]> conditionOnlyCfs = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
            int referencedCfCount = familyMap.size();
            for (Pair<byte[], byte[]> whereCol : context.getWhereCoditionColumns()) {
                if (!(familyMap.containsKey(whereCol.getFirst()))) {
                    referencedCfCount++;
                }
            }
            boolean useOptimization;
            if (statement.getHint().hasHint(Hint.SEEK_TO_COLUMN)) {
                // Do not use the optimization
                useOptimization = false;
            } else if (statement.getHint().hasHint(Hint.NO_SEEK_TO_COLUMN)) {
                // Strictly use the optimization
                useOptimization = true;
            } else {
                // when referencedCfCount is >1 and no Hints, we are not using the optimization
                useOptimization = referencedCfCount == 1;
            }
            if (useOptimization) {
                // Snapshot the requested qualifiers per CF before addFamily() below wipes them
                // from the scan's familyMap.
                for (Entry<byte[], NavigableSet<byte[]>> entry : familyMap.entrySet()) {
                    ImmutableBytesPtr cf = new ImmutableBytesPtr(entry.getKey());
                    NavigableSet<byte[]> qs = entry.getValue();
                    NavigableSet<ImmutableBytesPtr> cols = null;
                    if (qs != null) {
                        cols = new TreeSet<ImmutableBytesPtr>();
                        for (byte[] q : qs) {
                            cols.add(new ImmutableBytesPtr(q));
                        }
                    }
                    columnsTracker.put(cf, cols);
                }
            }
            // Making sure that where condition CFs are getting scanned at HRS.
            for (Pair<byte[], byte[]> whereCol : context.getWhereCoditionColumns()) {
                if (useOptimization) {
                    if (!(familyMap.containsKey(whereCol.getFirst()))) {
                        scan.addFamily(whereCol.getFirst());
                        conditionOnlyCfs.add(whereCol.getFirst());
                    }
                } else {
                    if (familyMap.containsKey(whereCol.getFirst())) {
                        // where column's CF is present. If there are some specific columns added against this CF, we
                        // need to ensure this where column also getting added in it.
                        // If the select was like select cf1.*, then that itself will select the whole CF. So no need to
                        // specifically add the where column. Adding that will remove the cf1.* stuff and only this
                        // where condition column will get returned!
                        NavigableSet<byte[]> cols = familyMap.get(whereCol.getFirst());
                        // cols is null means the whole CF will get scanned.
                        if (cols != null) {
                            scan.addColumn(whereCol.getFirst(), whereCol.getSecond());
                        }
                    } else {
                        // where column's CF itself is not present in family map. We need to add the column
                        scan.addColumn(whereCol.getFirst(), whereCol.getSecond());
                    }
                }
            }
            if (useOptimization && !columnsTracker.isEmpty()) {
                for (ImmutableBytesPtr f : columnsTracker.keySet()) {
                    // This addFamily will remove explicit cols in scan familyMap and make it as entire row.
                    // We don't want the ExplicitColumnTracker to be used. Instead we have the ColumnProjectionFilter
                    scan.addFamily(f.get());
                }
                // We don't need this filter for aggregates, as we're not returning back what's
                // in the scan in this case. We still want the other optimization that causes
                // the ExplicitColumnTracker not to be used, though.
                if (!(statement.isAggregate())) {
                    ScanUtil.andFilterAtEnd(scan, new ColumnProjectionFilter(SchemaUtil.getEmptyColumnFamily(table),
                            columnsTracker, conditionOnlyCfs));
                }
            }
        }
    }

    @Override
    public List<KeyRange> getSplits() {
        return splits;
    }

    @Override
    public List<List<Scan>> getScans() {
        return scans;
    }

    /**
     * Returns the end keys of all regions except the last (whose end key is empty),
     * i.e. the sorted internal boundaries between consecutive regions.
     */
    private static List<byte[]> toBoundaries(List<HRegionLocation> regionLocations) {
        int nBoundaries = regionLocations.size() - 1;
        List<byte[]> ranges = Lists.newArrayListWithExpectedSize(nBoundaries);
        for (int i = 0; i < nBoundaries; i++) {
            HRegionInfo regionInfo = regionLocations.get(i).getRegionInfo();
            ranges.add(regionInfo.getEndKey());
        }
        return ranges;
    }

    /** Index of the region (or guidepost interval) containing the inclusive key. */
    private static int getIndexContainingInclusive(List<byte[]> boundaries, byte[] inclusiveKey) {
        int guideIndex = Collections.binarySearch(boundaries, inclusiveKey, Bytes.BYTES_COMPARATOR);
        // If we found an exact match, return the index+1, as the inclusiveKey will be contained
        // in the next region (since we're matching on the end boundary).
        guideIndex = (guideIndex < 0 ? -(guideIndex + 1) : (guideIndex + 1));
        return guideIndex;
    }

    /** Index of the region (or guidepost interval) containing the exclusive key. */
    private static int getIndexContainingExclusive(List<byte[]> boundaries, byte[] exclusiveKey) {
        int guideIndex = Collections.binarySearch(boundaries, exclusiveKey, Bytes.BYTES_COMPARATOR);
        // If we found an exact match, return the index we found as the exclusiveKey won't be
        // contained in the next region as with getIndexContainingInclusive.
        guideIndex = (guideIndex < 0 ? -(guideIndex + 1) : guideIndex);
        return guideIndex;
    }

    /**
     * Returns the guide posts to use for parallelization, preferring the default
     * (empty) column family's stats, or an empty list when stats are unavailable
     * or disabled for this query.
     */
    private List<byte[]> getGuidePosts() {
        /*
         *  Don't use guide posts if:
         *  1) We're doing a point lookup, as HBase is fast enough at those
         *     to not need them to be further parallelized. TODO: pref test to verify
         *  2) We're collecting stats, as in this case we need to scan entire
         *     regions worth of data to track where to put the guide posts.
         */
        if (!useStats()) {
            return Collections.emptyList();
        }

        List<byte[]> gps = null;
        PTable table = getTable();
        Map<byte[],GuidePostsInfo> guidePostMap = tableStats.getGuidePosts();
        byte[] defaultCF = SchemaUtil.getEmptyColumnFamily(getTable());
        if (table.getColumnFamilies().isEmpty()) {
            // For sure we can get the defaultCF from the table
            if (guidePostMap.get(defaultCF) != null) {
                gps = guidePostMap.get(defaultCF).getGuidePosts();
            }
        } else {
            Scan scan = context.getScan();
            if (scan.getFamilyMap().size() > 0 && !scan.getFamilyMap().containsKey(defaultCF)) {
                // If default CF is not used in scan, use first CF referenced in scan
                GuidePostsInfo guidePostsInfo = guidePostMap.get(scan.getFamilyMap().keySet().iterator().next());
                if (guidePostsInfo != null) {
                    gps = guidePostsInfo.getGuidePosts();
                }
            } else {
                // Otherwise, favor use of default CF.
                if (guidePostMap.get(defaultCF) != null) {
                    gps = guidePostMap.get(defaultCF).getGuidePosts();
                }
            }
        }
        if (gps == null) {
            return Collections.emptyList();
        }
        return gps;
    }

    /**
     * Debug-logging helper: renders guide posts as a bracketed, comma-separated list,
     * inserting a newline every 10 entries.
     * NOTE(review): for an empty list this yields "]" rather than "[]" because
     * setCharAt overwrites the opening bracket — harmless for debug output, but
     * worth confirming if ever reused elsewhere.
     */
    private static String toString(List<byte[]> gps) {
        StringBuilder buf = new StringBuilder(gps.size() * 100);
        buf.append("[");
        for (int i = 0; i < gps.size(); i++) {
            buf.append(Bytes.toStringBinary(gps.get(i)));
            buf.append(",");
            if (i > 0 && i < gps.size()-1 && (i % 10) == 0) {
                buf.append("\n");
            }
        }
        buf.setCharAt(buf.length()-1, ']');
        return buf.toString();
    }

    /**
     * Appends scan (if non-null) to the current inner scan list and, when a new
     * merge-sort group must begin (unordered plan, local index crossing a region,
     * or a salted table crossing a salt-bucket prefix), flushes the current list
     * into parallelScans and starts a fresh one. Returns the (possibly new)
     * current inner list.
     */
    private List<Scan> addNewScan(List<List<Scan>> parallelScans, List<Scan> scans, Scan scan, byte[] startKey, boolean crossedRegionBoundary) {
        PTable table = getTable();
        boolean startNewScanList = false;
        if (!plan.isRowKeyOrdered()) {
            startNewScanList = true;
        } else if (crossedRegionBoundary) {
            if (table.getIndexType() == IndexType.LOCAL) {
                startNewScanList = true;
            } else if (table.getBucketNum() != null) {
                startNewScanList = scans.isEmpty() ||
                        ScanUtil.crossesPrefixBoundary(startKey,
                                ScanUtil.getPrefix(scans.get(scans.size()-1).getStartRow(), SaltingUtil.NUM_SALTING_BYTES),
                                SaltingUtil.NUM_SALTING_BYTES);
            }
        }
        if (scan != null) {
            scans.add(scan);
        }
        if (startNewScanList && !scans.isEmpty()) {
            parallelScans.add(scans);
            scans = Lists.newArrayListWithExpectedSize(1);
        }
        return scans;
    }

    private List<List<Scan>> getParallelScans() throws SQLException {
        return getParallelScans(EMPTY_BYTE_ARRAY, EMPTY_BYTE_ARRAY);
    }

    /**
     * Compute the list of parallel scans to run for a given query. The inner scans
     * may be concatenated together directly, while the other ones may need to be
     * merge sorted, depending on the query.
     * Region boundaries are intersected with guide posts; an empty startKey/stopKey
     * means unbounded on that side.
     * @return list of parallel scans to run for a given query.
     * @throws SQLException
     */
    private List<List<Scan>> getParallelScans(byte[] startKey, byte[] stopKey) throws SQLException {
        Scan scan = context.getScan();
        List<HRegionLocation> regionLocations = context.getConnection().getQueryServices()
                .getAllTableRegions(physicalTableName);
        List<byte[]> regionBoundaries = toBoundaries(regionLocations);
        ScanRanges scanRanges = context.getScanRanges();
        PTable table = getTable();
        boolean isSalted = table.getBucketNum() != null;
        boolean isLocalIndex = table.getIndexType() == IndexType.LOCAL;
        List<byte[]> gps = getGuidePosts();
        if (logger.isDebugEnabled()) {
            logger.debug("Guideposts: " + toString(gps));
        }
        // Salted/local-index tables have data for every row-key prefix in every region,
        // so the scan range cannot be narrowed from the scan's start/stop row.
        boolean traverseAllRegions = isSalted || isLocalIndex;
        if (!traverseAllRegions) {
            byte[] scanStartRow = scan.getStartRow();
            if (scanStartRow.length != 0 && Bytes.compareTo(scanStartRow, startKey) > 0) {
                startKey = scanStartRow;
            }
            byte[] scanStopRow = scan.getStopRow();
            if (stopKey.length == 0 || Bytes.compareTo(scanStopRow, stopKey) < 0) {
                stopKey = scanStopRow;
            }
        }

        int regionIndex = 0;
        int stopIndex = regionBoundaries.size();
        if (startKey.length > 0) {
            regionIndex = getIndexContainingInclusive(regionBoundaries, startKey);
        }
        if (stopKey.length > 0) {
            stopIndex = Math.min(stopIndex, regionIndex + getIndexContainingExclusive(regionBoundaries.subList(regionIndex, stopIndex), stopKey));
            if (isLocalIndex) {
                stopKey = regionLocations.get(stopIndex).getRegionInfo().getEndKey();
            }
        }
        List<List<Scan>> parallelScans = Lists.newArrayListWithExpectedSize(stopIndex - regionIndex + 1);

        byte[] currentKey = startKey;
        int guideIndex = currentKey.length == 0 ? 0 : getIndexContainingInclusive(gps, currentKey);
        int gpsSize = gps.size();
        int estGuidepostsPerRegion = gpsSize == 0 ? 1 : gpsSize / regionLocations.size() + 1;
        int keyOffset = 0;
        List<Scan> scans = Lists.newArrayListWithExpectedSize(estGuidepostsPerRegion);
        // Merge bisect with guideposts for all but the last region
        while (regionIndex <= stopIndex) {
            byte[] currentGuidePost, endKey, endRegionKey = EMPTY_BYTE_ARRAY;
            if (regionIndex == stopIndex) {
                endKey = stopKey;
            } else {
                endKey = regionBoundaries.get(regionIndex);
            }
            if (isLocalIndex) {
                HRegionInfo regionInfo = regionLocations.get(regionIndex).getRegionInfo();
                endRegionKey = regionInfo.getEndKey();
                keyOffset = ScanUtil.getRowKeyOffset(regionInfo.getStartKey(), endRegionKey);
            }
            // Emit one scan per guide-post interval that falls inside this region.
            while (guideIndex < gpsSize
                    && (Bytes.compareTo(currentGuidePost = gps.get(guideIndex), endKey) <= 0 || endKey.length == 0)) {
                Scan newScan = scanRanges.intersectScan(scan, currentKey, currentGuidePost, keyOffset, false);
                scans = addNewScan(parallelScans, scans, newScan, currentGuidePost, false);
                currentKey = currentGuidePost;
                guideIndex++;
            }
            Scan newScan = scanRanges.intersectScan(scan, currentKey, endKey, keyOffset, true);
            if (isLocalIndex) {
                if (newScan != null) {
                    newScan.setAttribute(EXPECTED_UPPER_REGION_KEY, endRegionKey);
                } else if (!scans.isEmpty()) {
                    scans.get(scans.size()-1).setAttribute(EXPECTED_UPPER_REGION_KEY, endRegionKey);
                }
            }
            scans = addNewScan(parallelScans, scans, newScan, endKey, true);
            currentKey = endKey;
            regionIndex++;
        }
        if (!scans.isEmpty()) { // Add any remaining scans
            parallelScans.add(scans);
        }
        return parallelScans;
    }

    public static <T> List<T> reverseIfNecessary(List<T> list, boolean reverse) {
        if (!reverse) {
            return list;
        }
        return Lists.reverse(list);
    }

    /**
     * Executes the scan in parallel across all regions, blocking until all scans are complete.
     * On a StaleRegionBoundaryCacheException the region cache is cleared (once) and just
     * the affected portion of work is recomputed and resubmitted. On any failure all
     * captured iterators are closed and the first exception is rethrown with any
     * cleanup exceptions chained via setNextException.
     * @return the result iterators for the scan of each region
     */
    @Override
    public List<PeekingResultIterator> getIterators() throws SQLException {
        Scan scan = context.getScan();
        if (logger.isDebugEnabled()) {
            logger.debug(LogUtil.addCustomAnnotations("Getting iterators for " + this,
                    ScanUtil.getCustomAnnotations(scan)));
        }
        boolean success = false;
        boolean isReverse = ScanUtil.isReversed(scan);
        boolean isLocalIndex = getTable().getIndexType() == IndexType.LOCAL;
        final ConnectionQueryServices services = context.getConnection().getQueryServices();
        int numScans = size();
        // Capture all iterators so that if something goes wrong, we close them all
        // The iterators list is based on the submission of work, so it may not
        // contain them all (for example if work was rejected from the queue)
        Queue<PeekingResultIterator> allIterators = new ConcurrentLinkedQueue<>();
        List<PeekingResultIterator> iterators = new ArrayList<PeekingResultIterator>(numScans);
        final List<List<Pair<Scan,Future<PeekingResultIterator>>>> futures = Lists.newArrayListWithExpectedSize(numScans);
        allFutures.add(futures);
        SQLException toThrow = null;
        // Get query time out from Statement and convert from seconds back to milliseconds
        int queryTimeOut = context.getStatement().getQueryTimeout() * 1000;
        final long startTime = System.currentTimeMillis();
        final long maxQueryEndTime = startTime + queryTimeOut;
        try {
            submitWork(scans, futures, allIterators, splits.size());
            boolean clearedCache = false;
            for (List<Pair<Scan,Future<PeekingResultIterator>>> future : reverseIfNecessary(futures,isReverse)) {
                List<PeekingResultIterator> concatIterators = Lists.newArrayListWithExpectedSize(future.size());
                for (Pair<Scan,Future<PeekingResultIterator>> scanPair : reverseIfNecessary(future,isReverse)) {
                    try {
                        // Remaining budget: total query timeout minus time already spent.
                        long timeOutForScan = maxQueryEndTime - System.currentTimeMillis();
                        if (timeOutForScan < 0) {
                            throw new SQLExceptionInfo.Builder(SQLExceptionCode.OPERATION_TIMED_OUT).setMessage(". Query couldn't be completed in the alloted time: " + queryTimeOut + " ms").build().buildException();
                        }
                        PeekingResultIterator iterator = scanPair.getSecond().get(timeOutForScan, TimeUnit.MILLISECONDS);
                        concatIterators.add(iterator);
                    } catch (ExecutionException e) {
                        try { // Rethrow as SQLException
                            throw ServerUtil.parseServerException(e);
                        } catch (StaleRegionBoundaryCacheException e2) {
                            // Catch only to try to recover from region boundary cache being out of date
                            List<List<Pair<Scan,Future<PeekingResultIterator>>>> newFutures = Lists.newArrayListWithExpectedSize(2);
                            if (!clearedCache) { // Clear cache once so that we rejigger job based on new boundaries
                                services.clearTableRegionCache(physicalTableName);
                                clearedCache = true;
                            }
                            // Resubmit just this portion of work again
                            Scan oldScan = scanPair.getFirst();
                            byte[] startKey = oldScan.getStartRow();
                            byte[] endKey = oldScan.getStopRow();
                            if (isLocalIndex) {
                                endKey = oldScan.getAttribute(EXPECTED_UPPER_REGION_KEY);
                            }
                            List<List<Scan>> newNestedScans = this.getParallelScans(startKey, endKey);
                            // Add any concatIterators that were successful so far
                            // as we need these to be in order
                            addIterator(iterators, concatIterators);
                            concatIterators = Lists.newArrayList();
                            submitWork(newNestedScans, newFutures, allIterators, newNestedScans.size());
                            allFutures.add(newFutures);
                            for (List<Pair<Scan,Future<PeekingResultIterator>>> newFuture : reverseIfNecessary(newFutures, isReverse)) {
                                for (Pair<Scan,Future<PeekingResultIterator>> newScanPair : reverseIfNecessary(newFuture, isReverse)) {
                                    // Immediate do a get (not catching exception again) and then add the iterators we
                                    // get back immediately. They'll be sorted as expected, since they're replacing the
                                    // original one.
                                    long timeOutForScan = maxQueryEndTime - System.currentTimeMillis();
                                    if (timeOutForScan < 0) {
                                        throw new SQLExceptionInfo.Builder(SQLExceptionCode.OPERATION_TIMED_OUT).setMessage(". Query couldn't be completed in the alloted time: " + queryTimeOut + " ms").build().buildException();
                                    }
                                    PeekingResultIterator iterator = newScanPair.getSecond().get(timeOutForScan, TimeUnit.MILLISECONDS);
                                    iterators.add(iterator);
                                }
                            }
                        }
                    }
                }
                addIterator(iterators, concatIterators);
            }
            success = true;
            return iterators;
        } catch (TimeoutException e) {
            QUERY_TIMEOUT.increment();
            // thrown when a thread times out waiting for the future.get() call to return
            toThrow = new SQLExceptionInfo.Builder(SQLExceptionCode.OPERATION_TIMED_OUT)
                    .setMessage(". Query couldn't be completed in the alloted time: " + queryTimeOut + " ms")
                    .setRootCause(e).build().buildException();
        } catch (SQLException e) {
            toThrow = e;
        } catch (Exception e) {
            toThrow = ServerUtil.parseServerException(e);
        } finally {
            try {
                if (!success) {
                    // Close queued work and every iterator we captured so far; chain any
                    // cleanup failures onto the primary exception rather than losing them.
                    try {
                        close();
                    } catch (Exception e) {
                        if (toThrow == null) {
                            toThrow = ServerUtil.parseServerException(e);
                        } else {
                            toThrow.setNextException(ServerUtil.parseServerException(e));
                        }
                    } finally {
                        try {
                            SQLCloseables.closeAll(allIterators);
                        } catch (Exception e) {
                            if (toThrow == null) {
                                toThrow = ServerUtil.parseServerException(e);
                            } else {
                                toThrow.setNextException(ServerUtil.parseServerException(e));
                            }
                        }
                    }
                }
            } finally {
                if (toThrow != null) {
                    FAILED_QUERY.increment();
                    throw toThrow;
                }
            }
        }
        return null; // Not reachable
    }

    @Override
    public void close() throws SQLException {
        // Don't call cancel on already started work, as it causes the HConnection
        // to get into a funk. Instead, just cancel queued work.
        boolean cancelledWork = false;
        try {
            for (List<List<Pair<Scan,Future<PeekingResultIterator>>>> futures : allFutures) {
                for (List<Pair<Scan,Future<PeekingResultIterator>>> futureScans : futures) {
                    for (Pair<Scan,Future<PeekingResultIterator>> futurePair : futureScans) {
                        // When work is rejected, we may have null futurePair entries, because
                        // we randomize these and set them as they're submitted.
                        if (futurePair != null) {
                            Future<PeekingResultIterator> future = futurePair.getSecond();
                            if (future != null) {
                                // cancel(false): do not interrupt if already running.
                                cancelledWork |= future.cancel(false);
                            }
                        }
                    }
                }
            }
        } finally {
            if (cancelledWork) {
                context.getConnection().getQueryServices().getExecutor().purge();
            }
        }
    }

    /** Wraps the child iterators in a ConcatResultIterator unless there are none. */
    private void addIterator(List<PeekingResultIterator> parentIterators, List<PeekingResultIterator> childIterators) {
        if (!childIterators.isEmpty()) {
            parentIterators.add(ConcatResultIterator.newIterator(childIterators));
        }
    }

    /** Immutable handle pairing a Scan with its position in the nested scans lists. */
    protected static final class ScanLocator {
        private final int outerListIndex;
        private final int innerListIndex;
        private final Scan scan;

        public ScanLocator(Scan scan, int outerListIndex, int innerListIndex) {
            this.outerListIndex = outerListIndex;
            this.innerListIndex = innerListIndex;
            this.scan = scan;
        }
        public int getOuterListIndex() {
            return outerListIndex;
        }
        public int getInnerListIndex() {
            return innerListIndex;
        }
        public Scan getScan() {
            return scan;
        }
    }

    abstract protected String getName();
    abstract protected void submitWork(List<List<Scan>> nestedScans, List<List<Pair<Scan,Future<PeekingResultIterator>>>> nestedFutures,
            Queue<PeekingResultIterator> allIterators, int estFlattenedSize);

    @Override
    public int size() {
        return this.scans.size();
    }

    @Override
    public void explain(List<String> planSteps) {
        boolean displayChunkCount = context.getConnection().getQueryServices().getProps().getBoolean(
                QueryServices.EXPLAIN_CHUNK_COUNT_ATTRIB,
                QueryServicesOptions.DEFAULT_EXPLAIN_CHUNK_COUNT);
        StringBuilder buf = new StringBuilder();
        buf.append("CLIENT " + (displayChunkCount ? (this.splits.size() + "-CHUNK ") : "") + getName() + " " + size() + "-WAY ");
        explain(buf.toString(),planSteps);
    }

    @Override
    public String toString() {
        return "ResultIterators [name=" + getName() + ",id=" + scanId + ",scans=" + scans + "]";
    }
}
package org.apache.lucene.codecs.memory;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.NoSuchElementException;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.MathUtil;
import org.apache.lucene.util.fst.Builder;
import org.apache.lucene.util.fst.FST.INPUT_TYPE;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.PositiveIntOutputs;
import org.apache.lucene.util.fst.Util;
import org.apache.lucene.util.packed.BlockPackedWriter;
import org.apache.lucene.util.packed.MonotonicBlockPackedWriter;
import org.apache.lucene.util.packed.PackedInts.FormatAndBits;
import org.apache.lucene.util.packed.PackedInts;

import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.VERSION_CURRENT;
import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.BLOCK_SIZE;
import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.BYTES;
import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.NUMBER;
import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.FST;
import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.DELTA_COMPRESSED;
import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.GCD_COMPRESSED;
import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.TABLE_COMPRESSED;
import static org.apache.lucene.codecs.memory.MemoryDocValuesProducer.UNCOMPRESSED;

/**
 * Writer for {@link MemoryDocValuesFormat}.
 * Writes per-field doc values into a data file and a metadata file; the numeric
 * encoding (uncompressed / table / GCD / delta) is chosen per field based on the
 * observed values. The on-disk layout is read back by MemoryDocValuesProducer,
 * so write order here is part of the file format — do not reorder writes.
 */
class MemoryDocValuesConsumer extends DocValuesConsumer {
  final IndexOutput data, meta;  // data = values; meta = per-field directory/EOF marker
  final int maxDoc;
  // Passed to PackedInts.fastestFormatAndBits: how much memory waste is acceptable
  // in exchange for faster access.
  final float acceptableOverheadRatio;

  MemoryDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension, float acceptableOverheadRatio) throws IOException {
    this.acceptableOverheadRatio = acceptableOverheadRatio;
    maxDoc = state.segmentInfo.getDocCount();
    boolean success = false;
    try {
      String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
      data = state.directory.createOutput(dataName, state.context);
      CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
      String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
      meta = state.directory.createOutput(metaName, state.context);
      CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
      success = true;
    } finally {
      if (!success) {
        // Close (and thus delete-on-abort via the directory) whatever was opened.
        IOUtils.closeWhileHandlingException(this);
      }
    }
  }

  @Override
  public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
    addNumericField(field, values, true);
  }

  /**
   * Writes one numeric field. When optimizeStorage is true, a first pass gathers
   * min/max, the GCD of deltas, and (up to 256) unique values to pick the smallest
   * encoding; when false (e.g. ordinals from addSortedField) it always uses
   * delta/block-packed encoding. A null value is treated as 0 and recorded in a
   * missing-docs bitset.
   */
  void addNumericField(FieldInfo field, Iterable<Number> values, boolean optimizeStorage) throws IOException {
    meta.writeVInt(field.number);
    meta.writeByte(NUMBER);
    meta.writeLong(data.getFilePointer());
    long minValue = Long.MAX_VALUE;
    long maxValue = Long.MIN_VALUE;
    long gcd = 0;
    boolean missing = false;
    // TODO: more efficient?
    HashSet<Long> uniqueValues = null;
    if (optimizeStorage) {
      uniqueValues = new HashSet<>();

      long count = 0;
      for (Number nv : values) {
        final long v;
        if (nv == null) {
          v = 0;
          missing = true;
        } else {
          v = nv.longValue();
        }

        if (gcd != 1) {
          if (v < Long.MIN_VALUE / 2 || v > Long.MAX_VALUE / 2) {
            // in that case v - minValue might overflow and make the GCD computation return
            // wrong results. Since these extreme values are unlikely, we just discard
            // GCD computation for them
            gcd = 1;
          } else if (count != 0) { // minValue needs to be set first
            gcd = MathUtil.gcd(gcd, v - minValue);
          }
        }

        minValue = Math.min(minValue, v);
        maxValue = Math.max(maxValue, v);

        if (uniqueValues != null) {
          if (uniqueValues.add(v)) {
            if (uniqueValues.size() > 256) {
              // too many distinct values for table compression; stop tracking
              uniqueValues = null;
            }
          }
        }

        ++count;
      }
      assert count == maxDoc;
    }

    if (missing) {
      long start = data.getFilePointer();
      writeMissingBitset(values);
      meta.writeLong(start);
      meta.writeLong(data.getFilePointer() - start);
    } else {
      meta.writeLong(-1L);  // -1 = no missing bitset
    }

    if (uniqueValues != null) {
      // small number of unique values
      final int bitsPerValue = PackedInts.bitsRequired(uniqueValues.size()-1);
      FormatAndBits formatAndBits = PackedInts.fastestFormatAndBits(maxDoc, bitsPerValue, acceptableOverheadRatio);
      if (formatAndBits.bitsPerValue == 8 && minValue >= Byte.MIN_VALUE && maxValue <= Byte.MAX_VALUE) {
        meta.writeByte(UNCOMPRESSED); // uncompressed
        for (Number nv : values) {
          data.writeByte(nv == null ? 0 : (byte) nv.longValue());
        }
      } else {
        meta.writeByte(TABLE_COMPRESSED); // table-compressed
        Long[] decode = uniqueValues.toArray(new Long[uniqueValues.size()]);
        final HashMap<Long,Integer> encode = new HashMap<Long,Integer>();
        data.writeVInt(decode.length);
        for (int i = 0; i < decode.length; i++) {
          data.writeLong(decode[i]);
          encode.put(decode[i], i);
        }

        meta.writeVInt(PackedInts.VERSION_CURRENT);
        data.writeVInt(formatAndBits.format.getId());
        data.writeVInt(formatAndBits.bitsPerValue);

        final PackedInts.Writer writer = PackedInts.getWriterNoHeader(data, formatAndBits.format, maxDoc, formatAndBits.bitsPerValue, PackedInts.DEFAULT_BUFFER_SIZE);
        for(Number nv : values) {
          writer.add(encode.get(nv == null ? 0 : nv.longValue()));
        }
        writer.finish();
      }
    } else if (gcd != 0 && gcd != 1) {
      // values share a common divisor: store (v - minValue) / gcd block-packed
      meta.writeByte(GCD_COMPRESSED);
      meta.writeVInt(PackedInts.VERSION_CURRENT);
      data.writeLong(minValue);
      data.writeLong(gcd);
      data.writeVInt(BLOCK_SIZE);

      final BlockPackedWriter writer = new BlockPackedWriter(data, BLOCK_SIZE);
      for (Number nv : values) {
        long value = nv == null ? 0 : nv.longValue();
        writer.add((value - minValue) / gcd);
      }
      writer.finish();
    } else {
      meta.writeByte(DELTA_COMPRESSED); // delta-compressed

      meta.writeVInt(PackedInts.VERSION_CURRENT);
      data.writeVInt(BLOCK_SIZE);

      final BlockPackedWriter writer = new BlockPackedWriter(data, BLOCK_SIZE);
      for (Number nv : values) {
        writer.add(nv == null ? 0 : nv.longValue());
      }
      writer.finish();
    }
  }

  @Override
  public void close() throws IOException {
    boolean success = false;
    try {
      if (meta != null) {
        meta.writeVInt(-1); // write EOF marker
      }
      success = true;
    } finally {
      if (success) {
        IOUtils.close(data, meta);
      } else {
        IOUtils.closeWhileHandlingException(data, meta);
      }
    }
  }

  /**
   * Writes one binary field: the concatenated bytes go to the data file; the
   * metadata records the byte range, optional missing bitset, and min/max lengths.
   * Variable-length values additionally get monotonic-packed end addresses.
   */
  @Override
  public void addBinaryField(FieldInfo field, final Iterable<BytesRef> values) throws IOException {
    // write the byte[] data
    meta.writeVInt(field.number);
    meta.writeByte(BYTES);
    int minLength = Integer.MAX_VALUE;
    int maxLength = Integer.MIN_VALUE;
    final long startFP = data.getFilePointer();
    boolean missing = false;
    for(BytesRef v : values) {
      final int length;
      if (v == null) {
        length = 0;
        missing = true;
      } else {
        length = v.length;
      }
      if (length > MemoryDocValuesFormat.MAX_BINARY_FIELD_LENGTH) {
        throw new IllegalArgumentException("DocValuesField \"" + field.name + "\" is too large, must be <= " + MemoryDocValuesFormat.MAX_BINARY_FIELD_LENGTH);
      }
      minLength = Math.min(minLength, length);
      maxLength = Math.max(maxLength, length);
      if (v != null) {
        data.writeBytes(v.bytes, v.offset, v.length);
      }
    }
    meta.writeLong(startFP);
    meta.writeLong(data.getFilePointer() - startFP);
    if (missing) {
      long start = data.getFilePointer();
      writeMissingBitset(values);
      meta.writeLong(start);
      meta.writeLong(data.getFilePointer() - start);
    } else {
      meta.writeLong(-1L);  // -1 = no missing bitset
    }
    meta.writeVInt(minLength);
    meta.writeVInt(maxLength);

    // if minLength == maxLength, its a fixed-length byte[], we are done (the addresses are implicit)
    // otherwise, we need to record the length fields...
    if (minLength != maxLength) {
      meta.writeVInt(PackedInts.VERSION_CURRENT);
      meta.writeVInt(BLOCK_SIZE);

      final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, BLOCK_SIZE);
      long addr = 0;
      for (BytesRef v : values) {
        if (v != null) {
          addr += v.length;
        }
        writer.add(addr);
      }
      writer.finish();
    }
  }

  /**
   * Builds an FST mapping each term (values must arrive in sorted order for a
   * well-formed term dictionary) to its ordinal, and saves it to the data file.
   */
  private void writeFST(FieldInfo field, Iterable<BytesRef> values) throws IOException {
    meta.writeVInt(field.number);
    meta.writeByte(FST);
    meta.writeLong(data.getFilePointer());
    PositiveIntOutputs outputs = PositiveIntOutputs.getSingleton();
    Builder<Long> builder = new Builder<Long>(INPUT_TYPE.BYTE1, outputs);
    IntsRef scratch = new IntsRef();
    long ord = 0;
    for (BytesRef v : values) {
      builder.add(Util.toIntsRef(v, scratch), ord);
      ord++;
    }
    FST<Long> fst = builder.finish();
    if (fst != null) {
      fst.save(data);
    }
    meta.writeVLong(ord);
  }

  // TODO: in some cases representing missing with minValue-1 wouldn't take up additional space and so on,
  // but this is very simple, and algorithms only check this for values of 0 anyway (doesnt slow down normal decode)
  // One bit per document, packed 64 at a time into longs; bit set = value present.
  void writeMissingBitset(Iterable<?> values) throws IOException {
    long bits = 0;
    int count = 0;
    for (Object v : values) {
      if (count == 64) {
        data.writeLong(bits);
        count = 0;
        bits = 0;
      }
      if (v != null) {
        bits |= 1L << (count & 0x3f);
      }
      count++;
    }
    if (count > 0) {
      data.writeLong(bits);
    }
  }

  @Override
  public void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException {
    // write the ordinals as numerics
    addNumericField(field, docToOrd, false);

    // write the values as FST
    writeFST(field, values);
  }

  // note: this might not be the most efficient... but its fairly simple
  @Override
  public void addSortedSetField(FieldInfo field, Iterable<BytesRef> values, final Iterable<Number> docToOrdCount, final Iterable<Number> ords) throws IOException {
    // write the ordinals as a binary field: each doc's ords are delta-vlong-encoded
    // into one BytesRef by SortedSetIterator below.
    addBinaryField(field, new Iterable<BytesRef>() {
      @Override
      public Iterator<BytesRef> iterator() {
        return new SortedSetIterator(docToOrdCount.iterator(), ords.iterator());
      }
    });

    // write the values as FST
    writeFST(field, values);
  }

  // per-document vint-encoded byte[]
  static class SortedSetIterator implements Iterator<BytesRef> {
    byte[] buffer = new byte[10];
    ByteArrayDataOutput out = new ByteArrayDataOutput();
    BytesRef ref = new BytesRef();

    final Iterator<Number> counts;
    final Iterator<Number> ords;

    SortedSetIterator(Iterator<Number> counts, Iterator<Number> ords) {
      this.counts = counts;
      this.ords = ords;
    }

    @Override
    public boolean hasNext() {
      return counts.hasNext();
    }

    @Override
    public BytesRef next() {
      if (!hasNext()) {
        throw new NoSuchElementException();
      }

      int count = counts.next().intValue();
      int maxSize = count*9; // worst case: each delta is a 9-byte vlong
      if (maxSize > buffer.length) {
        buffer = ArrayUtil.grow(buffer, maxSize);
      }

      try {
        encodeValues(count);
      } catch (IOException bogus) {
        // ByteArrayDataOutput cannot actually throw; rethrow unchecked to satisfy the signature
        throw new RuntimeException(bogus);
      }

      ref.bytes = buffer;
      ref.offset = 0;
      ref.length = out.getPosition();

      return ref;
    }

    // encodes count values to buffer
    private void encodeValues(int count) throws IOException {
      out.reset(buffer);
      long lastOrd = 0;
      for (int i = 0; i < count; i++) {
        long ord = ords.next().longValue();
        out.writeVLong(ord - lastOrd);
        lastOrd = ord;
      }
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }
  }
}
package com.braintreepayments.demo.test; import androidx.preference.PreferenceManager; import androidx.test.core.app.ApplicationProvider; import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner; import com.braintreepayments.demo.test.utilities.TestHelper; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import static com.braintreepayments.AutomatorAction.click; import static com.braintreepayments.AutomatorAction.setText; import static com.braintreepayments.AutomatorAssertion.text; import static com.braintreepayments.DeviceAutomator.onDevice; import static com.braintreepayments.UiObjectMatcher.withContentDescription; import static com.braintreepayments.UiObjectMatcher.withText; import static com.braintreepayments.UiObjectMatcher.withTextContaining; import static com.braintreepayments.UiObjectMatcher.withTextStartingWith; import static com.braintreepayments.api.CardNumber.THREE_D_SECURE_AUTHENTICATION_FAILED; import static com.braintreepayments.api.CardNumber.THREE_D_SECURE_ISSUER_DOES_NOT_PARTICIPATE; import static com.braintreepayments.api.CardNumber.THREE_D_SECURE_ISSUER_DOWN; import static com.braintreepayments.api.CardNumber.THREE_D_SECURE_LOOKUP_ERROR; import static com.braintreepayments.api.CardNumber.THREE_D_SECURE_LOOKUP_TIMEOUT; import static com.braintreepayments.api.CardNumber.THREE_D_SECURE_MPI_SERVICE_ERROR; import static com.braintreepayments.api.CardNumber.THREE_D_SECURE_SIGNATURE_VERIFICATION_FAILURE; import static com.braintreepayments.api.CardNumber.THREE_D_SECURE_VERIFICATON; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.endsWith; @RunWith(AndroidJUnit4ClassRunner.class) public class ThreeDSecureVerificationTest extends TestHelper { @Before public void setup() { super.setup(); PreferenceManager.getDefaultSharedPreferences(ApplicationProvider.getApplicationContext()) .edit() .putBoolean("enable_three_d_secure", true) .commit(); onDevice(withText("Credit 
or Debit Cards")).waitForEnabled().perform(click()); } @Test(timeout = 40000) public void threeDSecure_authenticates() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_VERIFICATON)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); enterThreeDSecurePasswordAndReturnToApp(); getNonceDetails().check(text(containsString("Card Last Two: 02"))); getNonceDetails().check(text(containsString("isLiabilityShifted: true"))); getNonceDetails().check(text(containsString("isLiabilityShiftPossible: true"))); onDevice(withText("Create a Transaction")).perform(click()); onDevice(withTextStartingWith("created")).check(text(endsWith("authorized"))); } @Test(timeout = 40000) public void threeDSecure_authenticationFailed() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_AUTHENTICATION_FAILED)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); enterThreeDSecurePasswordAndReturnToApp(); onDevice(withTextStartingWith("An error occurred")).check(text(containsString("Failed to authenticate, please try a different form of payment"))); } @Test(timeout = 40000) public void threeDSecure_lookupError() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_LOOKUP_ERROR)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); getNonceDetails().check(text(containsString("Card Last Two: 77"))); getNonceDetails().check(text(containsString("isLiabilityShifted: false"))); 
getNonceDetails().check(text(containsString("isLiabilityShiftPossible: false"))); onDevice(withText("Create a Transaction")).perform(click()); onDevice(withTextStartingWith("created")).check(text(endsWith("authorized"))); } @Test(timeout = 40000) public void threeDSecure_whenIssuerDoesNotParticipate_returnsASuccessfulAuthentication() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_ISSUER_DOES_NOT_PARTICIPATE)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); getNonceDetails().check(text(containsString("Card Last Two: 01"))); getNonceDetails().check(text(containsString("isLiabilityShifted: true"))); getNonceDetails().check(text(containsString("isLiabilityShiftPossible: true"))); getNonceDetails().check(text(containsString("wasVerified: true"))); onDevice(withText("Create a Transaction")).perform(click()); onDevice(withTextStartingWith("created")).check(text(endsWith("authorized"))); } @Test(timeout = 40000) public void threeDSecure_whenSignatureVerificationFails_returnsAFailedAuthentication() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_SIGNATURE_VERIFICATION_FAILURE)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CID")).perform(setText("1234")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); enterThreeDSecurePasswordAndReturnToApp(); onDevice(withTextStartingWith("An error occurred")).check(text(containsString("Failed to authenticate, please try a different form of payment"))); } @Test(timeout = 40000) public void threeDSecure_whenRequired_requestsAuthentication() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_VERIFICATON)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); 
onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); enterThreeDSecurePasswordAndReturnToApp(); getNonceDetails().check(text(containsString("Card Last Two: 02"))); getNonceDetails().check(text(containsString("isLiabilityShifted: true"))); getNonceDetails().check(text(containsString("isLiabilityShiftPossible: true"))); getNonceDetails().check(text(containsString("wasVerified: true"))); onDevice(withText("Create a Transaction")).perform(click()); onDevice(withTextStartingWith("created")).check(text(endsWith("authorized"))); } @Test(timeout = 40000) public void threeDSecure_whenCardinalReturnsError_returnsAnError() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_MPI_SERVICE_ERROR)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); enterThreeDSecurePasswordAndReturnToApp(); onDevice(withTextStartingWith("An error occurred")).check(text(containsString("An unexpected error occurred"))); } @Test(timeout = 40000) public void threeDSecure_whenWebViewIsClosed_callsCancelListener() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_VERIFICATON)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); onDevice(withText("Authentication")).waitForExists(); onDevice(withContentDescription("Close tab")).perform(click()); onDevice(withTextStartingWith("An error occurred")).check(text(containsString("User canceled 3DS."))); } @Test(timeout = 40000) public void threeDSecure_whenBackIsPressed_callsCancelListener() { onDevice(withText("Card 
Number")).perform(setText(THREE_D_SECURE_VERIFICATON)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); onDevice(withText("Authentication")).waitForExists(); onDevice().pressBack(); onDevice(withTextStartingWith("An error occurred")).check(text(containsString("User canceled 3DS."))); } @Test(timeout = 40000) public void threeDSecure_whenIssuerDown_returnsAnUnexpectedError() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_ISSUER_DOWN)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); enterThreeDSecurePasswordAndReturnToApp(); onDevice(withTextStartingWith("An error occurred")).check(text(containsString("An unexpected error occurred"))); } @Test(timeout = 50000) public void threeDSecure_doesALookupAndReturnsACardAfterATimeout() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_LOOKUP_TIMEOUT)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); onDevice(withText("Credit or Debit Cards")).waitForExists(20000); getNonceDetails().check(text(containsString("Card Last Two: 44"))); getNonceDetails().check(text(containsString("isLiabilityShifted: false"))); getNonceDetails().check(text(containsString("isLiabilityShiftPossible: false"))); getNonceDetails().check(text(containsString("wasVerified: true"))); onDevice(withText("Create a Transaction")).perform(click()); onDevice(withTextStartingWith("created")).check(text(endsWith("authorized"))); } @Test public void 
threeDSecure_automaticallyRedirectsBackToAppWith3DSResult() { onDevice(withText("Card Number")).perform(setText(THREE_D_SECURE_VERIFICATON)); onDevice(withText("Expiration Date")).perform(setText(validExpirationText())); onDevice(withText("CVV")).perform(setText("123")); onDevice(withText("Postal Code")).perform(setText("12345")); onDevice(withText("Purchase")).perform(click()); onDevice(withText("Authentication")).waitForExists(); onDevice().typeText("1234"); onDevice(withText("Submit")).perform(click()); onDevice(withTextStartingWith("Card Last Two")).check(text(containsString("wasVerified: true"))); } private void enterThreeDSecurePasswordAndReturnToApp() { onDevice(withText("Authentication")).waitForExists(); onDevice().typeText("1234"); onDevice(withText("Submit")).perform(click()); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gateway;

import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.store.*;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.*;
import java.nio.file.*;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * MetaDataStateFormat is a base class to write checksummed
 * XContent based files to one or more directories in a standardized directory structure.
 * @param <T> the type of the XContent base data-structure
 */
public abstract class MetaDataStateFormat<T> {
    public static final String STATE_DIR_NAME = "_state";
    public static final String STATE_FILE_EXTENSION = ".st";
    private static final String STATE_FILE_CODEC = "state";
    private static final int STATE_FILE_VERSION = 0;
    private static final int BUFFER_SIZE = 4096;
    private final XContentType format;
    private final boolean deleteOldFiles;

    /**
     * Creates a new {@link MetaDataStateFormat} instance
     * @param format the format of the x-content
     * @param deleteOldFiles if <code>true</code> write operations will
     *                       clean up old files written with this format.
     */
    protected MetaDataStateFormat(XContentType format, boolean deleteOldFiles) {
        this.format = format;
        this.deleteOldFiles = deleteOldFiles;
    }

    /**
     * Returns the {@link XContentType} used to serialize xcontent on write.
     */
    public XContentType format() {
        return format;
    }

    /**
     * Writes the given state to the given directories. The state is written to a
     * state directory ({@value #STATE_DIR_NAME}) underneath each of the given file locations and is created if it
     * doesn't exist. The state is serialized to a temporary file in that directory and is then atomically moved to
     * it's target filename of the pattern <tt>{prefix}{version}.st</tt>.
     *
     * @param state the state object to write
     * @param prefix the state names prefix used to compose the file name.
     * @param version the version of the state
     * @param locations the locations where the state should be written to.
     * @throws IOException if an IOException occurs
     */
    public final void write(final T state, final String prefix, final long version, final Path... locations) throws IOException {
        Preconditions.checkArgument(locations != null, "Locations must not be null");
        Preconditions.checkArgument(locations.length > 0, "One or more locations required");
        String fileName = prefix + version + STATE_FILE_EXTENSION;
        Path stateLocation = locations[0].resolve(STATE_DIR_NAME);
        Files.createDirectories(stateLocation);
        final Path tmpStatePath = stateLocation.resolve(fileName + ".tmp");
        final Path finalStatePath = stateLocation.resolve(fileName);
        try {
            final String resourceDesc = "MetaDataStateFormat.write(path=\"" + tmpStatePath + "\")";
            try (OutputStreamIndexOutput out = new OutputStreamIndexOutput(resourceDesc, Files.newOutputStream(tmpStatePath), BUFFER_SIZE)) {
                CodecUtil.writeHeader(out, STATE_FILE_CODEC, STATE_FILE_VERSION);
                out.writeInt(format.index());
                out.writeLong(version);
                try (XContentBuilder builder = newXContentBuilder(format, new org.elasticsearch.common.lucene.store.OutputStreamIndexOutput(out) {
                    @Override
                    public void close() throws IOException {
                        // this is important since some of the XContentBuilders write bytes on close.
                        // in order to write the footer we need to prevent closing the actual index input.
                    } })) {
                    builder.startObject();
                    {
                        toXContent(builder, state);
                    }
                    builder.endObject();
                }
                CodecUtil.writeFooter(out);
            }
            IOUtils.fsync(tmpStatePath, false); // fsync the state file
            // atomic rename makes the new state visible; then fsync the directory to persist the rename
            Files.move(tmpStatePath, finalStatePath, StandardCopyOption.ATOMIC_MOVE);
            IOUtils.fsync(stateLocation, true);
            // replicate the finished file to the remaining locations
            for (int i = 1; i < locations.length; i++) {
                stateLocation = locations[i].resolve(STATE_DIR_NAME);
                Files.createDirectories(stateLocation);
                Path tmpPath = stateLocation.resolve(fileName + ".tmp");
                Path finalPath = stateLocation.resolve(fileName);
                try {
                    Files.copy(finalStatePath, tmpPath);
                    Files.move(tmpPath, finalPath, StandardCopyOption.ATOMIC_MOVE); // we are on the same FileSystem / Partition here we can do an atomic move
                    IOUtils.fsync(stateLocation, true); // we just fsync the dir here..
                } finally {
                    Files.deleteIfExists(tmpPath);
                }
            }
        } finally {
            Files.deleteIfExists(tmpStatePath);
        }
        if (deleteOldFiles) {
            cleanupOldFiles(prefix, fileName, locations);
        }
    }

    protected XContentBuilder newXContentBuilder(XContentType type, OutputStream stream ) throws IOException {
        return XContentFactory.contentBuilder(type, stream);
    }

    /**
     * Writes the given state to the given XContentBuilder
     * Subclasses need to implement this class for theirs specific state.
     */
    public abstract void toXContent(XContentBuilder builder, T state) throws IOException;

    /**
     * Reads a new instance of the state from the given XContentParser
     * Subclasses need to implement this class for theirs specific state.
     */
    public abstract T fromXContent(XContentParser parser) throws IOException;

    /**
     * Reads the state from a given file and compares the expected version against the actual version of
     * the state.
     */
    public final T read(Path file, long expectedVersion) throws IOException {
        try (Directory dir = newDirectory(file.getParent())) {
            try (final IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
                // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
                CodecUtil.checksumEntireFile(indexInput);
                CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, STATE_FILE_VERSION, STATE_FILE_VERSION);
                final XContentType xContentType = XContentType.values()[indexInput.readInt()];
                final long version = indexInput.readLong();
                if (version != expectedVersion) {
                    throw new CorruptStateException("State version mismatch expected: " + expectedVersion + " but was: " + version);
                }
                long filePointer = indexInput.getFilePointer();
                long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
                try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
                    try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(new InputStreamIndexInput(slice, contentSize))) {
                        return fromXContent(parser);
                    }
                }
            } catch(CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
                // we trick this into a dedicated exception with the original stacktrace
                throw new CorruptStateException(ex);
            }
        }
    }

    protected Directory newDirectory(Path dir) throws IOException {
        return new SimpleFSDirectory(dir);
    }

    // Deletes every state file with the given prefix except the file we just wrote.
    private void cleanupOldFiles(final String prefix, final String currentStateFile, Path[] locations) throws IOException {
        final DirectoryStream.Filter<Path> filter = new DirectoryStream.Filter<Path>() {
            @Override
            public boolean accept(Path entry) throws IOException {
                final String entryFileName = entry.getFileName().toString();
                return Files.isRegularFile(entry)
                        && entryFileName.startsWith(prefix) // only state files
                        && currentStateFile.equals(entryFileName) == false; // keep the current state file around
            }
        };
        // now clean up the old files
        for (Path dataLocation : locations) {
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(dataLocation.resolve(STATE_DIR_NAME), filter)) {
                for (Path stateFile : stream) {
                    Files.deleteIfExists(stateFile);
                }
            }
        }
    }

    /**
     * Tries to load the latest state from the given data-locations. It tries to load the latest state determined by
     * the states version from one or more data directories and if none of the latest states can be loaded an exception
     * is thrown to prevent accidentally loading a previous state and silently omitting the latest state.
     *
     * @param logger a elasticsearch logger instance
     * @param format the actual metastate format to use
     * @param pattern the file name pattern to identify files belonging to this pattern and to read the version from.
     *                The first capture group should return the version of the file. If the second capture group is has a
     *                null value the files is considered a legacy file and will be treated as if the file contains a plain
     *                x-content payload.
     * @param stateType the state type we are loading. used for logging contenxt only.
     * @param dataLocations the data-locations to try.
     * @return the latest state or <code>null</code> if no state was found.
     */
    public static <T> T loadLatestState(ESLogger logger, MetaDataStateFormat<T> format, Pattern pattern, String stateType, Path... dataLocations) throws IOException {
        List<PathAndVersion> files = new ArrayList<>();
        long maxVersion = -1;
        boolean maxVersionIsLegacy = true;
        if (dataLocations != null) { // select all eligable files first
            for (Path dataLocation : dataLocations) {
                final Path stateDir = dataLocation.resolve(STATE_DIR_NAME);
                if (!Files.exists(stateDir) || !Files.isDirectory(stateDir)) {
                    continue;
                }
                // now, iterate over the current versions, and find latest one
                try (DirectoryStream<Path> paths = Files.newDirectoryStream(stateDir)) { // we don't pass a glob since we need the group part for parsing
                    for (Path stateFile : paths) {
                        final Matcher matcher = pattern.matcher(stateFile.getFileName().toString());
                        if (matcher.matches()) {
                            final long version = Long.parseLong(matcher.group(1));
                            maxVersion = Math.max(maxVersion, version);
                            final boolean legacy = MetaDataStateFormat.STATE_FILE_EXTENSION.equals(matcher.group(2)) == false;
                            maxVersionIsLegacy &= legacy; // on purpose, see NOTE below
                            files.add(new PathAndVersion(stateFile, version, legacy));
                        }
                    }
                }
            }
        }
        final List<Throwable> exceptions = new ArrayList<>();
        T state = null;
        // NOTE: we might have multiple version of the latest state if there are multiple data dirs.. for this case
        //       we iterate only over the ones with the max version. If we have at least one state file that uses the
        //       new format (ie. legacy == false) then we know that the latest version state ought to use this new format.
        //       In case the state file with the latest version does not use the new format while older state files do,
        //       the list below will be empty and loading the state will fail
        for (PathAndVersion pathAndVersion : Collections2.filter(files, new VersionAndLegacyPredicate(maxVersion, maxVersionIsLegacy))) {
            try {
                final Path stateFile = pathAndVersion.file;
                final long version = pathAndVersion.version;
                if (pathAndVersion.legacy) { // read the legacy format -- plain XContent
                    final byte[] data = Files.readAllBytes(stateFile);
                    if (data.length == 0) {
                        logger.debug("{}: no data for [{}], ignoring...", stateType, stateFile.toAbsolutePath());
                        continue;
                    }
                    // Fix: close the parser once the state is read; it was previously created and never closed (leak).
                    XContentParser parser = XContentHelper.createParser(data, 0, data.length);
                    try {
                        state = format.fromXContent(parser);
                    } finally {
                        parser.close();
                    }
                    if (state == null) {
                        logger.debug("{}: no data for [{}], ignoring...", stateType, stateFile.toAbsolutePath());
                    }
                } else {
                    state = format.read(stateFile, version);
                }
                return state;
            } catch (Throwable e) {
                exceptions.add(e);
                logger.debug("{}: failed to read [{}], ignoring...", e, pathAndVersion.file.toAbsolutePath(), stateType);
            }
        }
        // if we reach this something went wrong
        ExceptionsHelper.maybeThrowRuntimeAndSuppress(exceptions);
        if (files.size() > 0) {
            // We have some state files but none of them gave us a usable state
            throw new ElasticsearchIllegalStateException("Could not find a state file to recover from among " + files);
        }
        return state;
    }

    /**
     * Filters out all {@link MetaDataStateFormat.PathAndVersion} instances with a different version than
     * the given one.
     */
    private static final class VersionAndLegacyPredicate implements Predicate<PathAndVersion> {
        private final long version;
        private final boolean legacy;

        VersionAndLegacyPredicate(long version, boolean legacy) {
            this.version = version;
            this.legacy = legacy;
        }

        @Override
        public boolean apply(PathAndVersion input) {
            return input.version == version && input.legacy == legacy;
        }
    }

    /**
     * Internal struct-like class that holds the parsed state version, the file
     * and a flag if the file is a legacy state ie. pre 1.5
     */
    private static class PathAndVersion {
        final Path file;
        final long version;
        final boolean legacy;

        private PathAndVersion(Path file, long version, boolean legacy) {
            this.file = file;
            this.version = version;
            this.legacy = legacy;
        }
    }

    /**
     * Deletes all meta state directories recursively for the given data locations
     * @param dataLocations the data location to delete
     */
    public static void deleteMetaState(Path... dataLocations) throws IOException {
        Path[] stateDirectories = new Path[dataLocations.length];
        for (int i = 0; i < dataLocations.length; i++) {
            stateDirectories[i] = dataLocations[i].resolve(STATE_DIR_NAME);
        }
        IOUtils.rm(stateDirectories);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache license, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the license for the specific language governing permissions and
 * limitations under the license.
 */
package org.apache.logging.log4j.core.appender.mom.jeromq;

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.appender.AbstractManager;
import org.apache.logging.log4j.core.appender.ManagerFactory;
import org.apache.logging.log4j.core.util.Cancellable;
import org.apache.logging.log4j.core.util.ShutdownCallbackRegistry;
import org.apache.logging.log4j.util.PropertiesUtil;
import org.zeromq.ZMQ;

/**
 * Manager for publishing messages via JeroMq. A single process-wide ZMQ context is
 * created lazily in the static initializer and shared by all manager instances;
 * each manager owns one PUB socket bound to its configured endpoints.
 *
 * @since 2.6
 */
public class JeroMqManager extends AbstractManager {

    /**
     * System property to enable shutdown hook.
     */
    public static final String SYS_PROPERTY_ENABLE_SHUTDOWN_HOOK = "log4j.jeromq.enableShutdownHook";

    /**
     * System property to control JeroMQ I/O thread count.
     */
    public static final String SYS_PROPERTY_IO_THREADS = "log4j.jeromq.ioThreads";

    private static final JeroMqManagerFactory FACTORY = new JeroMqManagerFactory();
    // Shared process-wide ZMQ context; sized from the I/O-thread system property (default 1).
    private static final ZMQ.Context CONTEXT;

    // Retained to avoid garbage collection of the hook
    private static final Cancellable SHUTDOWN_HOOK;

    static {
        LOGGER.trace("JeroMqManager using ZMQ version {}", ZMQ.getVersionString());

        final int ioThreads = PropertiesUtil.getProperties().getIntegerProperty(SYS_PROPERTY_IO_THREADS, 1);
        LOGGER.trace("JeroMqManager creating ZMQ context with ioThreads = {}", ioThreads);
        CONTEXT = ZMQ.context(ioThreads);

        // Register a shutdown callback that closes the shared context, unless disabled via system property.
        final boolean enableShutdownHook = PropertiesUtil.getProperties().getBooleanProperty(
            SYS_PROPERTY_ENABLE_SHUTDOWN_HOOK, true);
        if (enableShutdownHook) {
            SHUTDOWN_HOOK = ((ShutdownCallbackRegistry) LogManager.getFactory()).addShutdownCallback(CONTEXT::close);
        } else {
            SHUTDOWN_HOOK = null;
        }
    }

    // The PUB socket owned by this manager; configured and bound in the constructor.
    private final ZMQ.Socket publisher;

    // Creates a PUB socket, applies every configured socket option, and binds all endpoints.
    private JeroMqManager(final String name, final JeroMqConfiguration config) {
        super(null, name);
        publisher = CONTEXT.socket(ZMQ.PUB);
        publisher.setAffinity(config.affinity);
        publisher.setBacklog(config.backlog);
        publisher.setDelayAttachOnConnect(config.delayAttachOnConnect);
        if (config.identity != null) {
            publisher.setIdentity(config.identity);
        }
        publisher.setIPv4Only(config.ipv4Only);
        publisher.setLinger(config.linger);
        publisher.setMaxMsgSize(config.maxMsgSize);
        publisher.setRcvHWM(config.rcvHwm);
        publisher.setReceiveBufferSize(config.receiveBufferSize);
        publisher.setReceiveTimeOut(config.receiveTimeOut);
        publisher.setReconnectIVL(config.reconnectIVL);
        publisher.setReconnectIVLMax(config.reconnectIVLMax);
        publisher.setSendBufferSize(config.sendBufferSize);
        publisher.setSendTimeOut(config.sendTimeOut);
        publisher.setSndHWM(config.sndHwm);
        publisher.setTCPKeepAlive(config.tcpKeepAlive);
        publisher.setTCPKeepAliveCount(config.tcpKeepAliveCount);
        publisher.setTCPKeepAliveIdle(config.tcpKeepAliveIdle);
        publisher.setTCPKeepAliveInterval(config.tcpKeepAliveInterval);
        publisher.setXpubVerbose(config.xpubVerbose);
        for (final String endpoint : config.endpoints) {
            publisher.bind(endpoint);
        }
        LOGGER.debug("Created JeroMqManager with {}", config);
    }

    // Publishes one message; returns whatever the socket reports as the send result.
    public boolean send(final byte[] data) {
        return publisher.send(data);
    }

    // Closes this manager's socket; the shared context is left open for other managers.
    @Override
    protected boolean releaseSub(final long timeout, final TimeUnit timeUnit) {
        publisher.close();
        return true;
    }

    // Looks up (or creates) the manager registered under the given name with the given socket options.
    public static JeroMqManager getJeroMqManager(final String name, final long affinity, final long backlog,
                                                 final boolean delayAttachOnConnect, final byte[] identity, final boolean ipv4Only,
                                                 final long linger, final long maxMsgSize, final long rcvHwm, final long receiveBufferSize,
                                                 final int receiveTimeOut, final long reconnectIVL, final long reconnectIVLMax,
                                                 final long sendBufferSize, final int sendTimeOut, final long sndHwm, final int tcpKeepAlive,
                                                 final long tcpKeepAliveCount, final long tcpKeepAliveIdle, final long tcpKeepAliveInterval,
                                                 final boolean xpubVerbose, final List<String> endpoints) {
        return getManager(name, FACTORY,
            new JeroMqConfiguration(affinity, backlog, delayAttachOnConnect, identity, ipv4Only, linger, maxMsgSize,
                rcvHwm, receiveBufferSize, receiveTimeOut, reconnectIVL, reconnectIVLMax, sendBufferSize, sendTimeOut,
                sndHwm, tcpKeepAlive, tcpKeepAliveCount, tcpKeepAliveIdle, tcpKeepAliveInterval, xpubVerbose,
                endpoints));
    }

    public static ZMQ.Context getContext() {
        return CONTEXT;
    }

    // Immutable holder for every socket option accepted by getJeroMqManager; used as the
    // factory data passed to AbstractManager.getManager.
    private static class JeroMqConfiguration {
        private final long affinity;
        private final long backlog;
        private final boolean delayAttachOnConnect;
        private final byte[] identity;
        private final boolean ipv4Only;
        private final long linger;
        private final long maxMsgSize;
        private final long rcvHwm;
        private final long receiveBufferSize;
        private final int receiveTimeOut;
        private final long reconnectIVL;
        private final long reconnectIVLMax;
        private final long sendBufferSize;
        private final int sendTimeOut;
        private final long sndHwm;
        private final int tcpKeepAlive;
        private final long tcpKeepAliveCount;
        private final long tcpKeepAliveIdle;
        private final long tcpKeepAliveInterval;
        private final boolean xpubVerbose;
        private final List<String> endpoints;

        private JeroMqConfiguration(final long affinity, final long backlog, final boolean delayAttachOnConnect,
                                    final byte[] identity, final boolean ipv4Only, final long linger, final long maxMsgSize,
                                    final long rcvHwm, final long receiveBufferSize, final int receiveTimeOut, final long reconnectIVL,
                                    final long reconnectIVLMax, final long sendBufferSize, final int sendTimeOut, final long sndHwm,
                                    final int tcpKeepAlive, final long tcpKeepAliveCount, final long tcpKeepAliveIdle,
                                    final long tcpKeepAliveInterval, final boolean xpubVerbose, final List<String> endpoints) {
            this.affinity = affinity;
            this.backlog = backlog;
            this.delayAttachOnConnect = delayAttachOnConnect;
            this.identity = identity;
            this.ipv4Only = ipv4Only;
            this.linger = linger;
            this.maxMsgSize = maxMsgSize;
            this.rcvHwm = rcvHwm;
            this.receiveBufferSize = receiveBufferSize;
            this.receiveTimeOut = receiveTimeOut;
            this.reconnectIVL = reconnectIVL;
            this.reconnectIVLMax = reconnectIVLMax;
            this.sendBufferSize = sendBufferSize;
            this.sendTimeOut = sendTimeOut;
            this.sndHwm = sndHwm;
            this.tcpKeepAlive = tcpKeepAlive;
            this.tcpKeepAliveCount = tcpKeepAliveCount;
            this.tcpKeepAliveIdle = tcpKeepAliveIdle;
            this.tcpKeepAliveInterval = tcpKeepAliveInterval;
            this.xpubVerbose = xpubVerbose;
            this.endpoints = endpoints;
        }

        @Override
        public String toString() {
            return "JeroMqConfiguration{" +
                "affinity=" + affinity +
                ", backlog=" + backlog +
                ", delayAttachOnConnect=" + delayAttachOnConnect +
                ", identity=" + Arrays.toString(identity) +
                ", ipv4Only=" + ipv4Only +
                ", linger=" + linger +
                ", maxMsgSize=" + maxMsgSize +
                ", rcvHwm=" + rcvHwm +
                ", receiveBufferSize=" + receiveBufferSize +
                ", receiveTimeOut=" + receiveTimeOut +
                ", reconnectIVL=" + reconnectIVL +
                ", reconnectIVLMax=" + reconnectIVLMax +
                ", sendBufferSize=" + sendBufferSize +
                ", sendTimeOut=" + sendTimeOut +
                ", sndHwm=" + sndHwm +
                ", tcpKeepAlive=" + tcpKeepAlive +
                ", tcpKeepAliveCount=" + tcpKeepAliveCount +
                ", tcpKeepAliveIdle=" + tcpKeepAliveIdle +
                ", tcpKeepAliveInterval=" + tcpKeepAliveInterval +
                ", xpubVerbose=" + xpubVerbose +
                ", endpoints=" + endpoints +
                '}';
        }
    }

    // Factory used by AbstractManager.getManager to construct managers on demand.
    private static class JeroMqManagerFactory implements ManagerFactory<JeroMqManager, JeroMqConfiguration> {
        @Override
        public JeroMqManager createManager(final String name, final JeroMqConfiguration data) {
            return new JeroMqManager(name, data);
        }
    }
}
package com.planet_ink.coffee_mud.Abilities.Common; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.Common.CraftingSkill.CraftParms; import com.planet_ink.coffee_mud.Abilities.Common.CraftingSkill.CraftingActivity; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2014 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
/**
 * Common crafting skill for building torture devices and similarly malicious
 * equipment out of metal, mithril, or cloth. Recipes are loaded from
 * torturesmith.txt; see {@link #parametersFormat()} for the column layout.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class Torturesmithing extends CraftingSkill implements ItemCraftor
{
    @Override
    public String ID()
    {
        return "Torturesmithing";
    }

    private final static String localizedName = CMLib.lang()._("Torturesmithing");

    @Override
    public String name()
    {
        return localizedName;
    }

    private static final String[] triggerStrings =_i(new String[] {"TORTURESMITH","TORTURESMITHING"});

    @Override
    public String[] triggerStrings()
    {
        return triggerStrings;
    }

    @Override
    public String supportedResourceString()
    {
        return "METAL|MITHRIL|CLOTH";
    }

    @Override
    public String parametersFormat()
    {
        return "ITEM_NAME\tITEM_LEVEL\tBUILD_TIME_TICKS\tMATERIALS_REQUIRED\t"
              +"ITEM_BASE_VALUE\tITEM_CLASS_ID\t"
              +"LID_LOCK||CONTAINER_TYPE||RIDE_BASIS||WEAPON_CLASS||CODED_WEAR_LOCATION\t"
              +"CONTAINER_CAPACITY||LIQUID_CAPACITY\t"
              +"BASE_ARMOR_AMOUNT\tWOOD_METAL_CLOTH\tCODED_SPELL_LIST";
    }

    // Recipe column indexes; 0-2 (FINALNAME/LEVEL/TICKS) are inherited from CraftingSkill.
    //protected static final int RCP_FINALNAME=0;
    //protected static final int RCP_LEVEL=1;
    //protected static final int RCP_TICKS=2;
    protected static final int RCP_WOOD=3;      // raw material amount required
    protected static final int RCP_VALUE=4;     // base monetary value
    protected static final int RCP_CLASSTYPE=5; // CMClass item ID to instantiate
    protected static final int RCP_MISCTYPE=6;  // LID/LOCK/BUNDLE/container/ride/wear info
    protected static final int RCP_CAPACITY=7;  // container or liquid capacity
    protected static final int RCP_ARMORDMG=8;  // base armor amount
    protected static final int RCP_MATERIAL=9;  // WOOD/METAL/CLOTH restriction
    protected static final int RCP_SPELL=10;    // optional coded spell list

    @Override
    public String parametersFile()
    {
        return "torturesmith.txt";
    }

    @Override
    protected List<List<String>> loadRecipes()
    {
        return super.loadRecipes(parametersFile());
    }

    /**
     * Completes or aborts the crafting activity: on failure the in-progress item
     * is destroyed (with an appropriate message), on a successful LEARNING pass
     * the sample item is deconstructed into a recipe, and on a successful craft
     * the finished item is dropped for the crafter.
     */
    @Override
    public void unInvoke()
    {
        if(canBeUninvoked())
        {
            if((affected!=null)&&(affected instanceof MOB))
            {
                final MOB mob=(MOB)affected;
                if((buildingI!=null)&&(!aborted))
                {
                    if(messedUp)
                    {
                        if(activity == CraftingActivity.LEARNING)
                            commonEmote(mob,"<S-NAME> fail(s) to learn how to make "+buildingI.name()+".");
                        else
                            commonTell(mob,_("You've ruined @x1!",buildingI.name(mob)));
                        buildingI.destroy();
                    }
                    else
                    if(activity==CraftingActivity.LEARNING)
                    {
                        deconstructRecipeInto( buildingI, recipeHolder );
                        buildingI.destroy();
                    }
                    else
                        dropAWinner(mob,buildingI);
                }
                buildingI=null;
            }
        }
        super.unInvoke();
    }

    @Override
    public boolean supportsDeconstruction()
    {
        return true;
    }

    /**
     * Returns true if the given item is something this skill could have crafted:
     * it must carry a deadly/malicious effect and be of a supported item family
     * (rideables, armor, false limbs, held items, or natively-named weapons).
     */
    @Override
    public boolean mayICraft(final Item I)
    {
        if(I==null)
            return false;
        if(!super.mayBeCrafted(I))
            return false;
        // torturesmithing items are, by definition, deadly or malicious
        if(!CMLib.flags().isDeadlyOrMaliciousEffect(I))
            return false;
        if(I instanceof Ammunition)
            return false;
        if(I instanceof Rideable)
            return true;
        if(I instanceof Shield)
            return false;
        if(I instanceof Weapon)
            return (isANativeItem(I.Name()));
        if(I instanceof Armor)
            return true;
        if(I instanceof FalseLimb)
            return true;
        if(I.rawProperLocationBitmap()==Wearable.WORN_HELD)
            return true;
        return (isANativeItem(I.Name()));
    }

    public boolean supportsMending(Physical I)
    {
        return canMend(null,I,true);
    }

    @Override
    protected boolean canMend(MOB mob, Environmental E, boolean quiet)
    {
        if(!super.canMend(mob,E,quiet))
            return false;
        if((!(E instanceof Item))
        ||(!mayICraft((Item)E)))
        {
            if(!quiet)
                commonTell(mob,_("That's not a torturesmithing item."));
            return false;
        }
        return true;
    }

    @Override
    public String getDecodedComponentsDescription(final MOB mob, final List<String> recipe)
    {
        return super.getComponentDescription( mob, recipe, RCP_WOOD );
    }

    /**
     * Entry point for the TORTURESMITH command: handles "list", "learn",
     * and "stop" sub-commands, otherwise resolves the named recipe, consumes
     * the required resources/components, and starts the crafting activity.
     *
     * @param mob the crafter
     * @param commands the parsed command words (recipe name, flags, amount)
     * @param givenTarget optional pre-selected target
     * @param auto true if invoked automatically (no resource messages)
     * @param asLevel caster-level override, or 0
     * @return true if the skill invocation proceeded
     */
    @Override
    public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
    {
        if(super.checkStop(mob, commands))
            return true;
        final CraftParms parsedVars=super.parseAutoGenerate(auto,givenTarget,commands);
        givenTarget=parsedVars.givenTarget;
        randomRecipeFix(mob,addRecipes(mob,loadRecipes()),commands,parsedVars.autoGenerate);
        if(commands.size()==0)
        {
            commonTell(mob,_("Make what? Enter \"@x1 list\" for a list, \"@x2 learn <item>\" to gain recipes, or \"@x3 stop\" to cancel.",triggerStrings()[0].toLowerCase(),triggerStrings()[0].toLowerCase(),triggerStrings()[0].toLowerCase()));
            return false;
        }
        final List<List<String>> recipes=addRecipes(mob,loadRecipes());
        final String str=(String)commands.elementAt(0);
        String startStr=null;
        bundling=false;
        int duration=4;
        if(str.equalsIgnoreCase("list"))
        {
            String mask=CMParms.combine(commands,1);
            boolean allFlag=false;
            if(mask.equalsIgnoreCase("all"))
            {
                allFlag=true;
                mask="";
            }
            final StringBuffer buf=new StringBuffer(_("@x1 Lvl Material required\n\r",CMStrings.padRight(_("Item"),16)));
            for(int r=0;r<recipes.size();r++)
            {
                final List<String> V=recipes.get(r);
                if(V.size()>0)
                {
                    final String item=replacePercent(V.get(RCP_FINALNAME),"");
                    final int level=CMath.s_int(V.get(RCP_LEVEL));
                    String mat=V.get(RCP_MATERIAL);
                    final String wood=getComponentDescription(mob,V,RCP_WOOD);
                    // a long component description already names the materials
                    if(wood.length()>5)
                        mat="";
                    if(((level<=xlevel(mob))||allFlag)
                    &&((mask.length()==0)||mask.equalsIgnoreCase("all")||CMLib.english().containsString(item,mask)))
                        buf.append(CMStrings.padRight(item,16)+" "+CMStrings.padRight(""+level,3)+" "+wood+" "+mat.toLowerCase()+"\n\r");
                }
            }
            commonTell(mob,buf.toString());
            return true;
        }
        else
        if((commands.firstElement() instanceof String)&&(((String)commands.firstElement())).equalsIgnoreCase("learn"))
        {
            return doLearnRecipe(mob, commands, givenTarget, auto, asLevel);
        }
        activity = CraftingActivity.CRAFTING;
        buildingI=null;
        messedUp=false;
        // trailing number = explicit resource amount (for bundles)
        int amount=-1;
        if((commands.size()>1)&&(CMath.isNumber((String)commands.lastElement())))
        {
            amount=CMath.s_int((String)commands.lastElement());
            commands.removeElementAt(commands.size()-1);
        }
        final String recipeName=CMParms.combine(commands,0);
        List<String> foundRecipe=null;
        final List<List<String>> matches=matchingRecipeNames(recipes,recipeName,true);
        for(int r=0;r<matches.size();r++)
        {
            final List<String> V=matches.get(r);
            if(V.size()>0)
            {
                final int level=CMath.s_int(V.get(RCP_LEVEL));
                if((parsedVars.autoGenerate>0)||(level<=xlevel(mob)))
                {
                    foundRecipe=V;
                    break;
                }
            }
        }
        if(foundRecipe==null)
        {
            commonTell(mob,_("You don't know how to make a '@x1'. Try \"@x2 list\" for a list.",recipeName,triggerStrings[0].toLowerCase()));
            return false;
        }
        final String woodRequiredStr = foundRecipe.get(RCP_WOOD);
        final List<Object> componentsFoundList=getAbilityComponents(mob, woodRequiredStr, "make "+CMLib.english().startWithAorAn(recipeName),parsedVars.autoGenerate);
        if(componentsFoundList==null)
            return false;
        int woodRequired=CMath.s_int(woodRequiredStr);
        woodRequired=adjustWoodRequired(woodRequired,mob);
        if(amount>woodRequired)
            woodRequired=amount;
        final String misctype=foundRecipe.get(RCP_MISCTYPE);
        final String materialtype=foundRecipe.get(RCP_MATERIAL);
        // translate the recipe's material keyword into acceptable raw-material codes
        int[] pm=null;
        if(materialtype.equalsIgnoreCase("wood"))
        {
            pm=new int[1];
            pm[0]=RawMaterial.MATERIAL_WOODEN;
        }
        else
        if(materialtype.equalsIgnoreCase("metal"))
        {
            pm=new int[2];
            pm[0]=RawMaterial.MATERIAL_METAL;
            pm[1]=RawMaterial.MATERIAL_MITHRIL;
        }
        else
        if(materialtype.equalsIgnoreCase("cloth"))
        {
            pm=new int[1];
            pm[0]=RawMaterial.MATERIAL_CLOTH;
        }
        bundling=misctype.equalsIgnoreCase("BUNDLE");
        // BUGFIX: shortage message previously said "wood or cloth"; this skill
        // works METAL|MITHRIL|CLOTH (see supportedResourceString()).
        final int[][] data=fetchFoundResourceData(mob,
                                                  woodRequired,"metal or cloth",pm,
                                                  0,null,null,
                                                  bundling,
                                                  parsedVars.autoGenerate,
                                                  null);
        if(data==null)
            return false;
        woodRequired=data[0][FOUND_AMT];
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        // consume resources/components up front; remember their value for bundles
        final int lostValue=parsedVars.autoGenerate>0?0:
            CMLib.materials().destroyResourcesValue(mob.location(),data[0][FOUND_AMT],data[0][FOUND_CODE],0,null)
            +CMLib.ableMapper().destroyAbilityComponents(componentsFoundList);
        buildingI=CMClass.getItem(foundRecipe.get(RCP_CLASSTYPE));
        if(buildingI==null)
        {
            commonTell(mob,_("There's no such thing as a @x1!!!",foundRecipe.get(RCP_CLASSTYPE)));
            return false;
        }
        duration=getDuration(CMath.s_int(foundRecipe.get(RCP_TICKS)),mob,CMath.s_int(foundRecipe.get(RCP_LEVEL)),4);
        String itemName=replacePercent(foundRecipe.get(RCP_FINALNAME),RawMaterial.CODES.NAME(data[0][FOUND_CODE])).toLowerCase();
        if(bundling)
            itemName="a "+woodRequired+"# "+itemName;
        else
            itemName=CMLib.english().startWithAorAn(itemName);
        buildingI.setName(itemName);
        startStr=_("<S-NAME> start(s) making @x1.",buildingI.name());
        displayText=_("You are making @x1",buildingI.name());
        verb=_("making @x1",buildingI.name());
        playSound="hammer.wav";
        buildingI.setDisplayText(_("@x1 lies here",itemName));
        buildingI.setDescription(itemName+". ");
        buildingI.basePhyStats().setWeight(woodRequired);
        buildingI.setBaseValue(CMath.s_int(foundRecipe.get(RCP_VALUE))+(woodRequired*(RawMaterial.CODES.VALUE(data[0][FOUND_CODE]))));
        buildingI.setMaterial(data[0][FOUND_CODE]);
        buildingI.basePhyStats().setLevel(CMath.s_int(foundRecipe.get(RCP_LEVEL)));
        buildingI.setSecretIdentity(getBrand(mob));
        final int capacity=CMath.s_int(foundRecipe.get(RCP_CAPACITY));
        final int armordmg=CMath.s_int(foundRecipe.get(RCP_ARMORDMG));
        final int hardness=RawMaterial.CODES.HARDNESS(data[0][FOUND_CODE])-3;
        final String spell=(foundRecipe.size()>RCP_SPELL)?foundRecipe.get(RCP_SPELL).trim():"";
        addSpells(buildingI,spell);
        if(buildingI instanceof Container)
        {
            ((Container)buildingI).setCapacity(capacity+woodRequired);
            if(misctype.equalsIgnoreCase("LID"))
                ((Container)buildingI).setLidsNLocks(true,false,false,false);
            else
            if(misctype.equalsIgnoreCase("LOCK"))
            {
                ((Container)buildingI).setLidsNLocks(true,false,true,false);
                // random key name means no ordinary key will ever match
                ((Container)buildingI).setKeyName(Double.toString(Math.random()));
            }
            else
                ((Container)buildingI).setContainTypes(getContainerType(misctype));
        }
        if(buildingI instanceof Rideable)
        {
            setRideBasis((Rideable)buildingI,misctype);
            if(capacity==0)
                ((Rideable)buildingI).setRiderCapacity(1);
            else
            if(capacity<5)
                ((Rideable)buildingI).setRiderCapacity(capacity);
        }
        if((buildingI instanceof Armor)&&(!(buildingI instanceof FalseLimb)))
        {
            ((Armor)buildingI).basePhyStats().setArmor(0);
            if(armordmg!=0)
                ((Armor)buildingI).basePhyStats().setArmor(armordmg+(abilityCode()-1));
            setWearLocation(buildingI,misctype,hardness);
        }
        if(buildingI instanceof Drink)
        {
            if(CMLib.flags().isGettable(buildingI))
            {
                ((Drink)buildingI).setLiquidHeld(capacity*50);
                ((Drink)buildingI).setThirstQuenched(250);
                if((capacity*50)<250)
                    ((Drink)buildingI).setThirstQuenched(capacity*50);
                ((Drink)buildingI).setLiquidRemaining(0);
            }
        }
        // bundles are worth exactly what went into them
        if(bundling)
            buildingI.setBaseValue(lostValue);
        buildingI.recoverPhyStats();
        buildingI.text();
        buildingI.recoverPhyStats();
        messedUp=!proficiencyCheck(mob,0,auto);
        if(parsedVars.autoGenerate>0)
        {
            commands.addElement(buildingI);
            return true;
        }
        final CMMsg msg=CMClass.getMsg(mob,buildingI,this,getActivityMessageType(),startStr);
        if(mob.location().okMessage(mob,msg))
        {
            mob.location().send(mob,msg);
            buildingI=(Item)msg.target();
            beneficialAffect(mob,mob,asLevel,duration);
        }
        return true;
    }
}
/* * Copyright 2018 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.plugin.jdk7.rabbitmq.spring; import com.navercorp.pinpoint.bootstrap.plugin.test.Expectations; import com.navercorp.pinpoint.bootstrap.plugin.test.ExpectedTrace; import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifier; import com.navercorp.pinpoint.common.trace.ServiceType; import com.navercorp.pinpoint.plugin.AgentPath; import com.navercorp.pinpoint.plugin.jdk7.rabbitmq.util.RabbitMQTestConstants; import com.navercorp.pinpoint.plugin.jdk7.rabbitmq.util.TestBroker; import com.navercorp.pinpoint.test.plugin.Dependency; import com.navercorp.pinpoint.test.plugin.JvmVersion; import com.navercorp.pinpoint.test.plugin.PinpointAgent; import com.navercorp.pinpoint.test.plugin.PinpointConfig; import com.navercorp.pinpoint.test.plugin.PinpointPluginTestSuite; import com.navercorp.test.pinpoint.plugin.rabbitmq.PropagationMarker; import com.navercorp.test.pinpoint.plugin.rabbitmq.spring.config.CommonConfig; import com.navercorp.test.pinpoint.plugin.rabbitmq.spring.config.MessageListenerConfig_Post_1_4_0; import com.navercorp.test.pinpoint.plugin.rabbitmq.spring.config.ReceiverConfig_Post_1_6_0; import com.rabbitmq.client.AMQP; import com.rabbitmq.client.Channel; import com.rabbitmq.client.Consumer; import com.rabbitmq.client.Envelope; import com.rabbitmq.client.impl.AMQCommand; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import 
org.junit.runner.RunWith;
import org.springframework.amqp.core.Message;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;

/**
 * Spring-amqp rabbit 2.1.0 removed previously added <tt>BlockingQueueConsumer$ConsumerDecorator</tt>.
 * <p>
 * Skip 2.1.1.RELEASE as it has spring 5.1.2.BUILD-SNAPSHOT dependencies not in maven central.
 *
 * @author HyunGil Jeong
 */
@RunWith(PinpointPluginTestSuite.class)
@PinpointAgent(AgentPath.PATH)
@PinpointConfig("rabbitmq/client/pinpoint-rabbitmq.config")
@Dependency({"org.springframework.amqp:spring-rabbit:[2.1.0.RELEASE,2.1.1.RELEASE),(2.1.1.RELEASE,)",
        "com.fasterxml.jackson.core:jackson-core:2.8.11",
        "org.apache.qpid:qpid-broker:6.1.1"})
@JvmVersion(8)
public class SpringAmqpRabbit_2_1_x_to_2_x_IT {

    private static final TestBroker BROKER = new TestBroker();
    private static final TestApplicationContext CONTEXT = new TestApplicationContext();

    private final SpringAmqpRabbitTestRunner testRunner = new SpringAmqpRabbitTestRunner(CONTEXT);

    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        BROKER.start();
        CONTEXT.init(
                CommonConfig.class,
                MessageListenerConfig_Post_1_4_0.class,
                ReceiverConfig_Post_1_6_0.class);
    }

    @AfterClass
    public static void tearDownAfterClass() {
        CONTEXT.close();
        BROKER.shutdown();
    }

    /**
     * Builds the expected producer-side traces shared by all tests:
     * {@code RabbitTemplate.convertAndSend} followed by {@code ChannelN.basicPublish}.
     */
    private static ExpectedTrace[] createProducerTraces(String remoteAddress, String routingKey) throws Exception {
        Class<?> rabbitTemplateClass = Class.forName("org.springframework.amqp.rabbit.core.RabbitTemplate");
        Method rabbitTemplateConvertAndSend = rabbitTemplateClass.getDeclaredMethod("convertAndSend", String.class, String.class, Object.class);
        ExpectedTrace rabbitTemplateConvertAndSendTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                rabbitTemplateConvertAndSend); // method
        // automatic recovery deliberately disabled as Spring has it's own recovery mechanism
        Class<?> channelNClass = Class.forName("com.rabbitmq.client.impl.ChannelN");
        Method channelNBasicPublish = channelNClass.getDeclaredMethod("basicPublish", String.class, String.class, boolean.class, boolean.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace channelNBasicPublishTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT, // serviceType
                channelNBasicPublish, // method
                null, // rpc
                remoteAddress, // endPoint
                "exchange-" + RabbitMQTestConstants.EXCHANGE, // destinationId
                Expectations.annotation("rabbitmq.exchange", RabbitMQTestConstants.EXCHANGE),
                Expectations.annotation("rabbitmq.routingkey", routingKey));
        return new ExpectedTrace[]{rabbitTemplateConvertAndSendTrace, channelNBasicPublishTrace};
    }

    /** Builds the expected root trace for the broker-initiated consumer invocation. */
    private static ExpectedTrace createConsumerInvocationTrace(String remoteAddress, String routingKey) {
        return Expectations.root(
                RabbitMQTestConstants.RABBITMQ_CLIENT, // serviceType
                "RabbitMQ Consumer Invocation", // method
                "rabbitmq://exchange=" + RabbitMQTestConstants.EXCHANGE, // rpc
                null, // endPoint (collected but API to retrieve local address is not available in all versions, so skip)
                remoteAddress, // remoteAddress
                Expectations.annotation("rabbitmq.routingkey", routingKey));
    }

    /** Builds the expected trace for {@code ConsumerDispatcher.handleDelivery}. */
    private static ExpectedTrace createConsumerDispatcherHandleDeliveryTrace() throws Exception {
        Class<?> consumerDispatcherClass = Class.forName("com.rabbitmq.client.impl.ConsumerDispatcher");
        Method consumerDispatcherHandleDelivery = consumerDispatcherClass.getDeclaredMethod("handleDelivery", Consumer.class, String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class);
        return Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                consumerDispatcherHandleDelivery); // method
    }

    /** Builds the expected trace for the {@code Delivery} constructor. */
    private static ExpectedTrace createDeliveryConstructorTrace() throws Exception {
        Class<?> deliveryClass = Class.forName("org.springframework.amqp.rabbit.support.Delivery");
        Constructor<?> deliveryConstructor = deliveryClass.getDeclaredConstructor(String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class, String.class);
        return Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                deliveryConstructor);
    }

    /** Builds the expected trace for {@code PropagationMarker.mark}. */
    private static ExpectedTrace createPropagationMarkerMarkTrace() throws Exception {
        Method propagationMarkerMark = PropagationMarker.class.getDeclaredMethod("mark");
        return Expectations.event(
                ServiceType.INTERNAL_METHOD.getName(),
                propagationMarkerMark);
    }

    @Test
    public void testPush() throws Exception {
        final String remoteAddress = testRunner.getRemoteAddress();

        ExpectedTrace[] producerTraces = createProducerTraces(remoteAddress, RabbitMQTestConstants.ROUTING_KEY_PUSH);

        ExpectedTrace rabbitMqConsumerInvocationTrace =
                createConsumerInvocationTrace(remoteAddress, RabbitMQTestConstants.ROUTING_KEY_PUSH);

        ExpectedTrace consumerDispatcherHandleDeliveryTrace = createConsumerDispatcherHandleDeliveryTrace();

        ExpectedTrace asynchronousInvocationTrace = Expectations.event(
                ServiceType.ASYNC.getName(),
                "Asynchronous Invocation");

        Class<?> blockingQueueConsumerInternalConsumerClass = Class.forName("org.springframework.amqp.rabbit.listener.BlockingQueueConsumer$InternalConsumer");
        Method blockingQueueConsumerInternalConsumerHandleDelivery = blockingQueueConsumerInternalConsumerClass.getDeclaredMethod("handleDelivery", String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace blockingQueueConsumerInternalConsumerHandleDeliveryTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                blockingQueueConsumerInternalConsumerHandleDelivery);

        ExpectedTrace deliveryConstructorTrace = createDeliveryConstructorTrace();

        Class<?> abstractMessageListenerContainerClass = Class.forName("org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer");
        Method abstractMessageListenerContainerExecuteListener = abstractMessageListenerContainerClass.getDeclaredMethod("executeListener", Channel.class, Message.class);
        ExpectedTrace abstractMessageListenerContainerExecuteListenerTrace = Expectations.event(
                ServiceType.INTERNAL_METHOD.getName(),
                abstractMessageListenerContainerExecuteListener);

        ExpectedTrace markTrace = createPropagationMarkerMarkTrace();

        ExpectedTrace[] consumerTraces = {
                rabbitMqConsumerInvocationTrace,
                consumerDispatcherHandleDeliveryTrace,
                asynchronousInvocationTrace,
                blockingQueueConsumerInternalConsumerHandleDeliveryTrace,
                deliveryConstructorTrace,
                asynchronousInvocationTrace,
                abstractMessageListenerContainerExecuteListenerTrace,
                markTrace
        };

        final int expectedTraceCount = producerTraces.length + consumerTraces.length;
        final PluginTestVerifier verifier = testRunner.runPush(expectedTraceCount);

        verifier.verifyDiscreteTrace(producerTraces);
        verifier.verifyDiscreteTrace(consumerTraces);
        verifier.verifyTraceCount(0);
    }

    @Test
    public void testPull() throws Exception {
        final String remoteAddress = testRunner.getRemoteAddress();

        // verify queue-initiated traces
        ExpectedTrace[] producerTraces = createProducerTraces(remoteAddress, RabbitMQTestConstants.ROUTING_KEY_PULL);

        ExpectedTrace rabbitMqConsumerInvocationTrace =
                createConsumerInvocationTrace(remoteAddress, RabbitMQTestConstants.ROUTING_KEY_PULL);

        Class<?> amqChannelClass = Class.forName("com.rabbitmq.client.impl.AMQChannel");
        Method amqChannelHandleCompleteInboundCommand = amqChannelClass.getDeclaredMethod("handleCompleteInboundCommand", AMQCommand.class);
        ExpectedTrace amqChannelHandleCompleteInboundCommandTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                amqChannelHandleCompleteInboundCommand); // method

        ExpectedTrace[] consumerTraces = {
                rabbitMqConsumerInvocationTrace,
                amqChannelHandleCompleteInboundCommandTrace
        };

        // verify client-initiated traces
        Class<?> rabbitTemplateClass = Class.forName("org.springframework.amqp.rabbit.core.RabbitTemplate");
        Method rabbitTemplateReceive = rabbitTemplateClass.getDeclaredMethod("receive", String.class);
        ExpectedTrace rabbitTemplateReceiveTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                rabbitTemplateReceive); // method

        Class<?> channelNClass = Class.forName("com.rabbitmq.client.impl.ChannelN");
        Method channelNBasicGet = channelNClass.getDeclaredMethod("basicGet", String.class, boolean.class);
        ExpectedTrace channelNBasicGetTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL,
                channelNBasicGet);

        ExpectedTrace markTrace = createPropagationMarkerMarkTrace();

        ExpectedTrace[] clientInitiatedTraces = {
                rabbitTemplateReceiveTrace,
                channelNBasicGetTrace,
                markTrace
        };

        final int expectedTraceCount = producerTraces.length + consumerTraces.length + clientInitiatedTraces.length;
        final PluginTestVerifier verifier = testRunner.runPull(expectedTraceCount);

        verifier.verifyDiscreteTrace(producerTraces);
        verifier.verifyDiscreteTrace(consumerTraces);
        verifier.verifyDiscreteTrace(clientInitiatedTraces);
        verifier.verifyTraceCount(0);
    }

    @Test
    public void testPullWithTimeout() throws Exception {
        final String remoteAddress = testRunner.getRemoteAddress();

        // verify queue-initiated traces
        ExpectedTrace[] producerTraces = createProducerTraces(remoteAddress, RabbitMQTestConstants.ROUTING_KEY_PULL);

        ExpectedTrace rabbitMqConsumerInvocationTrace =
                createConsumerInvocationTrace(remoteAddress, RabbitMQTestConstants.ROUTING_KEY_PULL);

        ExpectedTrace consumerDispatcherHandleDeliveryTrace = createConsumerDispatcherHandleDeliveryTrace();

        ExpectedTrace asynchronousInvocationTrace = Expectations.event(
                ServiceType.ASYNC.getName(),
                "Asynchronous Invocation");

        // RabbitTemplate internal consumer implementation - may change in future versions which will cause tests to
        // fail, in which case the integration test needs to be updated to match code changes
        Class<?> rabbitTemplateInternalConsumerClass = Class.forName("org.springframework.amqp.rabbit.core.RabbitTemplate$2");
        Method rabbitTemplateInternalConsumerHandleDelivery = rabbitTemplateInternalConsumerClass.getDeclaredMethod("handleDelivery", String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace rabbitTemplateInternalConsumerHandleDeliveryTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                rabbitTemplateInternalConsumerHandleDelivery); // method

        ExpectedTrace deliveryConstructorTrace = createDeliveryConstructorTrace();

        ExpectedTrace[] consumerTraces = {
                rabbitMqConsumerInvocationTrace,
                consumerDispatcherHandleDeliveryTrace,
                asynchronousInvocationTrace,
                rabbitTemplateInternalConsumerHandleDeliveryTrace,
                deliveryConstructorTrace
        };

        // verify client-initiated traces
        Class<?> rabbitTemplateClass = Class.forName("org.springframework.amqp.rabbit.core.RabbitTemplate");
        Method rabbitTemplateReceive = rabbitTemplateClass.getDeclaredMethod("receive", String.class, long.class);
        ExpectedTrace rabbitTemplateReceiveTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                rabbitTemplateReceive); // method

        ExpectedTrace markTrace = createPropagationMarkerMarkTrace();

        ExpectedTrace[] clientInitiatedTraces = {
                rabbitTemplateReceiveTrace,
                markTrace
        };

        final int expectedTraceCount = producerTraces.length + consumerTraces.length + clientInitiatedTraces.length;
        final PluginTestVerifier verifier = testRunner.runPull(expectedTraceCount, 5000L);

        verifier.verifyDiscreteTrace(producerTraces);
        verifier.verifyDiscreteTrace(consumerTraces);
        verifier.verifyDiscreteTrace(clientInitiatedTraces);
        verifier.verifyTraceCount(0);
    }
}
/* * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.bigtable.hbase; import static com.google.cloud.bigtable.hbase.test_env.SharedTestEnvRule.COLUMN_FAMILY; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Random; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; public class TestBasicOps extends AbstractTest { /** Happy path for a single value. 
*/ @Test public void testPutGetDelete() throws IOException { // Initialize byte[] rowKey = dataHelper.randomData("testrow-"); byte[] testQualifier = dataHelper.randomData("testQualifier-"); byte[] testValue = dataHelper.randomData("testValue-"); testPutGetDelete(true, rowKey, testQualifier, testValue); } /** Requirement 1.2 - Rowkey, family, qualifer, and value are byte[] */ @Test public void testBinaryPutGetDelete() throws IOException { // Initialize Random random = new Random(); byte[] rowKey = new byte[100]; random.nextBytes(rowKey); byte[] testQualifier = new byte[100]; random.nextBytes(testQualifier); byte[] testValue = new byte[100]; random.nextBytes(testValue); // TODO(carterpage) - test that column-family can work as raw binary // Put testPutGetDelete(true, rowKey, testQualifier, testValue); } /** * Requirement 1.9 - Referring to a column without the qualifier implicitly sets a special "empty" * qualifier. */ @Test public void testNullQualifier() throws IOException { // Initialize values Table table = getDefaultTable(); byte[] rowKey = dataHelper.randomData("testrow-"); byte[] testValue = dataHelper.randomData("testValue-"); // Insert value with null qualifier Put put = new Put(rowKey); put.addColumn(COLUMN_FAMILY, null, testValue); table.put(put); // This is treated the same as an empty String (which is just an empty byte array). Get get = new Get(rowKey); get.addColumn(COLUMN_FAMILY, Bytes.toBytes("")); Result result = table.get(get); Assert.assertEquals(1, result.size()); Assert.assertTrue(result.containsColumn(COLUMN_FAMILY, null)); Assert.assertArrayEquals( testValue, CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, null))); // Get as a null. This should work. 
get = new Get(rowKey); get.addColumn(COLUMN_FAMILY, null); result = table.get(get); Assert.assertEquals(1, result.size()); Assert.assertTrue(result.containsColumn(COLUMN_FAMILY, null)); Assert.assertArrayEquals( testValue, CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, null))); // This should return when selecting the whole family too. get = new Get(rowKey); get.addFamily(COLUMN_FAMILY); result = table.get(get); Assert.assertEquals(1, result.size()); Assert.assertTrue(result.containsColumn(COLUMN_FAMILY, null)); Assert.assertArrayEquals( testValue, CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, null))); // Delete Delete delete = new Delete(rowKey); delete.addColumns(COLUMN_FAMILY, null); table.delete(delete); // Confirm deleted Assert.assertFalse(table.exists(get)); table.close(); } /** * Requirement 2.4 - Maximum cell size is 10MB by default. Can be overriden using * hbase.client.keyvalue.maxsize property. * * <p>Cell size includes value and key info, so the value needs to a bit less than the max to * work. */ @Test @Category(KnownEmulatorGap.class) public void testPutGetBigValue() throws IOException { testPutGetDeleteExists((10 << 20) - 1024, false, true); // 10 MB - 1kB } /** * Test a put without a get. This will help allow us to see performance differences between put * alone and put/get. There are (or hopefully were, by the time this is read), performance issues * with testBigValue. The profile for put (uploading) is different from the profile for get * (downloading). We need a way to see where the issue is. */ @Test @Category(KnownEmulatorGap.class) public void testPutBigValue() throws IOException { testPutGetDeleteExists((10 << 20) - 1024, false, false); // 10 MB - 1kB } /** * Requirement 2.4 - Maximum cell size is 10MB by default. Can be overridden using * hbase.client.keyvalue.maxsize property. * * <p>Ensure the failure case. 
*/
@Test(expected = IllegalArgumentException.class)
@Category(KnownEmulatorGap.class)
public void testPutTooBigValue() throws IOException {
  testPutGetDeleteExists((10 << 20) + 1 + 4, true, true); // 10 MB + 1
}

@Test
@Category(KnownEmulatorGap.class)
/** Run a large value ten times for performance logging purposes */
public void testPutAlmostTooBigValueTenTimes() throws IOException {
  for (int i = 0; i < 10; i++) {
    long start = System.currentTimeMillis();
    testPutGetDeleteExists(10 << 20, true, true); // 10 MB
    if (System.currentTimeMillis() - start > 5_000) {
      // If this is a slow connection, don't bother doing a performance test.
      break;
    }
  }
}

@Test
@Category(KnownEmulatorGap.class)
public void testPutAlmostTooBigValue() throws IOException {
  testPutGetDeleteExists(10 << 20, true, true); // 10 MB
}

/**
 * Builds a value of roughly {@code size} bytes (optionally shrunk by the estimated cell-metadata
 * overhead so the whole cell fits the limit) and runs the put/get/delete cycle with it.
 */
void testPutGetDeleteExists(int size, boolean removeMetadataSize, boolean doGet)
    throws IOException {
  // Initialize variables
  byte[] testRowKey = dataHelper.randomData("testrow-");
  byte[] testQualifier = dataHelper.randomData("testQualifier-");
  int valueSize = size;
  if (removeMetadataSize) {
    // looks like in hbase 2.0 Cell size increased by 4. TODO verify it.
    int metadataSize = (20 + 4 + testRowKey.length + COLUMN_FAMILY.length + testQualifier.length);
    valueSize -= metadataSize;
  }
  byte[] testValue = new byte[valueSize];
  new Random().nextBytes(testValue);
  testPutGetDelete(doGet, testRowKey, testQualifier, testValue);
}

/**
 * Shared put / (optional get+verify) / delete / exists cycle, with per-step timing logged via
 * {@link Stopwatch}.
 */
private void testPutGetDelete(
    boolean doGet, byte[] rowKey, byte[] testQualifier, byte[] testValue) throws IOException {
  Table table = getDefaultTable();
  Stopwatch stopwatch = new Stopwatch();

  // Put
  Put put = new Put(rowKey);
  put.addColumn(COLUMN_FAMILY, testQualifier, testValue);
  table.put(put);
  stopwatch.print("Put took %d ms");

  // Get
  Get get = new Get(rowKey);
  get.addColumn(COLUMN_FAMILY, testQualifier);

  // Do the get on some tests, but not others. The rationale for that is to do performance
  // testing on large values.
  if (doGet) {
    Result result = table.get(get);
    stopwatch.print("Get took %d ms");
    Assert.assertTrue(result.containsColumn(COLUMN_FAMILY, testQualifier));
    List<Cell> cells = result.getColumnCells(COLUMN_FAMILY, testQualifier);
    Assert.assertEquals(1, cells.size());
    Assert.assertTrue(Arrays.equals(testValue, CellUtil.cloneValue(cells.get(0))));
    stopwatch.print("Verifying took %d ms");
  }

  // Delete
  Delete delete = new Delete(rowKey);
  delete.addColumns(COLUMN_FAMILY, testQualifier);
  table.delete(delete);
  stopwatch.print("Delete took %d ms");

  // Confirm deleted
  Assert.assertFalse(table.exists(get));
  stopwatch.print("Exists took %d ms");

  table.close();
  stopwatch.print("close took %d ms");
}

/** Logs elapsed wall-clock time since the previous checkpoint, then resets the checkpoint. */
private class Stopwatch {
  long lastCheckin = System.currentTimeMillis();

  private void print(String string) {
    long now = System.currentTimeMillis();
    // NOTE(review): callers pass printf-style "%d" format strings, but this is a parameterized
    // log call. Confirm that `logger` (declared elsewhere in this class) substitutes
    // printf-style arguments; a plain slf4j Logger would expect "{}" and leave "%d" unformatted.
    logger.info(string, now - lastCheckin);
    lastCheckin = now;
  }
}
}
/*
 * Copyright (C) 2013 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package okhttp3.internal.framed;

import okhttp3.internal.Util;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import okio.Buffer;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.ByteString;
import okio.GzipSink;
import okio.Okio;
import org.junit.Assert;
import org.junit.Test;

import static okhttp3.TestUtil.headerEntries;
import static okhttp3.internal.framed.Http2.FLAG_COMPRESSED;
import static okhttp3.internal.framed.Http2.FLAG_END_HEADERS;
import static okhttp3.internal.framed.Http2.FLAG_END_STREAM;
import static okhttp3.internal.framed.Http2.FLAG_NONE;
import static okhttp3.internal.framed.Http2.FLAG_PADDED;
import static okhttp3.internal.framed.Http2.FLAG_PRIORITY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests the HTTP/2 frame reader and writer: frames are hand-assembled into {@code frame}
 * byte-by-byte, then parsed with {@code fr}; several tests also check that the writer emits
 * exactly the hand-assembled bytes.
 */
public class Http2Test {
  final Buffer frame = new Buffer();
  // assumes Http2.Reader args are (source, header table size, client flag) — confirm signature.
  final FrameReader fr = new Http2.Reader(frame, 4096, false);
  final int expectedStreamId = 15;

  @Test public void unknownFrameTypeSkipped() throws IOException {
    writeMedium(frame, 4); // has a 4-byte field
    frame.writeByte(99); // type 99
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(expectedStreamId);
    frame.writeInt(111111111); // custom data

    fr.nextFrame(new BaseTestHandler()); // Should not callback.
  }

  @Test public void onlyOneLiteralHeadersFrame() throws IOException {
    final List<Header> sentHeaders = headerEntries("name", "value");

    Buffer headerBytes = literalHeaders(sentHeaders);
    writeMedium(frame, (int) headerBytes.size());
    frame.writeByte(Http2.TYPE_HEADERS);
    frame.writeByte(FLAG_END_HEADERS | FLAG_END_STREAM);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeAll(headerBytes);

    assertEquals(frame, sendHeaderFrames(true, sentHeaders)); // Check writer sends the same bytes.

    fr.nextFrame(new BaseTestHandler() {
      @Override public void headers(boolean outFinished, boolean inFinished, int streamId,
          int associatedStreamId, List<Header> headerBlock, HeadersMode headersMode) {
        assertFalse(outFinished);
        assertTrue(inFinished);
        assertEquals(expectedStreamId, streamId);
        assertEquals(-1, associatedStreamId);
        assertEquals(sentHeaders, headerBlock);
        assertEquals(HeadersMode.HTTP_20_HEADERS, headersMode);
      }
    });
  }

  @Test public void headersWithPriority() throws IOException {
    final List<Header> sentHeaders = headerEntries("name", "value");

    Buffer headerBytes = literalHeaders(sentHeaders);
    // +5 = 4-byte stream dependency + 1-byte weight carried by FLAG_PRIORITY.
    writeMedium(frame, (int) (headerBytes.size() + 5));
    frame.writeByte(Http2.TYPE_HEADERS);
    frame.writeByte(FLAG_END_HEADERS | FLAG_PRIORITY);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeInt(0); // Independent stream.
    frame.writeByte(255); // Heaviest weight, zero-indexed.
    frame.writeAll(headerBytes);

    fr.nextFrame(new BaseTestHandler() {
      @Override public void priority(int streamId, int streamDependency, int weight,
          boolean exclusive) {
        assertEquals(0, streamDependency);
        assertEquals(256, weight); // wire value 255 + 1.
        assertFalse(exclusive);
      }

      @Override public void headers(boolean outFinished, boolean inFinished, int streamId,
          int associatedStreamId, List<Header> nameValueBlock, HeadersMode headersMode) {
        assertFalse(outFinished);
        assertFalse(inFinished);
        assertEquals(expectedStreamId, streamId);
        assertEquals(-1, associatedStreamId);
        assertEquals(sentHeaders, nameValueBlock);
        assertEquals(HeadersMode.HTTP_20_HEADERS, headersMode);
      }
    });
  }

  /** Headers are compressed, then framed. */
  @Test public void headersFrameThenContinuation() throws IOException {
    final List<Header> sentHeaders = largeHeaders();

    Buffer headerBlock = literalHeaders(sentHeaders);

    // Write the first headers frame.
    writeMedium(frame, Http2.INITIAL_MAX_FRAME_SIZE);
    frame.writeByte(Http2.TYPE_HEADERS);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.write(headerBlock, Http2.INITIAL_MAX_FRAME_SIZE);

    // Write the continuation frame, specifying no more frames are expected.
    writeMedium(frame, (int) headerBlock.size());
    frame.writeByte(Http2.TYPE_CONTINUATION);
    frame.writeByte(FLAG_END_HEADERS);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeAll(headerBlock);

    assertEquals(frame, sendHeaderFrames(false, sentHeaders)); // Check writer sends the same bytes.

    // Reading the above frames should result in a concatenated headerBlock.
    fr.nextFrame(new BaseTestHandler() {
      @Override public void headers(boolean outFinished, boolean inFinished, int streamId,
          int associatedStreamId, List<Header> headerBlock, HeadersMode headersMode) {
        assertFalse(outFinished);
        assertFalse(inFinished);
        assertEquals(expectedStreamId, streamId);
        assertEquals(-1, associatedStreamId);
        assertEquals(sentHeaders, headerBlock);
        assertEquals(HeadersMode.HTTP_20_HEADERS, headersMode);
      }
    });
  }

  @Test public void pushPromise() throws IOException {
    final int expectedPromisedStreamId = 11;

    final List<Header> pushPromise = Arrays.asList(
        new Header(Header.TARGET_METHOD, "GET"),
        new Header(Header.TARGET_SCHEME, "https"),
        new Header(Header.TARGET_AUTHORITY, "squareup.com"),
        new Header(Header.TARGET_PATH, "/")
    );

    // Write the push promise frame, specifying the associated stream ID.
    Buffer headerBytes = literalHeaders(pushPromise);
    writeMedium(frame, (int) (headerBytes.size() + 4)); // +4 = promised stream id.
    frame.writeByte(Http2.TYPE_PUSH_PROMISE);
    frame.writeByte(Http2.FLAG_END_PUSH_PROMISE);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeInt(expectedPromisedStreamId & 0x7fffffff);
    frame.writeAll(headerBytes);

    assertEquals(frame, sendPushPromiseFrames(expectedPromisedStreamId, pushPromise));

    fr.nextFrame(new BaseTestHandler() {
      @Override
      public void pushPromise(int streamId, int promisedStreamId, List<Header> headerBlock) {
        assertEquals(expectedStreamId, streamId);
        assertEquals(expectedPromisedStreamId, promisedStreamId);
        assertEquals(pushPromise, headerBlock);
      }
    });
  }

  /** Headers are compressed, then framed. */
  @Test public void pushPromiseThenContinuation() throws IOException {
    final int expectedPromisedStreamId = 11;
    final List<Header> pushPromise = largeHeaders();

    // Decoding the first header will cross frame boundaries.
    Buffer headerBlock = literalHeaders(pushPromise);

    // Write the first headers frame.
    writeMedium(frame, Http2.INITIAL_MAX_FRAME_SIZE);
    frame.writeByte(Http2.TYPE_PUSH_PROMISE);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeInt(expectedPromisedStreamId & 0x7fffffff);
    frame.write(headerBlock, Http2.INITIAL_MAX_FRAME_SIZE - 4); // -4 = promised stream id.

    // Write the continuation frame, specifying no more frames are expected.
    writeMedium(frame, (int) headerBlock.size());
    frame.writeByte(Http2.TYPE_CONTINUATION);
    frame.writeByte(FLAG_END_HEADERS);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeAll(headerBlock);

    assertEquals(frame, sendPushPromiseFrames(expectedPromisedStreamId, pushPromise));

    // Reading the above frames should result in a concatenated headerBlock.
    fr.nextFrame(new BaseTestHandler() {
      @Override
      public void pushPromise(int streamId, int promisedStreamId, List<Header> headerBlock) {
        assertEquals(expectedStreamId, streamId);
        assertEquals(expectedPromisedStreamId, promisedStreamId);
        assertEquals(pushPromise, headerBlock);
      }
    });
  }

  @Test public void readRstStreamFrame() throws IOException {
    writeMedium(frame, 4);
    frame.writeByte(Http2.TYPE_RST_STREAM);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeInt(ErrorCode.COMPRESSION_ERROR.httpCode);

    fr.nextFrame(new BaseTestHandler() {
      @Override public void rstStream(int streamId, ErrorCode errorCode) {
        assertEquals(expectedStreamId, streamId);
        assertEquals(ErrorCode.COMPRESSION_ERROR, errorCode);
      }
    });
  }

  @Test public void readSettingsFrame() throws IOException {
    final int reducedTableSizeBytes = 16;

    writeMedium(frame, 12); // 2 settings * 6 bytes (2 for the code and 4 for the value).
    frame.writeByte(Http2.TYPE_SETTINGS);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // Settings are always on the connection stream 0.
    frame.writeShort(1); // SETTINGS_HEADER_TABLE_SIZE
    frame.writeInt(reducedTableSizeBytes);
    frame.writeShort(2); // SETTINGS_ENABLE_PUSH
    frame.writeInt(0);

    fr.nextFrame(new BaseTestHandler() {
      @Override public void settings(boolean clearPrevious, Settings settings) {
        assertFalse(clearPrevious); // No clearPrevious in HTTP/2.
        assertEquals(reducedTableSizeBytes, settings.getHeaderTableSize());
        assertEquals(false, settings.getEnablePush(true));
      }
    });
  }

  @Test public void readSettingsFrameInvalidPushValue() throws IOException {
    writeMedium(frame, 6); // 2 for the code and 4 for the value
    frame.writeByte(Http2.TYPE_SETTINGS);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // Settings are always on the connection stream 0.
    frame.writeShort(2); // SETTINGS_ENABLE_PUSH must be 0 or 1...
    frame.writeInt(2); // ...2 is invalid.

    try {
      fr.nextFrame(new BaseTestHandler());
      fail();
    } catch (IOException e) {
      assertEquals("PROTOCOL_ERROR SETTINGS_ENABLE_PUSH != 0 or 1", e.getMessage());
    }
  }

  @Test public void readSettingsFrameInvalidSettingId() throws IOException {
    writeMedium(frame, 6); // 2 for the code and 4 for the value
    frame.writeByte(Http2.TYPE_SETTINGS);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // Settings are always on the connection stream 0.
    frame.writeShort(7); // old number for SETTINGS_INITIAL_WINDOW_SIZE
    frame.writeInt(1);

    try {
      fr.nextFrame(new BaseTestHandler());
      fail();
    } catch (IOException e) {
      assertEquals("PROTOCOL_ERROR invalid settings id: 7", e.getMessage());
    }
  }

  @Test public void readSettingsFrameNegativeWindowSize() throws IOException {
    writeMedium(frame, 6); // 2 for the code and 4 for the value
    frame.writeByte(Http2.TYPE_SETTINGS);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // Settings are always on the connection stream 0.
    frame.writeShort(4); // SETTINGS_INITIAL_WINDOW_SIZE
    frame.writeInt(Integer.MIN_VALUE);

    try {
      fr.nextFrame(new BaseTestHandler());
      fail();
    } catch (IOException e) {
      assertEquals("PROTOCOL_ERROR SETTINGS_INITIAL_WINDOW_SIZE > 2^31 - 1", e.getMessage());
    }
  }

  @Test public void readSettingsFrameNegativeFrameLength() throws IOException {
    writeMedium(frame, 6); // 2 for the code and 4 for the value
    frame.writeByte(Http2.TYPE_SETTINGS);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // Settings are always on the connection stream 0.
    frame.writeShort(5); // SETTINGS_MAX_FRAME_SIZE
    frame.writeInt(Integer.MIN_VALUE);

    try {
      fr.nextFrame(new BaseTestHandler());
      fail();
    } catch (IOException e) {
      assertEquals("PROTOCOL_ERROR SETTINGS_MAX_FRAME_SIZE: -2147483648", e.getMessage());
    }
  }

  @Test public void readSettingsFrameTooShortFrameLength() throws IOException {
    writeMedium(frame, 6); // 2 for the code and 4 for the value
    frame.writeByte(Http2.TYPE_SETTINGS);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // Settings are always on the connection stream 0.
    frame.writeShort(5); // SETTINGS_MAX_FRAME_SIZE
    frame.writeInt((int) Math.pow(2, 14) - 1); // one below the 2^14 minimum.

    try {
      fr.nextFrame(new BaseTestHandler());
      fail();
    } catch (IOException e) {
      assertEquals("PROTOCOL_ERROR SETTINGS_MAX_FRAME_SIZE: 16383", e.getMessage());
    }
  }

  @Test public void readSettingsFrameTooLongFrameLength() throws IOException {
    writeMedium(frame, 6); // 2 for the code and 4 for the value
    frame.writeByte(Http2.TYPE_SETTINGS);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // Settings are always on the connection stream 0.
    frame.writeShort(5); // SETTINGS_MAX_FRAME_SIZE
    frame.writeInt((int) Math.pow(2, 24)); // one above the 2^24 - 1 maximum.

    try {
      fr.nextFrame(new BaseTestHandler());
      fail();
    } catch (IOException e) {
      assertEquals("PROTOCOL_ERROR SETTINGS_MAX_FRAME_SIZE: 16777216", e.getMessage());
    }
  }

  @Test public void pingRoundTrip() throws IOException {
    final int expectedPayload1 = 7;
    final int expectedPayload2 = 8;

    writeMedium(frame, 8); // length
    frame.writeByte(Http2.TYPE_PING);
    frame.writeByte(Http2.FLAG_ACK);
    frame.writeInt(0); // connection-level
    frame.writeInt(expectedPayload1);
    frame.writeInt(expectedPayload2);

    // Check writer sends the same bytes.
    assertEquals(frame, sendPingFrame(true, expectedPayload1, expectedPayload2));

    fr.nextFrame(new BaseTestHandler() {
      @Override public void ping(boolean ack, int payload1, int payload2) {
        assertTrue(ack);
        assertEquals(expectedPayload1, payload1);
        assertEquals(expectedPayload2, payload2);
      }
    });
  }

  @Test public void maxLengthDataFrame() throws IOException {
    final byte[] expectedData = new byte[Http2.INITIAL_MAX_FRAME_SIZE];
    Arrays.fill(expectedData, (byte) 2);
    writeMedium(frame, expectedData.length);
    frame.writeByte(Http2.TYPE_DATA);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.write(expectedData);

    // Check writer sends the same bytes.
    assertEquals(frame, sendDataFrame(new Buffer().write(expectedData)));

    fr.nextFrame(new BaseTestHandler() {
      @Override public void data(boolean inFinished, int streamId, BufferedSource source,
          int length) throws IOException {
        assertFalse(inFinished);
        assertEquals(expectedStreamId, streamId);
        assertEquals(Http2.INITIAL_MAX_FRAME_SIZE, length);
        ByteString data = source.readByteString(length);
        for (byte b : data.toByteArray()) {
          assertEquals(2, b);
        }
      }
    });
  }

  /** We do not send SETTINGS_COMPRESS_DATA = 1, nor want to. Let's make sure we error.
   */
  @Test public void compressedDataFrameWhenSettingDisabled() throws IOException {
    byte[] expectedData = new byte[Http2.INITIAL_MAX_FRAME_SIZE];
    Arrays.fill(expectedData, (byte) 2);
    Buffer zipped = gzip(expectedData);
    int zippedSize = (int) zipped.size();
    writeMedium(frame, zippedSize);
    frame.writeByte(Http2.TYPE_DATA);
    frame.writeByte(FLAG_COMPRESSED);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    zipped.readAll(frame);

    try {
      fr.nextFrame(new BaseTestHandler());
      fail();
    } catch (IOException e) {
      assertEquals("PROTOCOL_ERROR: FLAG_COMPRESSED without SETTINGS_COMPRESS_DATA",
          e.getMessage());
    }
  }

  @Test public void readPaddedDataFrame() throws IOException {
    int dataLength = 1123;
    byte[] expectedData = new byte[dataLength];
    Arrays.fill(expectedData, (byte) 2);

    int paddingLength = 254;
    byte[] padding = new byte[paddingLength];
    Arrays.fill(padding, (byte) 0);

    writeMedium(frame, dataLength + paddingLength + 1); // +1 = pad-length octet.
    frame.writeByte(Http2.TYPE_DATA);
    frame.writeByte(FLAG_PADDED);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeByte(paddingLength);
    frame.write(expectedData);
    frame.write(padding);

    fr.nextFrame(assertData());
    assertTrue(frame.exhausted()); // Padding was skipped.
  }

  @Test public void readPaddedDataFrameZeroPadding() throws IOException {
    int dataLength = 1123;
    byte[] expectedData = new byte[dataLength];
    Arrays.fill(expectedData, (byte) 2);

    writeMedium(frame, dataLength + 1); // +1 = pad-length octet.
    frame.writeByte(Http2.TYPE_DATA);
    frame.writeByte(FLAG_PADDED);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeByte(0);
    frame.write(expectedData);

    fr.nextFrame(assertData());
  }

  @Test public void readPaddedHeadersFrame() throws IOException {
    int paddingLength = 254;
    byte[] padding = new byte[paddingLength];
    Arrays.fill(padding, (byte) 0);

    Buffer headerBlock = literalHeaders(headerEntries("foo", "barrr", "baz", "qux"));

    writeMedium(frame, (int) headerBlock.size() + paddingLength + 1); // +1 = pad-length octet.
    frame.writeByte(Http2.TYPE_HEADERS);
    frame.writeByte(FLAG_END_HEADERS | FLAG_PADDED);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeByte(paddingLength);
    frame.writeAll(headerBlock);
    frame.write(padding);

    fr.nextFrame(assertHeaderBlock());
    assertTrue(frame.exhausted()); // Padding was skipped.
  }

  @Test public void readPaddedHeadersFrameZeroPadding() throws IOException {
    Buffer headerBlock = literalHeaders(headerEntries("foo", "barrr", "baz", "qux"));

    writeMedium(frame, (int) headerBlock.size() + 1); // +1 = pad-length octet.
    frame.writeByte(Http2.TYPE_HEADERS);
    frame.writeByte(FLAG_END_HEADERS | FLAG_PADDED);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeByte(0);
    frame.writeAll(headerBlock);

    fr.nextFrame(assertHeaderBlock());
  }

  /** Headers are compressed, then framed. */
  @Test public void readPaddedHeadersFrameThenContinuation() throws IOException {
    int paddingLength = 254;
    byte[] padding = new byte[paddingLength];
    Arrays.fill(padding, (byte) 0);

    // Decoding the first header will cross frame boundaries.
    Buffer headerBlock = literalHeaders(headerEntries("foo", "barrr", "baz", "qux"));

    // Write the first headers frame.
    writeMedium(frame, (int) (headerBlock.size() / 2) + paddingLength + 1);
    frame.writeByte(Http2.TYPE_HEADERS);
    frame.writeByte(FLAG_PADDED);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeByte(paddingLength);
    frame.write(headerBlock, headerBlock.size() / 2);
    frame.write(padding);

    // Write the continuation frame, specifying no more frames are expected.
    writeMedium(frame, (int) headerBlock.size());
    frame.writeByte(Http2.TYPE_CONTINUATION);
    frame.writeByte(FLAG_END_HEADERS);
    frame.writeInt(expectedStreamId & 0x7fffffff);
    frame.writeAll(headerBlock);

    fr.nextFrame(assertHeaderBlock());
    assertTrue(frame.exhausted());
  }

  @Test public void tooLargeDataFrame() throws IOException {
    try {
      sendDataFrame(new Buffer().write(new byte[0x1000000]));
      fail();
    } catch (IllegalArgumentException e) {
      assertEquals("FRAME_SIZE_ERROR length > 16384: 16777216", e.getMessage());
    }
  }

  @Test public void windowUpdateRoundTrip() throws IOException {
    final long expectedWindowSizeIncrement = 0x7fffffff;

    writeMedium(frame, 4); // length
    frame.writeByte(Http2.TYPE_WINDOW_UPDATE);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(expectedStreamId);
    frame.writeInt((int) expectedWindowSizeIncrement);

    // Check writer sends the same bytes.
    assertEquals(frame, windowUpdate(expectedWindowSizeIncrement));

    fr.nextFrame(new BaseTestHandler() {
      @Override public void windowUpdate(int streamId, long windowSizeIncrement) {
        assertEquals(expectedStreamId, streamId);
        assertEquals(expectedWindowSizeIncrement, windowSizeIncrement);
      }
    });
  }

  @Test public void badWindowSizeIncrement() throws IOException {
    try {
      windowUpdate(0);
      fail();
    } catch (IllegalArgumentException e) {
      assertEquals("windowSizeIncrement == 0 || windowSizeIncrement > 0x7fffffffL: 0",
          e.getMessage());
    }
    try {
      windowUpdate(0x80000000L);
      fail();
    } catch (IllegalArgumentException e) {
      assertEquals("windowSizeIncrement == 0 || windowSizeIncrement > 0x7fffffffL: 2147483648",
          e.getMessage());
    }
  }

  @Test public void goAwayWithoutDebugDataRoundTrip() throws IOException {
    final ErrorCode expectedError = ErrorCode.PROTOCOL_ERROR;

    writeMedium(frame, 8); // Without debug data there's only 2 32-bit fields.
    frame.writeByte(Http2.TYPE_GOAWAY);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // connection-scope
    frame.writeInt(expectedStreamId); // last good stream.
    frame.writeInt(expectedError.httpCode);

    // Check writer sends the same bytes.
    assertEquals(frame, sendGoAway(expectedStreamId, expectedError, Util.EMPTY_BYTE_ARRAY));

    fr.nextFrame(new BaseTestHandler() {
      @Override public void goAway(
          int lastGoodStreamId, ErrorCode errorCode, ByteString debugData) {
        assertEquals(expectedStreamId, lastGoodStreamId);
        assertEquals(expectedError, errorCode);
        assertEquals(0, debugData.size());
      }
    });
  }

  @Test public void goAwayWithDebugDataRoundTrip() throws IOException {
    final ErrorCode expectedError = ErrorCode.PROTOCOL_ERROR;
    final ByteString expectedData = ByteString.encodeUtf8("abcdefgh");

    // Compose the expected GOAWAY frame without debug data.
    writeMedium(frame, 8 + expectedData.size());
    frame.writeByte(Http2.TYPE_GOAWAY);
    frame.writeByte(Http2.FLAG_NONE);
    frame.writeInt(0); // connection-scope
    frame.writeInt(0); // never read any stream!
    frame.writeInt(expectedError.httpCode);
    frame.write(expectedData.toByteArray());

    // Check writer sends the same bytes.
    assertEquals(frame, sendGoAway(0, expectedError, expectedData.toByteArray()));

    fr.nextFrame(new BaseTestHandler() {
      @Override public void goAway(
          int lastGoodStreamId, ErrorCode errorCode, ByteString debugData) {
        assertEquals(0, lastGoodStreamId);
        assertEquals(expectedError, errorCode);
        assertEquals(expectedData, debugData);
      }
    });
  }

  @Test public void frameSizeError() throws IOException {
    Http2.Writer writer = new Http2.Writer(new Buffer(), true);

    try {
      writer.frameHeader(0, 16777216, Http2.TYPE_DATA, FLAG_NONE);
      fail();
    } catch (IllegalArgumentException e) {
      // TODO: real max is based on settings between 16384 and 16777215
      assertEquals("FRAME_SIZE_ERROR length > 16384: 16777216", e.getMessage());
    }
  }

  @Test public void ackSettingsAppliesMaxFrameSize() throws IOException {
    int newMaxFrameSize = 16777215;

    Http2.Writer writer = new Http2.Writer(new Buffer(), true);

    writer.ackSettings(new Settings().set(Settings.MAX_FRAME_SIZE, 0, newMaxFrameSize));

    assertEquals(newMaxFrameSize, writer.maxDataLength());
    writer.frameHeader(0, newMaxFrameSize, Http2.TYPE_DATA, FLAG_NONE);
  }

  @Test public void streamIdHasReservedBit() throws IOException {
    Http2.Writer writer = new Http2.Writer(new Buffer(), true);

    try {
      int streamId = 3;
      streamId |= 1L << 31; // set reserved bit
      writer.frameHeader(streamId, Http2.INITIAL_MAX_FRAME_SIZE, Http2.TYPE_DATA, FLAG_NONE);
      fail();
    } catch (IllegalArgumentException e) {
      assertEquals("reserved bit set: -2147483645", e.getMessage());
    }
  }

  /** HPACK-encodes {@code sentHeaders} into a fresh buffer. */
  private Buffer literalHeaders(List<Header> sentHeaders) throws IOException {
    Buffer out = new Buffer();
    new Hpack.Writer(out).writeHeaders(sentHeaders);
    return out;
  }

  private Buffer sendHeaderFrames(boolean outFinished, List<Header> headers) throws IOException {
    Buffer out = new Buffer();
    new Http2.Writer(out, true).headers(outFinished, expectedStreamId, headers);
    return out;
  }

  private Buffer sendPushPromiseFrames(int streamId, List<Header> headers) throws IOException {
    Buffer out = new Buffer();
    new Http2.Writer(out, true).pushPromise(expectedStreamId, streamId, headers);
    return out;
  }

  private Buffer sendPingFrame(boolean ack, int payload1, int payload2) throws IOException {
    Buffer out = new Buffer();
    new Http2.Writer(out, true).ping(ack, payload1, payload2);
    return out;
  }

  private Buffer sendGoAway(int lastGoodStreamId, ErrorCode errorCode, byte[] debugData)
      throws IOException {
    Buffer out = new Buffer();
    new Http2.Writer(out, true).goAway(lastGoodStreamId, errorCode, debugData);
    return out;
  }

  private Buffer sendDataFrame(Buffer data) throws IOException {
    Buffer out = new Buffer();
    new Http2.Writer(out, true).dataFrame(expectedStreamId, FLAG_NONE, data,
        (int) data.size());
    return out;
  }

  private Buffer windowUpdate(long windowSizeIncrement) throws IOException {
    Buffer out = new Buffer();
    new Http2.Writer(out, true).windowUpdate(expectedStreamId, windowSizeIncrement);
    return out;
  }

  /** Handler asserting the "foo/barrr/baz/qux" header block used by the padded-headers tests. */
  private FrameReader.Handler assertHeaderBlock() {
    return new BaseTestHandler() {
      @Override public void headers(boolean outFinished, boolean inFinished, int streamId,
          int associatedStreamId, List<Header> headerBlock, HeadersMode headersMode) {
        assertFalse(outFinished);
        assertFalse(inFinished);
        assertEquals(expectedStreamId, streamId);
        assertEquals(-1, associatedStreamId);
        assertEquals(headerEntries("foo", "barrr", "baz", "qux"), headerBlock);
        assertEquals(HeadersMode.HTTP_20_HEADERS, headersMode);
      }
    };
  }

  /** Handler asserting the 1123-byte all-twos payload used by the padded-data tests. */
  private FrameReader.Handler assertData() {
    return new BaseTestHandler() {
      @Override public void data(boolean inFinished, int streamId, BufferedSource source,
          int length) throws IOException {
        assertFalse(inFinished);
        assertEquals(expectedStreamId, streamId);
        assertEquals(1123, length);
        ByteString data = source.readByteString(length);
        for (byte b : data.toByteArray()) {
          assertEquals(2, b);
        }
      }
    };
  }

  private static Buffer gzip(byte[] data) throws IOException {
    Buffer buffer = new Buffer();
    Okio.buffer(new GzipSink(buffer)).write(data).close();
    return buffer;
  }

  /** Create a sufficiently large header set to overflow Http2.INITIAL_MAX_FRAME_SIZE bytes. */
  private static List<Header> largeHeaders() {
    String[] nameValues = new String[32];
    char[] chars = new char[512];
    for (int i = 0; i < nameValues.length;) {
      Arrays.fill(chars, (char) i);
      // Each pair shares one 512-char string; i advances by 2 per iteration.
      nameValues[i++] = nameValues[i++] = String.valueOf(chars);
    }
    return headerEntries(nameValues);
  }

  /** Writes {@code i} as a 24-bit big-endian frame-length field. */
  private static void writeMedium(BufferedSink sink, int i) throws IOException {
    sink.writeByte((i >>> 16) & 0xff);
    sink.writeByte((i >>> 8) & 0xff);
    sink.writeByte(i & 0xff);
  }
}
// @(#)Globe.java 1/2004 // Copyright (c) 1998-2004, Distributed Real-time Computing Lab (DRCL) // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // 3. Neither the name of "DRCL" nor the names of its contributors may be used // to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // package drcl.inet.mac; import java.lang.*; import java.io.*; //for IOException import java.util.*; // for StringTokenizer import java.text.*; //for NumberFormat /** * This class extracts the altitude of any place on earth from the Globe database. <br> WARNING: there is some code that MAY need to be changed depending on your computer system. 
This is because the GLOBE data base is stored in a binary format and unix systems
 * must byte swap the data. PCs do not. The code can be found in subroutine
 * getGlobeData. Follow instructions in the comments. This implementation has been
 * tested on PCs with both Linux and Windows operating systems. If you are not sure
 * if it extracts the right data on your computer system, use GlobeTest in this
 * package to test it.
 * @author Honghai Zhang
 * @see drcl.inet.mac.GlobeTest
 */
public class Globe
{
	/**
	 * Constructs a Globe with transmitter and receiver's locations, number of sampling points
	 * and path containing the globe database.
	 */
	public Globe(double tx_lat, double tx_lon, double rx_lat, double rx_lon, int nPts, String globepath)
	{
		txLat = tx_lat;
		txLon = tx_lon;
		rxLat = rx_lat;
		rxLon = rx_lon;
		nPoints = nPts;
		globePath = globepath;
		lastTile = -1;		// no tile data file opened yet
		globeFile = null;
	}

	/**
	 * Constructs a Globe with the path containing the globe database.
	 * Positions must be supplied later via {@link #resetPosition}.
	 */
	public Globe(String globepath)
	{
		txLat = 0;
		txLon = 0;
		rxLat = 0;
		rxLon = 0;
		nPoints = 0;
		globePath = globepath;
		lastTile = -1;
		globeFile = null;
	}

	/**
	 * Reset transmitter and receiver's locations and number of sampling points.
	 */
	public void resetPosition(double tx_lat, double tx_lon, double rx_lat, double rx_lon, int nPts)
	{
		txLat = tx_lat;
		txLon = tx_lon;
		rxLat = rx_lat;
		rxLon = rx_lon;
		nPoints = nPts;
	}

	/******************************************************************
	Extract a path profile array from Tx to Rx from the GLOBE data base.
	Elevation points are in meters. The great circle SHORTEST path will be
	extracted from the data base. Longitude values should be in the range
	[-180 to +180 degrees], with +lon=East. <br>
	pfl = array to fill (result will be in meters); its length must be at
	least nPoints + 2:
	<blockquote>
	pfl(0) = number of points to get between Tx and Rx<br>
	pfl(1) = distance between points (meters)
	         thus, (pfl(0)-1)*pfl(1)=distance between Tx and Rx <br>
	pfl(2) = Tx elevation in meters <br>
	pfl(npoints+1) = Rx elevation in meters.
	</blockquote>
	Return positive if no GLOBE data exists or error in Globe data file. <br>
	Written for the NOAA's Globe Version 1.0 elevation data.
	*/
	public int get_pfl(double [] pfl)
	{
		double delta;
		ztht = GlobeElevation(txLon,txLat); // height at Tx
		if (ztht < -500) return 1; //error in Globe data file
		zrht = GlobeElevation(rxLon, rxLat); //height at Rx
		if (zrht < -500) return 2;
		// seed the dazel() state with the endpoints and compute azimuth/distance
		ztlat = txLat;
		ztlon = txLon;
		zrlat = rxLat;
		zrlon = rxLon;
		dazel(0); //calc ztaz & zdgc
		pfl[0] = nPoints;
		delta = zdgc/(double)(nPoints -1);	// great-circle spacing in km
		pfl[1] = delta * 1000;			// reported in meters
		pfl[2] = ztht;
		pfl[nPoints+1] = zrht;
		NumberFormat nf = NumberFormat.getNumberInstance();	// only used by the debug prints below
		//System.out.println("true = "+ zdgc);
		for (int i = 3; i <= nPoints; i++) {
			// walk along the great circle: set the target distance, then
			// dazel(1) computes the intermediate point (zrlat, zrlon)
			zdgc = (double)(i-2) * delta;
			dazel(1); // calc zrlat, zrlon
			//System.out.println(i+ " " + nf.format(zrlon) + " " + nf.format(zrlat) );
			double zz = GlobeElevation(zrlon, zrlat); // height at point
			if (zz < -500) return i;
			pfl[i] = zz;
		}
		return 0;
	}

	/** void DAZEL(MODE) Great circle calculations.
	TWO MODES--
	0 INPUT LAT AND LON OF END POINT
	  RETURN DISTANCE AND AZIMUTH TO END PT WITH ELEVATIONS
	1 INPUT BEARING (AZIMUTH) OF END POINT
	  RETURN LAT AND LON OF END POINT WITH ELEVATIONS
	MODE 0 INPUT PARAMETERS (THESE DEFINE LOCATION OF POINTS T (TRANSMITTER)
	AND R (RECEIVER) RELATIVE TO A SPHERICAL EARTH.
ZTLAT - LATITUDE (DECIMAL DEGREES NORTH OF EQUATOR) OF POINT T ZTLON - LONGITUDE (DECIMAL DEGREES EAST OF PRIME (GREENWICH) MERIDIAN) OF POINT T ZTHT - HEIGHT (METERS ABOVE MEAN SEA LEVEL) OF POINT T ZRLAT - LATITUDE (DECIMAL DEGREES NORTH OF EQUATOR) OF POINT R ZRLON - LONGITUDE (DECIMAL DEGREES EAST OF PRIME MERIDIAN OF POINT R ZRHT - HEIGHT (METERS ABOVE MEAN SEA LEVEL) OF POINT R OUTPUT PARAMETERS ZTAZ - AZUMUTH (DECIMAL DEGREES CLOCKWISE FROM NORTH) AT T OF R ZRAZ - AZIMUTH (DECIMAL DEGREES CLOCKWISE FROM NORTH) AT R OF T ZTELV - ELEVATION ANGLE (DECIMAL DEGREES ABOVE HORIZONTAL AT T OF STRAIGHT LINE BETWEEN T AND R ZRELV - ELEVATION ANGLE (DECIMAL DEGREES ABOVE HORIZONTAL AT R) OF STRAIGHT LINE BETWEEN T AND R ZTAKOF - TAKE-OFF ANGLE (DECIMAL DEGREES ABOVE HORIZONTAL AT T) OF REFRACTED RAY BETWEEN T AND R (ASSUMED 4/3 EARTH RADIUS) ZRAKOF - TAKE-OFF ANGLE (DECIMAL DEGREES ABOVE HORIZONTAL AT R) OF REFRACTED RAY BETWEEN T AND R (ASSUMED 4/3 EARTH RADIUS) ZD - STRAIGHT LINE DISTANCE (KILOMETERS) BETWEEN T AND R ZDGC - GREAT CIRCLE DISTANCE (KILOMETERS) BETWEEN T AND R MODE 1 INPUT PARAMETERS OUTPUT PARAMETERS ZTLAT ZRLAT ZTLON ZRLON ZTAZ RELEV,ZRAKOF ZDGC TELEV,ZTAKOF ALL OF THE ABOVE PARAMETERS START WITH THE LETTER Z AND ARE SINGLE PRECISION. ALL PROGRAM VARIABLES ARE DOUBLE PRECISION. PROGRAM IS UNPREDICTABLE FOR SEPARATIONS LESS THAN 0.00005 DEGREES, ABOUT 5 METERS. 
*/
	void dazel(int mode)
	{
		double pi = 3.141592653589793238462643;
		double rerth = 6370;		// spherical-earth radius, km
		double dtor = 0.01745329252;	// degrees -> radians
		double rtod = 57.29577951;	// radians -> degrees
		//temporary variables
		double tlats, tlons,thts, rlats, rlons, rhts;
		double delat, delon, adlat,adlon,delht ;
		double gc, sgc, d;
		double p;
		if (mode == 0) {
			// Mode 0: both endpoints known; compute azimuths and distances.
			tlats = ztlat; tlons = ztlon; thts = ztht * 1.E-3;	// heights converted to km
			rlats = zrlat; rlons = zrlon; rhts = zrht * 1.0E-3;
			// nudge latitudes off the poles to avoid singular trig below
			if (tlats <= - 90.0) tlats = -89.99;
			if (tlats >= 90.0 ) tlats = 89.99;
			if (rlats <= -90.0) rlats = -89.99;
			if (rlats >= 90.0 ) rlats = 89.99;
			delat = rlats - tlats;
			adlat = Math.abs(delat);
			delon = rlons - tlons;
			// normalize longitude difference into [-180, 180]
			while (delon < -180.0) delon += 360;
			while (delon > 180.0) delon -= 360;
			adlon = Math.abs(delon);
			delht = rhts - thts;
			if (adlon <= 1.0E-5) {
				if (adlat <= 1.E-5) {
					//point T and R have the same coordinate
					ztaz = 0;
					zraz = 0;
					if (delht < 0) {
						ztelv = -90; zrelv = 90;
						zd = - delht;	// purely vertical separation
						zdgc = 0.0;
					}
					else if (delht == 0.0) {
						ztelv = 0.0; zrelv = 0.0;
						zd = 0.0; zdgc = 0.0;
					}
					else {
						ztelv = 90; zrelv = -90;
						zd = delht; zdgc = 0.0;
					}
					return;
				}
				else {
					//point T and R has same longitude, distinct latitudes
					if (delat <= 0.0) { ztaz = 180.0; zraz = 0.0; }
					else { ztaz = 0.0; zraz = 180; }
					gc = adlat * dtor;
					sgc = Math.sin(0.5*gc);
					// chord length including the height difference
					d = Math.sqrt(delht * delht + 4.0 * (rerth + thts) * (rerth + rhts) * sgc * sgc);
					zd = d;
					zdgc = gc * rerth;
				}
			}
			else {
				//point R and T have distince longtitudes
				// work with the westernmost point as W and easternmost as E
				double wlat, elat;
				if (delon <= 0.0) { wlat = rlats * dtor; elat = tlats * dtor; }
				else { wlat = tlats * dtor; elat = rlats * dtor; }
				//calculate azimuths at points W and E
				double sdlat, sdlon, sadln, celat,cwlat;
				double cwaz, swaz, waz,ceaz, seaz, eaz;
				sdlat = Math.sin(0.5 * adlat *dtor);
				sdlon = Math.sin(0.5 * adlon*dtor);
				sadln = Math.sin(adlon * dtor);
				cwlat = Math.cos(wlat);
				celat = Math.cos(elat);
				// haversine-style intermediate: p = 1 - cos(gc)
				p = 2.0 * (sdlat * sdlat + sdlon * sdlon * cwlat * celat );
				sgc = Math.sqrt(p * (2.0-p));	// sin(gc)
				sdlat = Math.sin(elat - wlat);
				cwaz = (2.0 * celat * Math.sin(wlat) * sdlon * sdlon + sdlat)/sgc;
				swaz = sadln * celat/sgc;
				waz = Math.atan2(swaz, cwaz) * rtod;
				ceaz = (2.0 * cwlat * Math.sin(elat) * sdlon * sdlon - sdlat)/sgc;
				seaz = sadln * cwlat/sgc;
				eaz = Math.atan2(seaz, ceaz) * rtod;
				eaz = 360.0 - eaz;	// azimuth at E looks back westward
				if (delon <= 0.0) { ztaz = eaz; zraz = waz; }
				else { ztaz = waz; zraz = eaz; }
				//compute the straight line distance and great circle angle between T and R
				double cgc;
				d = Math.sqrt(delht * delht + 2.0 * (rerth + thts) * (rerth + rhts) * p);
				zd =d;
				cgc = 1.0 - p;
				gc = Math.atan2(sgc, cgc);
				zdgc = gc * rerth;
			}
		}
		else { //mode == 1
			// Mode 1: T, the bearing ztaz and the distance zdgc are known;
			// solve the spherical triangle for the end point (zrlat, zrlon).
			double tlatr, tlonr, tazr, colat, cosco, sinco;
			double cosgc, singc, cosb;
			double arg, b, arc;
			double rdlon, drlat;
			tlatr = ztlat * dtor;
			tlonr = ztlon * dtor;
			tazr = ztaz * dtor;
			gc = zdgc / rerth;
			colat = pi/2.0 - tlatr;
			cosco = Math.cos(colat);
			sinco = Math.sin(colat);
			cosgc = Math.cos(gc);
			singc = Math.sin(gc);
			// spherical law of cosines for the end point's colatitude
			cosb = cosco * cosgc + sinco * singc * Math.cos(tazr);
			arg = Math.max(0, (1- cosb * cosb));	// clamp guards against rounding
			b = Math.atan2(Math.sqrt(arg), cosb);
			arc = (cosgc - cosco* cosb) / (sinco * Math.sin(b));
			arg = Math.max(0, (1.0 - arc * arc));
			rdlon = Math.atan2(Math.sqrt(arg), arc);
			zrlat = (pi/2 - Math.abs(b)) * rtod;
			drlat = zrlat ;
			zrlat = Math.abs(drlat) * cosb / Math.abs(cosb);	// restore hemisphere sign
			zrlon = ztlon + Math.abs(rdlon) * rtod;
			if (ztaz > 180) zrlon = ztlon - (Math.abs(rdlon) * rtod);	// westward bearing
			thts = ztht * 1.0E-3;
			rhts = zrht * 1.0E-3;
			delht = rhts - thts;
			sgc = Math.sin(0.5* gc);
			d = Math.sqrt(delht * delht + 4.0 * (rerth + thts) * (rerth + rhts) * sgc * sgc);
		}
		//code for both mode
		// A = lower endpoint, B = higher endpoint (by height above sea level)
		double aht, bht;
		if (delht < 0) { aht = thts; bht = rhts; }
		else { aht = rhts; bht = thts; }
		double saelv, arg, aelv,belv;
		// elevation angle of the straight line between the endpoints
		saelv = 0.5 * (d * d + Math.abs(delht) * (rerth + aht + rerth + bht))/(d* (rerth + aht) );
		arg = Math.max(0, (1- saelv * saelv));
		aelv = Math.atan2(saelv, Math.sqrt(arg));
		belv = (aelv - gc)* rtod;
		aelv = - aelv * rtod;
		//compute take-off angels assuming 4/3 earth radius;
		double r4thd, aalt, balt, da, atakof, btakof;
		r4thd = rerth * 4.0/3.0;
		gc = 0.75 * gc;		// great-circle angle rescaled for the 4/3-earth model
		sgc = Math.sin(0.5 * gc);
		p = 2.0 * sgc * sgc;
		aalt = r4thd + aht;
		balt = r4thd + bht;
		da = Math.sqrt(delht * delht + 2.0 * aalt * balt * p);
		saelv = 0.5 * (da * da + Math.abs(delht) * (aalt + balt))/(da * aalt);
		arg = Math.max(0, (1 - saelv * saelv));
		atakof = Math.atan(saelv/Math.sqrt(arg));
		btakof = (atakof - gc) * rtod;
		atakof = - atakof * rtod;
		// route results to T/R according to which endpoint was lower
		if (delht < 0) {
			ztelv = aelv; zrelv = belv;
			ztakof = atakof; zrakof = btakof;
		}
		else {
			ztelv = belv; zrelv = aelv;
			ztakof = btakof; zrakof = atakof;
		}
	}

	/** extract the GLOBE elevation for (xxlon,xxlat)
	GLOBE_elevation= elevation in meters of point
	              = < -500 = file does not exist
	This should only happen if your data files are not in the directory
	specified by path.
	NOTE: GLOBE flags ocean values as -500. These routines change any -500
	value to 0. If you wish to identify ocean values, you should modify the
	code in get_GLOBE_data to suit your needs.
	************************************************************
	The elevation of the 4 points that contain the (xxlon,xxlat) are found
	and the elevation is interpolated. The 4 points are:
	    2  3
	    1  4
	************************************************************
	In order to get the same value at Latitude=-90 regardless of longitude,
	any Latitude below -89.99167 has been forced to = 2777 meters elevation.
	This is because the lowest latitude data record corresponds to
	latitude=-89.9916666666..., which is NOT the South Pole, and the values
	at different longitude are slightly different.
*************************************************************/ double GlobeElevation(double xxLon, double xxLat) { String indexFileName = globePath + File.separator + "globe.dat"; if (ionce == 0) { try { BufferedReader in = new BufferedReader(new FileReader(indexFileName) ); String line = in.readLine(); for (int i = 0; i < 16;i ++) { line = in.readLine(); StringTokenizer t = new StringTokenizer(line); tiles[i] = t.nextToken(); } in.close(); ionce = 1; } catch(IOException e) { System.out.println("File " + indexFileName+" not exists " + e.getMessage()); System.exit(1); } } if (xxLat < -89.99167) { return 2777; //south pole: 2777 meters } if (xxLon < 0) xxLon = xxLon + 360; Location loc = new Location(); Fraction frac = new Fraction(); FPosition fpos = new FPosition(); double [] elev = new double[4]; double elevation = 0; GlobeIndex(xxLat, xxLon, loc, frac); int y1 = loc.y+1; if (y1 > 21599) y1 = 21599; int x1 = loc.x + 1; if (x1 > 43199) x1 = 0; try { elev[0] = getElevation(loc); loc.y ++ ; elev[1] = getElevation(loc); loc.x++; elev[2] = getElevation(loc); loc.y--; elev[3] = getElevation(loc); elevation = GlobeInterp(frac, elev); //interpolate to find elevation at (xlat, xlon). } catch (IOException e) { System.out.println("Open or read Globe data error " + e.getMessage()); System.exit(1); } return elevation; } int getElevation(Location loc) throws IOException { Integer elevation = (Integer) elevationTable.get(loc); if (elevation != null ) { return elevation.intValue(); } else { FPosition fpos = new FPosition();; GlobeRecord(loc.x, loc.y, fpos); int elev = getGlobeData(fpos); elevationTable.put(loc.clone(), new Integer(elev)); return elev; } } /*********************************************************************** Open a GLOBE data file and get the elevation corresponding to a particular cell corner. <br> Return elevation value in meters. <br> The GLOBE database contains -500 to signify ocean. That value is converted to 0 in this routine. 
If you wish to do something different, do so in the routine. */ int getGlobeData(FPosition fpos) throws IOException { //do we need to open a new data file? if (lastTile != fpos.tile ) { // open a new Globe file if (lastTile != -1) globeFile.close(); String fileName = globePath + File.separator + tiles[fpos.tile-1]; globeFile = new RandomAccessFile(fileName, "r"); lastTile = fpos.tile; } globeFile.seek(fpos.record*2); int data = globeFile.readUnsignedShort(); //important to readUnsignedShort //swap the byte ordering, //I guess it is needed here because the file is stored using small-endian int tmp = data & 255; data = (short)(tmp << 8) + (data >>8); //important to convert type first if (data == -500) data = 0; return data; } /** Given: (ix,iy) - the Globe cell location Find: fpos(tile - tile index containing (x,y) record - record number within itile containing elevation */ void GlobeRecord(int ix,int iy, FPosition fpos) { int icol = ix % 10800; int jx = ix /10800 + 1; int jy; int irow; if (iy >= 16800) { jy = 0; irow = 21600 - iy; } else if (iy >= 10800) { jy = 1; irow = 16800 - iy; } else if (iy >= 4800) { jy = 2; irow = 10800 - iy; } else if (iy >= 0) { jy = 3; irow = 4800 - iy; } else { System.out.println("wrong parameters in Globe.GlobeRecord"); jy = -1; irow = 0; } fpos.tile = jx + jy * 4; fpos.record = (irow -1) * 10800 + icol ; //no plus 1 because java start from 0 return; } /** given: (xlat,xlon) find: Location loc(x,y) of lower left corner of cell containing (xlat,xlon) x will range from [0 to 43199]. 0 = -179.99583 longitude y will range from [0 to 21599]. 0 = -89.99583 Fraction frac(dx,dy) is used to interpolate. It is the fraction of cell where (xlat,xlon) is located. 
Note: Globe data base cell size = 30 seconds = 1/120 degree = .0083333
	*/
	void GlobeIndex(double xlat, double xlon, Location loc, Fraction frac)
	{
		double dlat, dlon, x,y;
		dlat = xlat + 90;		// shift latitude so the grid origin is the south edge
		y = dlat * 120 - 0.5;		// 120 cells per degree; -0.5 centers on the sample point
		loc.y = (int) y;
		dlon = xlon;
		if (xlon < 0) dlon = dlon+360;	// normalize longitude to [0, 360)
		if (dlon >= 180) {
			x = (dlon - 180) * 120 - 0.5;
			if (x < 0) x += 43200;	// wrap around the date line
		}
		else {
			x = (dlon + 180) * 120 - 0.5;
		}
		loc.x = (int) x;
		frac.dx = x - loc.x;
		frac.dy = y - loc.y;
		if ( frac.dy < 0) {		// clamp just below the southern grid edge
			loc.y = 0;
			frac.dy = 0;
		}
		return;
	}

	//bilinear interpolation routine
	// elev holds the 4 cell-corner elevations in the order: 1=lower-left,
	// 2=upper-left, 3=upper-right, 4=lower-right (see GlobeElevation).
	double GlobeInterp(Fraction frac, double [] elev)
	{
		double z01 = elev[0] + (elev[1] - elev[0]) * frac.dy;	// west edge
		double z23 = elev[3] + (elev[2] - elev[3] ) * frac.dy;	// east edge
		double zp = z01 + (z23 - z01) * frac.dx;
		return zp;
	}

	/** Release the open file resources. */
	public void dispose()
	{
		try {
			if (globeFile != null) globeFile.close();
		}
		catch (IOException e) {
			System.err.println("globeFile close error " + e.getMessage());
		}
	}

	//careful when use this function because it modifies the sender and receiver's location,
	//should only be used for calculating altitude.
	// Returns the great-circle distance in meters between the two coordinates.
	double distance(double tLat, double tLon, double rLat, double rLon)
	{
		ztlat = tLat;
		ztlon = tLon;
		zrlat = rLat;
		zrlon = rLon;
		dazel(0);
		return zdgc * 1000;	// zdgc is in kilometers
	}

	double txLat;	//transmitter latitude
	double txLon;	//transmitter longitude
	double rxLat;	//receiver latitude
	double rxLon;	//receiver longitude
	int nPoints;	// number of points from source to destination
	String globePath;	//path to the globe data.

	//variable used by dazel to calculate great circle paths.
	double ztlat, ztlon, ztht, zrlat, zrlon, zrht, ztaz, zraz;
	double ztelv, zrelv, zd, zdgc, ztakof, zrakof;

	//variable used for globe initiation
	static String [] tiles = new String[16];	// tile file names read from globe.dat
	static int ionce = 0;	//indicate whether the file globe.dat has been read.
//used for access the data file int lastTile; RandomAccessFile globeFile; class Fraction { public double dx, dy; //dx, dy is the fraction of cell where xlat, xlon is located }; static class FPosition { public int tile; // the index containing location(x,y), public int record; //record number within itile containing elevation. }; static Hashtable elevationTable = new Hashtable(1001); } class Location { public int x,y; Location () { x = y = 0; } Location (int x_, int y_) { x = x_; y = y_; } Location (Location that) { x = that.x; y = that.y; } public boolean equals(Object that_obj) { Location that_ = (Location) that_obj; boolean v = ( (x == that_.x) && (y == that_.y)); return v; } public int hashCode() { return x ^ ((y << 16) + (y >>16) ) ; } protected Object clone() { return new Location(x,y); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.util; /** * Copied from apache/hbase. * @see https://github.com/apache/hbase/blob/master/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedMutableByteRange.java */ @SuppressWarnings("java/inconsistent-equals-and-hashcode") // ignore lgtm error public class SimplePositionedMutableByteRange implements PositionedByteRange { public static final int UNSET_HASH_VALUE = -1; // Note to maintainers: Do not make these final, as the intention is to // reuse objects of this class /** * The array containing the bytes in this range. It will be &gt;= length. */ private byte[] bytes; /** * The index of the first byte in this range. {@code ByteRange.get(0)} will return bytes[offset]. */ private int offset; /** * The number of bytes in the range. Offset + length must be &lt;= bytes.length */ private int length; /** * Variable for lazy-caching the hashCode of this range. Useful for frequently used ranges, long-lived ranges, or long ranges. */ private int hash = UNSET_HASH_VALUE; /** * The current index into the range. Like {@link java.nio.ByteBuffer} position, it points to the next value that will be read/written in the * array. 
It provides the appearance of being 0-indexed, even though its value is calculated according to offset. * <p> * Position is considered transient and does not participate in {@link #equals(Object)} or {@link #hashCode()} comparisons. * </p> */ private int position = 0; private int limit = 0; public SimplePositionedMutableByteRange() { } public SimplePositionedMutableByteRange(int capacity) { this(new byte[capacity]); } public SimplePositionedMutableByteRange(byte[] bytes) { set(bytes); } public SimplePositionedMutableByteRange(byte[] bytes, int offset, int length) { set(bytes, offset, length); } @Override public PositionedByteRange unset() { this.position = 0; clearHashCache(); bytes = null; offset = 0; length = 0; return this; } @Override public PositionedByteRange set(int capacity) { this.position = 0; set(new byte[capacity]); this.limit = capacity; return this; } @Override public PositionedByteRange set(byte[] bytes) { this.position = 0; if (null == bytes) return unset(); clearHashCache(); this.bytes = bytes; this.offset = 0; this.length = bytes.length; this.limit = bytes.length; return this; } @Override public PositionedByteRange set(byte[] bytes, int offset, int length) { this.position = 0; if (null == bytes) return unset(); clearHashCache(); this.bytes = bytes; this.offset = offset; this.length = length; limit = length; return this; } @Override public int getOffset() { return offset; } @Override public int getPosition() { return position; } @Override public int getLimit() { return this.limit; } @Override public byte[] getBytes() { return bytes; } @Override public byte peek() { return bytes[offset + position]; } @Override public PositionedByteRange setPosition(int position) { this.position = position; return this; } @Override public int getRemaining() { return length - position; } @Override public byte get() { return get(position++); } @Override public byte get(int index) { return bytes[offset + index]; } @Override public PositionedByteRange get(byte[] dst) { 
if (0 == dst.length) return this; return this.get(dst, 0, dst.length); // be clear we're calling self, not super } @Override public PositionedByteRange get(byte[] dst, int offset, int length) { if (0 == length) return this; get(this.position, dst, offset, length); this.position += length; return this; } @Override public PositionedByteRange get(int index, byte[] dst, int offset, int length) { if (0 == length) return this; System.arraycopy(this.bytes, this.offset + index, dst, offset, length); return this; } @Override public short getShort() { short s = getShort(position); position += Bytes.SIZEOF_SHORT; return s; } @Override public int getInt() { int i = getInt(position); position += Bytes.SIZEOF_INT; return i; } @Override public long getLong() { long l = getLong(position); position += Bytes.SIZEOF_LONG; return l; } @Override public long getVLong() { long p = getVLong(position); position += getVLongSize(p); return p; } @Override public short getShort(int index) { int offset = this.offset + index; short n = 0; n ^= bytes[offset] & 0xFF; n <<= 8; n ^= bytes[offset + 1] & 0xFF; return n; } @Override public int getInt(int index) { int offset = this.offset + index; int n = 0; for (int i = offset; i < (offset + Bytes.SIZEOF_INT); i++) { n <<= 8; n ^= bytes[i] & 0xFF; } return n; } @Override public long getLong(int index) { int offset = this.offset + index; long l = 0; for (int i = offset; i < offset + Bytes.SIZEOF_LONG; i++) { l <<= 8; l ^= bytes[i] & 0xFF; } return l; } // Copied from com.google.protobuf.CodedInputStream v2.5.0 readRawVarint64 @Override public long getVLong(int index) { int shift = 0; long result = 0; while (shift < 64) { final byte b = get(index++); result |= (long) (b & 0x7F) << shift; if ((b & 0x80) == 0) { break; } shift += 7; } return result; } // end of copied from protobuf public static int getVLongSize(long val) { int rPos = 0; while ((val & ~0x7F) != 0) { val >>>= 7; rPos++; } return rPos + 1; } @Override public PositionedByteRange put(byte val) 
{ put(position++, val); return this; } @Override public PositionedByteRange put(byte[] val) { if (0 == val.length) return this; return this.put(val, 0, val.length); } @Override public PositionedByteRange put(int index, byte val) { bytes[offset + index] = val; return this; } @Override public PositionedByteRange put(int index, byte[] val) { if (0 == val.length) return this; return put(index, val, 0, val.length); } @Override public PositionedByteRange put(byte[] val, int offset, int length) { if (0 == length) return this; put(position, val, offset, length); this.position += length; return this; } @Override public PositionedByteRange put(int index, byte[] val, int offset, int length) { if (0 == length) return this; System.arraycopy(val, offset, this.bytes, this.offset + index, length); return this; } @Override public PositionedByteRange putShort(short val) { putShort(position, val); position += Bytes.SIZEOF_SHORT; return this; } @Override public PositionedByteRange putInt(int val) { putInt(position, val); position += Bytes.SIZEOF_INT; return this; } @Override public PositionedByteRange putLong(long val) { putLong(position, val); position += Bytes.SIZEOF_LONG; return this; } @Override public int putVLong(long val) { int len = putVLong(position, val); position += len; return len; } @Override public PositionedByteRange putShort(int index, short val) { // This writing is same as BB's putShort. When byte[] is wrapped in a BB and // call putShort(), // one can get the same result. bytes[offset + index + 1] = (byte) val; val >>= 8; bytes[offset + index] = (byte) val; clearHashCache(); return this; } @Override public PositionedByteRange putInt(int index, int val) { // This writing is same as BB's putInt. When byte[] is wrapped in a BB and // call getInt(), one // can get the same result. 
for (int i = Bytes.SIZEOF_INT - 1; i > 0; i--) { bytes[offset + index + i] = (byte) val; val >>>= 8; } bytes[offset + index] = (byte) val; clearHashCache(); return this; } @Override public PositionedByteRange putLong(int index, long val) { // This writing is same as BB's putLong. When byte[] is wrapped in a BB and // call putLong(), one // can get the same result. for (int i = Bytes.SIZEOF_LONG - 1; i > 0; i--) { bytes[offset + index + i] = (byte) val; val >>>= 8; } bytes[offset + index] = (byte) val; clearHashCache(); return this; } // Copied from com.google.protobuf.CodedOutputStream v2.5.0 writeRawVarint64 @Override public int putVLong(int index, long val) { int rPos = 0; while (true) { if ((val & ~0x7F) == 0) { bytes[offset + index + rPos] = (byte) val; break; } else { bytes[offset + index + rPos] = (byte) ((val & 0x7F) | 0x80); val >>>= 7; } rPos++; } clearHashCache(); return rPos + 1; } // end copied from protobuf @Override public int getLength() { return length; } @Override public int hashCode() { if (isHashCached()) {// hash is already calculated and cached return hash; } if (this.isEmpty()) {// return 0 for empty ByteRange hash = 0; return hash; } int off = offset; hash = 0; for (int i = 0; i < length; i++) { hash = 31 * hash + bytes[off++]; } return hash; } protected boolean isHashCached() { return hash != UNSET_HASH_VALUE; } protected void clearHashCache() { hash = UNSET_HASH_VALUE; } @Override public boolean isEmpty() { return isEmpty(this); } /** * @return true when {@code range} is of zero length, false otherwise. */ public static boolean isEmpty(PositionedByteRange range) { return range == null || range.getLength() == 0; } @Override public int compareTo(ByteRange other) { return Bytes.BYTES_COMPARATOR.compare(bytes, offset, length, other.getBytes(), other.getOffset(), other.getLength()); } }
package com.braintreegateway.integrationtest;

import com.braintreegateway.*;
import com.braintreegateway.SandboxValues.CreditCardNumber;
import com.braintreegateway.SandboxValues.TransactionAmount;
import java.util.*;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.BeforeEach;
import static org.junit.jupiter.api.Assertions.*;

/**
 * Integration tests for merchant onboarding through the OAuth client-credentials gateway:
 * single-currency and multi-currency creation, PayPal-only merchants, and validation errors.
 * Talks to a live development gateway — requires the sandbox credentials below.
 */
public class MerchantIT extends IntegrationTest {

    /**
     * Finds the merchant account whose id equals the given ISO currency code.
     *
     * @return the matching {@link MerchantAccount}, or {@code null} when none exists
     */
    private MerchantAccount getMerchantAccountForCurrency(Merchant merchant, String currency) {
        for (MerchantAccount merchantAccount : merchant.getMerchantAccounts()) {
            if (merchantAccount.getId().equals(currency)) {
                return merchantAccount;
            }
        }
        return null;
    }

    /** Recreates the default OAuth gateway before each test; several tests override it. */
    @BeforeEach
    public void createGateway() {
        this.gateway = new BraintreeGateway(
            "client_id$development$integration_client_id",
            "client_secret$development$integration_client_secret"
        );
    }

    @Test
    public void createMerchantTest() {
        MerchantRequest request = new MerchantRequest().
            email("name@email.com").
            countryCodeAlpha3("USA").
            paymentMethods(Arrays.asList("credit_card", "paypal")).
            scope("read_write,shared_vault_transactions");

        Result<Merchant> result = gateway.merchant().create(request);

        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getId() != null && !result.getTarget().getId().isEmpty());
        assertEquals("name@email.com", result.getTarget().getEmail());
        // When no company name is supplied, the gateway defaults it to the email.
        assertEquals("name@email.com", result.getTarget().getCompanyName());
        assertEquals("USA", result.getTarget().getCountryCodeAlpha3());
        assertEquals("US", result.getTarget().getCountryCodeAlpha2());
        assertEquals("840", result.getTarget().getCountryCodeNumeric());
        assertEquals("United States of America", result.getTarget().getCountryName());
        assertTrue(result.getTarget().getCredentials().getAccessToken().startsWith("access_token"));
        assertTrue(result.getTarget().getCredentials().getExpiresAt().after(Calendar.getInstance()));
        assertTrue(result.getTarget().getCredentials().getRefreshToken().startsWith("refresh_token"));
        // Fixed: expected value goes first — the arguments were swapped, which produced
        // misleading "expected X but was Y" messages on failure.
        assertEquals("read_write,shared_vault_transactions", result.getTarget().getCredentials().getScope());
        assertEquals("bearer", result.getTarget().getCredentials().getTokenType());
    }

    @Test
    public void createFailsWithInvalidPaymentMethods() {
        MerchantRequest request = new MerchantRequest().
            email("name@email.com").
            countryCodeAlpha3("USA").
            paymentMethods(Arrays.asList("fake_money"));

        Result<Merchant> result = gateway.merchant().create(request);

        assertFalse(result.isSuccess());
        assertEquals(ValidationErrorCode.MERCHANT_PAYMENT_METHODS_ARE_INVALID,
            result.getErrors().forObject("merchant").onField("payment-methods").get(0).getCode());
    }

    @Test
    public void createWithPayPalOnly() {
        this.gateway = new BraintreeGateway("client_id$development$signup_client_id", "client_secret$development$signup_client_secret");
        MerchantRequest request = new MerchantRequest().
            email("name2@email.com").
            countryCodeAlpha3("USA").
            paymentMethods(Arrays.asList("paypal")).
            payPalAccount().
                clientId("paypal_client_id").
                clientSecret("paypal_client_secret").
                done();

        Result<Merchant> result = gateway.merchant().create(request);

        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getId() != null && !result.getTarget().getId().isEmpty());
        assertTrue(result.getTarget().getCredentials().getAccessToken().startsWith("access_token"));
        assertTrue(result.getTarget().getCredentials().getRefreshToken().startsWith("refresh_token"));
        assertTrue(result.getTarget().getCredentials().getExpiresAt().after(Calendar.getInstance()));
        assertEquals("bearer", result.getTarget().getCredentials().getTokenType());
    }

    @Test
    public void payPalOnlyAccountCannotRunCreditCardTransactions() {
        this.gateway = new BraintreeGateway("client_id$development$signup_client_id", "client_secret$development$signup_client_secret");
        MerchantRequest merchantRequest = new MerchantRequest().
            email("name2@email.com").
            countryCodeAlpha3("USA").
            paymentMethods(Arrays.asList("paypal")).
            payPalAccount().
                clientId("paypal_client_id").
                clientSecret("paypal_client_secret").
                done();

        Result<Merchant> result = gateway.merchant().create(merchantRequest);
        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getId() != null && !result.getTarget().getId().isEmpty());

        // Authenticate as the freshly created PayPal-only merchant and attempt a card sale.
        BraintreeGateway gateway = new BraintreeGateway(result.getTarget().getCredentials().getAccessToken());
        TransactionRequest transactionRequest = new TransactionRequest().
            amount(TransactionAmount.AUTHORIZE.amount).
            creditCard().
                number(CreditCardNumber.VISA.number).
                expirationDate("05/2009").
                done();

        Result<Transaction> transactionResult = gateway.transaction().sale(transactionRequest);
        assertTrue(transactionResult.getMessage().contains("Merchant account does not support payment instrument."));
        assertFalse(transactionResult.isSuccess());
    }

    @Test
    public void createMultiCurrencyUSMerchant() {
        this.gateway = new BraintreeGateway("client_id$development$signup_client_id", "client_secret$development$signup_client_secret");
        MerchantRequest request = new MerchantRequest().
            email("name@email.com").
            countryCodeAlpha3("USA").
            companyName("Ziarog LTD").
            // Consistency: use the varargs form of Arrays.asList like every other test here.
            paymentMethods(Arrays.asList("credit_card", "paypal")).
            currencies(Arrays.asList("GBP", "USD"));

        Result<Merchant> result = gateway.merchant().create(request);

        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getId() != null && !result.getTarget().getId().isEmpty());
        assertEquals("name@email.com", result.getTarget().getEmail());
        assertEquals("Ziarog LTD", result.getTarget().getCompanyName());
        assertEquals("USA", result.getTarget().getCountryCodeAlpha3());
        assertEquals("US", result.getTarget().getCountryCodeAlpha2());
        assertEquals("840", result.getTarget().getCountryCodeNumeric());
        assertEquals("United States of America", result.getTarget().getCountryName());
        assertTrue(result.getTarget().getCredentials().getAccessToken().startsWith("access_token"));
        assertTrue(result.getTarget().getCredentials().getExpiresAt().after(Calendar.getInstance()));
        assertTrue(result.getTarget().getCredentials().getRefreshToken().startsWith("refresh_token"));
        assertEquals("bearer", result.getTarget().getCredentials().getTokenType());

        assertEquals(2, result.getTarget().getMerchantAccounts().size());

        // The merchant's home currency (USD for a US merchant) becomes the default account.
        MerchantAccount usdMerchantAccount = getMerchantAccountForCurrency(result.getTarget(), "USD");
        assertNotNull(usdMerchantAccount);
        assertEquals("USD", usdMerchantAccount.getCurrencyIsoCode());
        assertTrue(usdMerchantAccount.isDefault());

        MerchantAccount gbpMerchantAccount = getMerchantAccountForCurrency(result.getTarget(), "GBP");
        assertNotNull(gbpMerchantAccount);
        assertEquals("GBP", gbpMerchantAccount.getCurrencyIsoCode());
        assertFalse(gbpMerchantAccount.isDefault());
    }

    @Test
    public void createMultiCurrencyEUMerchant() {
        this.gateway = new BraintreeGateway("client_id$development$signup_client_id", "client_secret$development$signup_client_secret");
        MerchantRequest request = new MerchantRequest().
            email("name@email.com").
            countryCodeAlpha3("GBR").
            companyName("Ziarog LTD").
            paymentMethods(Arrays.asList("credit_card", "paypal")).
            currencies(Arrays.asList("GBP", "USD"));

        Result<Merchant> result = gateway.merchant().create(request);

        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getId() != null && !result.getTarget().getId().isEmpty());
        assertEquals("name@email.com", result.getTarget().getEmail());
        assertEquals("Ziarog LTD", result.getTarget().getCompanyName());
        assertEquals("GBR", result.getTarget().getCountryCodeAlpha3());
        assertEquals("GB", result.getTarget().getCountryCodeAlpha2());
        assertEquals("826", result.getTarget().getCountryCodeNumeric());
        assertEquals("United Kingdom", result.getTarget().getCountryName());
        assertTrue(result.getTarget().getCredentials().getAccessToken().startsWith("access_token"));
        assertTrue(result.getTarget().getCredentials().getExpiresAt().after(Calendar.getInstance()));
        assertTrue(result.getTarget().getCredentials().getRefreshToken().startsWith("refresh_token"));
        assertEquals("bearer", result.getTarget().getCredentials().getTokenType());

        assertEquals(2, result.getTarget().getMerchantAccounts().size());

        MerchantAccount usdMerchantAccount = getMerchantAccountForCurrency(result.getTarget(), "USD");
        assertNotNull(usdMerchantAccount);
        assertEquals("USD", usdMerchantAccount.getCurrencyIsoCode());
        assertFalse(usdMerchantAccount.isDefault());

        // For a GB merchant, GBP is the home currency and therefore the default account.
        MerchantAccount gbpMerchantAccount = getMerchantAccountForCurrency(result.getTarget(), "GBP");
        assertNotNull(gbpMerchantAccount);
        assertEquals("GBP", gbpMerchantAccount.getCurrencyIsoCode());
        assertTrue(gbpMerchantAccount.isDefault());
    }

    @Test
    public void createPayPalOnlyMultiCurrencyMerchant() {
        this.gateway = new BraintreeGateway("client_id$development$signup_client_id", "client_secret$development$signup_client_secret");
        MerchantRequest request = new MerchantRequest().
            email("name@email.com").
            countryCodeAlpha3("USA").
            companyName("Ziarog LTD").
            paymentMethods(Arrays.asList("paypal")).
            currencies(Arrays.asList("GBP", "USD")).
            payPalAccount().
                clientId("paypal_client_id").
                clientSecret("paypal_client_secret").
                done();

        Result<Merchant> result = gateway.merchant().create(request);

        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getId() != null && !result.getTarget().getId().isEmpty());
        assertEquals("name@email.com", result.getTarget().getEmail());
        assertEquals("Ziarog LTD", result.getTarget().getCompanyName());
        assertEquals("USA", result.getTarget().getCountryCodeAlpha3());
        assertEquals("US", result.getTarget().getCountryCodeAlpha2());
        assertEquals("840", result.getTarget().getCountryCodeNumeric());
        assertEquals("United States of America", result.getTarget().getCountryName());
        assertTrue(result.getTarget().getCredentials().getAccessToken().startsWith("access_token"));
        assertTrue(result.getTarget().getCredentials().getExpiresAt().after(Calendar.getInstance()));
        assertTrue(result.getTarget().getCredentials().getRefreshToken().startsWith("refresh_token"));
        assertEquals("bearer", result.getTarget().getCredentials().getTokenType());

        assertEquals(2, result.getTarget().getMerchantAccounts().size());

        MerchantAccount usdMerchantAccount = getMerchantAccountForCurrency(result.getTarget(), "USD");
        assertNotNull(usdMerchantAccount);
        assertEquals("USD", usdMerchantAccount.getCurrencyIsoCode());
        assertTrue(usdMerchantAccount.isDefault());

        MerchantAccount gbpMerchantAccount = getMerchantAccountForCurrency(result.getTarget(), "GBP");
        assertNotNull(gbpMerchantAccount);
        assertEquals("GBP", gbpMerchantAccount.getCurrencyIsoCode());
        assertFalse(gbpMerchantAccount.isDefault());
    }

    @Test
    public void createMultiCurrencyMerchantWithNoCurrenciesProvided() {
        this.gateway = new BraintreeGateway("client_id$development$signup_client_id", "client_secret$development$signup_client_secret");
        MerchantRequest request = new MerchantRequest().
            email("name@email.com").
            countryCodeAlpha3("JPN").
            paymentMethods(Arrays.asList("paypal")).
            payPalAccount().
                clientId("paypal_client_id").
                clientSecret("paypal_client_secret").
                done();

        Result<Merchant> result = gateway.merchant().create(request);

        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getId() != null && !result.getTarget().getId().isEmpty());
        assertEquals("name@email.com", result.getTarget().getEmail());
        assertEquals("name@email.com", result.getTarget().getCompanyName());
        assertEquals("JPN", result.getTarget().getCountryCodeAlpha3());
        assertEquals("JP", result.getTarget().getCountryCodeAlpha2());
        assertEquals("392", result.getTarget().getCountryCodeNumeric());
        assertEquals("Japan", result.getTarget().getCountryName());
        assertTrue(result.getTarget().getCredentials().getAccessToken().startsWith("access_token"));
        assertTrue(result.getTarget().getCredentials().getExpiresAt().after(Calendar.getInstance()));
        assertTrue(result.getTarget().getCredentials().getRefreshToken().startsWith("refresh_token"));
        assertEquals("bearer", result.getTarget().getCredentials().getTokenType());

        // Without an explicit currency list the gateway derives one account from the country (JPY).
        assertEquals(1, result.getTarget().getMerchantAccounts().size());
        assertEquals("JPY", result.getTarget().getMerchantAccounts().get(0).getId());
        assertEquals("JPY", result.getTarget().getMerchantAccounts().get(0).getCurrencyIsoCode());
        assertTrue(result.getTarget().getMerchantAccounts().get(0).isDefault());
    }

    @Test
    public void createMultiCurrencyMerchantWithUnsupportedCountryAndNoCurrencies() {
        this.gateway = new BraintreeGateway("client_id$development$signup_client_id", "client_secret$development$signup_client_secret");
        MerchantRequest request = new MerchantRequest().
            email("name@email.com").
            countryCodeAlpha3("YEM").
            paymentMethods(Arrays.asList("paypal")).
            payPalAccount().
                clientId("paypal_client_id").
                clientSecret("paypal_client_secret").
                done();

        Result<Merchant> result = gateway.merchant().create(request);

        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getId() != null && !result.getTarget().getId().isEmpty());
        assertEquals("name@email.com", result.getTarget().getEmail());
        assertEquals("name@email.com", result.getTarget().getCompanyName());
        assertEquals("YEM", result.getTarget().getCountryCodeAlpha3());
        assertEquals("YE", result.getTarget().getCountryCodeAlpha2());
        assertEquals("887", result.getTarget().getCountryCodeNumeric());
        assertEquals("Yemen", result.getTarget().getCountryName());
        assertTrue(result.getTarget().getCredentials().getAccessToken().startsWith("access_token"));
        assertTrue(result.getTarget().getCredentials().getExpiresAt().after(Calendar.getInstance()));
        assertTrue(result.getTarget().getCredentials().getRefreshToken().startsWith("refresh_token"));
        assertEquals("bearer", result.getTarget().getCredentials().getTokenType());

        // Countries with no supported local currency fall back to a single USD account.
        assertEquals(1, result.getTarget().getMerchantAccounts().size());
        assertEquals("USD", result.getTarget().getMerchantAccounts().get(0).getId());
        assertEquals("USD", result.getTarget().getMerchantAccounts().get(0).getCurrencyIsoCode());
        assertTrue(result.getTarget().getMerchantAccounts().get(0).isDefault());
    }

    @Test
    public void returnErrorIfInvalidCurrencyPassed() {
        this.gateway = new BraintreeGateway("client_id$development$signup_client_id", "client_secret$development$signup_client_secret");
        MerchantRequest request = new MerchantRequest().
            email("name@email.com").
            countryCodeAlpha3("USA").
            paymentMethods(Arrays.asList("paypal")).
            currencies(Arrays.asList("GBP", "FAKE")).
            payPalAccount().
                clientId("paypal_client_id").
                clientSecret("paypal_client_secret").
                done();

        Result<Merchant> result = gateway.merchant().create(request);

        assertFalse(result.isSuccess());
        assertEquals(
            ValidationErrorCode.MERCHANT_CURRENCIES_ARE_INVALID,
            result.getErrors().forObject("merchant").onField("currencies").get(0).getCode()
        );
    }
}
/*
 * Copyright (c) 2016 Goldman Sachs.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * and Eclipse Distribution License v. 1.0 which accompany this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 */

package org.eclipse.collections.impl.list.mutable;

import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.ListIterator;
import java.util.Random;
import java.util.concurrent.ExecutorService;

import net.jcip.annotations.GuardedBy;
import org.eclipse.collections.api.LazyIterable;
import org.eclipse.collections.api.block.HashingStrategy;
import org.eclipse.collections.api.block.function.Function;
import org.eclipse.collections.api.block.function.Function2;
import org.eclipse.collections.api.block.function.primitive.BooleanFunction;
import org.eclipse.collections.api.block.function.primitive.ByteFunction;
import org.eclipse.collections.api.block.function.primitive.CharFunction;
import org.eclipse.collections.api.block.function.primitive.DoubleFunction;
import org.eclipse.collections.api.block.function.primitive.FloatFunction;
import org.eclipse.collections.api.block.function.primitive.IntFunction;
import org.eclipse.collections.api.block.function.primitive.LongFunction;
import org.eclipse.collections.api.block.function.primitive.ShortFunction;
import org.eclipse.collections.api.block.predicate.Predicate;
import org.eclipse.collections.api.block.predicate.Predicate2;
import org.eclipse.collections.api.block.procedure.Procedure;
import org.eclipse.collections.api.block.procedure.primitive.ObjectIntProcedure;
import org.eclipse.collections.api.list.ImmutableList;
import org.eclipse.collections.api.list.MutableList;
import org.eclipse.collections.api.list.ParallelListIterable;
import org.eclipse.collections.api.list.primitive.MutableBooleanList;
import org.eclipse.collections.api.list.primitive.MutableByteList;
import org.eclipse.collections.api.list.primitive.MutableCharList;
import org.eclipse.collections.api.list.primitive.MutableDoubleList;
import org.eclipse.collections.api.list.primitive.MutableFloatList;
import org.eclipse.collections.api.list.primitive.MutableIntList;
import org.eclipse.collections.api.list.primitive.MutableLongList;
import org.eclipse.collections.api.list.primitive.MutableShortList;
import org.eclipse.collections.api.multimap.list.MutableListMultimap;
import org.eclipse.collections.api.ordered.OrderedIterable;
import org.eclipse.collections.api.partition.list.PartitionMutableList;
import org.eclipse.collections.api.stack.MutableStack;
import org.eclipse.collections.api.tuple.Pair;
import org.eclipse.collections.impl.collection.mutable.AbstractSynchronizedMutableCollection;
import org.eclipse.collections.impl.collection.mutable.SynchronizedCollectionSerializationProxy;
import org.eclipse.collections.impl.lazy.ReverseIterable;
import org.eclipse.collections.impl.lazy.parallel.list.SynchronizedParallelListIterable;

/**
 * A synchronized view of a {@link MutableList}. It is imperative that the user manually synchronize on the collection when iterating over it using the
 * standard JDK iterator or JDK 5 for loop, as per {@link Collections#synchronizedCollection(Collection)}.
 *
 * @see MutableList#asSynchronized()
 */
public class SynchronizedMutableList<T>
        extends AbstractSynchronizedMutableCollection<T>
        implements MutableList<T>, Serializable
{
    private static final long serialVersionUID = 2L;

    // Wraps the delegate and synchronizes on this wrapper itself.
    SynchronizedMutableList(MutableList<T> newCollection)
    {
        super(newCollection);
    }

    // Wraps the delegate and synchronizes on a caller-supplied lock object.
    SynchronizedMutableList(MutableList<T> newCollection, Object newLock)
    {
        super(newCollection, newLock);
    }

    /**
     * This method will take a MutableList and wrap it directly in a SynchronizedMutableList. It will
     * take any other non-GS-collection and first adapt it with a ListAdapter, and then return a
     * SynchronizedMutableList that wraps the adapter.
     */
    public static <E, L extends List<E>> SynchronizedMutableList<E> of(L list)
    {
        MutableList<E> mutableList =
                list instanceof MutableList ? (MutableList<E>) list : ListAdapter.adapt(list);
        return new SynchronizedMutableList<>(mutableList);
    }

    /**
     * This method will take a MutableList and wrap it directly in a SynchronizedMutableList. It will
     * take any other non-GS-collection and first adapt it with a ListAdapter, and then return a
     * SynchronizedMutableList that wraps the adapter. Additionally, a developer specifies which lock to use
     * with the collection.
     */
    public static <E, L extends List<E>> SynchronizedMutableList<E> of(L list, Object lock)
    {
        MutableList<E> mutableList =
                list instanceof MutableList ? (MutableList<E>) list : ListAdapter.adapt(list);
        return new SynchronizedMutableList<>(mutableList, lock);
    }

    // Narrows the inherited delegate to MutableList; access must happen while holding the lock.
    @Override
    @GuardedBy("getLock()")
    protected MutableList<T> getDelegate()
    {
        return (MutableList<T>) super.getDelegate();
    }

    // with/without/withAll/withoutAll delegate to the already-synchronized mutators and return this.
    @Override
    public MutableList<T> with(T element)
    {
        this.add(element);
        return this;
    }

    @Override
    public MutableList<T> without(T element)
    {
        this.remove(element);
        return this;
    }

    @Override
    public MutableList<T> withAll(Iterable<? extends T> elements)
    {
        this.addAllIterable(elements);
        return this;
    }

    @Override
    public MutableList<T> withoutAll(Iterable<? extends T> elements)
    {
        this.removeAllIterable(elements);
        return this;
    }

    @Override
    public MutableList<T> newEmpty()
    {
        synchronized (this.getLock())
        {
            // The new empty list gets its own synchronized wrapper (and its own lock).
            return this.getDelegate().newEmpty().asSynchronized();
        }
    }

    @Override
    public MutableList<T> clone()
    {
        synchronized (this.getLock())
        {
            return SynchronizedMutableList.of(this.getDelegate().clone());
        }
    }

    // Serializes the delegate via a proxy so the lock object is not serialized.
    protected Object writeReplace()
    {
        return new SynchronizedCollectionSerializationProxy<>(this.getDelegate());
    }

    @Override
    public boolean addAll(int index, Collection<? extends T> collection)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().addAll(index, collection);
        }
    }

    @Override
    public T set(int index, T element)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().set(index, element);
        }
    }

    @Override
    public void add(int index, T element)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().add(index, element);
        }
    }

    @Override
    public T remove(int index)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().remove(index);
        }
    }

    @Override
    public MutableList<T> subList(int fromIndex, int toIndex)
    {
        synchronized (this.getLock())
        {
            // The sub-list view shares this wrapper's lock so both stay mutually consistent.
            return SynchronizedMutableList.of(this.getDelegate().subList(fromIndex, toIndex), this.getLock());
        }
    }

    @Override
    public T get(int index)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().get(index);
        }
    }

    @Override
    public int lastIndexOf(Object o)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().lastIndexOf(o);
        }
    }

    // Not synchronized: per the class contract, callers must synchronize manually while iterating.
    @Override
    public ListIterator<T> listIterator()
    {
        return this.getDelegate().listIterator();
    }

    // Not synchronized: per the class contract, callers must synchronize manually while iterating.
    @Override
    public ListIterator<T> listIterator(int index)
    {
        return this.getDelegate().listIterator(index);
    }

    // The parallel view is wrapped so its operations share this wrapper's lock.
    @Override
    public ParallelListIterable<T> asParallel(ExecutorService executorService, int batchSize)
    {
        return new SynchronizedParallelListIterable<>(this.getDelegate().asParallel(executorService, batchSize), this.getLock());
    }

    @Override
    public int binarySearch(T key, Comparator<? super T> comparator)
    {
        synchronized (this.getLock())
        {
            return Collections.binarySearch(this, key, comparator);
        }
    }

    @Override
    public int binarySearch(T key)
    {
        synchronized (this.getLock())
        {
            return Collections.binarySearch((List<? extends Comparable<? super T>>) this, key);
        }
    }

    @Override
    public int indexOf(Object o)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().indexOf(o);
        }
    }

    @Override
    public MutableList<T> distinct()
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().distinct();
        }
    }

    @Override
    public MutableList<T> distinct(HashingStrategy<? super T> hashingStrategy)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().distinct(hashingStrategy);
        }
    }

    @Override
    public <S> boolean corresponds(OrderedIterable<S> other, Predicate2<? super T, ? super S> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().corresponds(other, predicate);
        }
    }

    @Override
    public void forEach(int fromIndex, int toIndex, Procedure<? super T> procedure)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().forEach(fromIndex, toIndex, procedure);
        }
    }

    @Override
    public MutableList<T> takeWhile(Predicate<? super T> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().takeWhile(predicate);
        }
    }

    @Override
    public void forEachWithIndex(int fromIndex, int toIndex, ObjectIntProcedure<? super T> objectIntProcedure)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().forEachWithIndex(fromIndex, toIndex, objectIntProcedure);
        }
    }

    @Override
    public MutableList<T> dropWhile(Predicate<? super T> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().dropWhile(predicate);
        }
    }

    @Override
    public PartitionMutableList<T> partitionWhile(Predicate<? super T> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().partitionWhile(predicate);
        }
    }

    @Override
    public int detectIndex(Predicate<? super T> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().detectIndex(predicate);
        }
    }

    @Override
    public int detectLastIndex(Predicate<? super T> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().detectLastIndex(predicate);
        }
    }

    @Override
    public MutableList<T> take(int count)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().take(count);
        }
    }

    @Override
    public MutableList<T> drop(int count)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().drop(count);
        }
    }

    @Override
    public void reverseForEach(Procedure<? super T> procedure)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().reverseForEach(procedure);
        }
    }

    // All sortThis* / reverseThis / shuffleThis mutators sort in place under the lock
    // and return this wrapper for chaining.
    @Override
    public MutableList<T> sortThis(Comparator<? super T> comparator)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThis(comparator);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThis()
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThis();
            return this;
        }
    }

    @Override
    public <V extends Comparable<? super V>> MutableList<T> sortThisBy(Function<? super T, ? extends V> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisBy(function);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThisByInt(IntFunction<? super T> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisByInt(function);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThisByBoolean(BooleanFunction<? super T> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisByBoolean(function);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThisByChar(CharFunction<? super T> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisByChar(function);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThisByByte(ByteFunction<? super T> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisByByte(function);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThisByShort(ShortFunction<? super T> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisByShort(function);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThisByFloat(FloatFunction<? super T> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisByFloat(function);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThisByLong(LongFunction<? super T> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisByLong(function);
            return this;
        }
    }

    @Override
    public MutableList<T> sortThisByDouble(DoubleFunction<? super T> function)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().sortThisByDouble(function);
            return this;
        }
    }

    @Override
    public MutableList<T> reverseThis()
    {
        synchronized (this.getLock())
        {
            this.getDelegate().reverseThis();
            return this;
        }
    }

    @Override
    public MutableList<T> shuffleThis()
    {
        synchronized (this.getLock())
        {
            this.getDelegate().shuffleThis();
            return this;
        }
    }

    @Override
    public MutableList<T> shuffleThis(Random rnd)
    {
        synchronized (this.getLock())
        {
            this.getDelegate().shuffleThis(rnd);
            return this;
        }
    }

    @Override
    public LazyIterable<T> asReversed()
    {
        synchronized (this.getLock())
        {
            // Lazy view over this wrapper; later traversal is only as safe as iteration on this
            // wrapper itself (caller must synchronize while consuming it).
            return ReverseIterable.adapt(this);
        }
    }

    @Override
    public MutableList<T> toReversed()
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().toReversed();
        }
    }

    @Override
    public MutableStack<T> toStack()
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().toStack();
        }
    }

    @Override
    public ImmutableList<T> toImmutable()
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().toImmutable();
        }
    }

    @Override
    public MutableList<T> tap(Procedure<? super T> procedure)
    {
        synchronized (this.getLock())
        {
            this.forEach(procedure);
            return this;
        }
    }

    @Override
    public MutableList<T> select(Predicate<? super T> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().select(predicate);
        }
    }

    @Override
    public <P> MutableList<T> selectWith(
            Predicate2<? super T, ? super P> predicate,
            P parameter)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().selectWith(predicate, parameter);
        }
    }

    @Override
    public MutableList<T> reject(Predicate<? super T> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().reject(predicate);
        }
    }

    @Override
    public <P> MutableList<T> rejectWith(Predicate2<? super T, ? super P> predicate, P parameter)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().rejectWith(predicate, parameter);
        }
    }

    @Override
    public PartitionMutableList<T> partition(Predicate<? super T> predicate)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().partition(predicate);
        }
    }

    @Override
    public <P> PartitionMutableList<T> partitionWith(Predicate2<? super T, ? super P> predicate, P parameter)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().partitionWith(predicate, parameter);
        }
    }

    @Override
    public <S> MutableList<S> selectInstancesOf(Class<S> clazz)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().selectInstancesOf(clazz);
        }
    }

    // collectBoolean .. collectShort: primitive-specializing transforms, each evaluated
    // eagerly under the lock and returning a new (unsynchronized) primitive list.
    @Override
    public MutableBooleanList collectBoolean(BooleanFunction<? super T> booleanFunction)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectBoolean(booleanFunction);
        }
    }

    @Override
    public MutableByteList collectByte(ByteFunction<? super T> byteFunction)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectByte(byteFunction);
        }
    }

    @Override
    public MutableCharList collectChar(CharFunction<? super T> charFunction)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectChar(charFunction);
        }
    }

    @Override
    public MutableDoubleList collectDouble(DoubleFunction<? super T> doubleFunction)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectDouble(doubleFunction);
        }
    }

    @Override
    public MutableFloatList collectFloat(FloatFunction<? super T> floatFunction)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectFloat(floatFunction);
        }
    }

    @Override
    public MutableIntList collectInt(IntFunction<? super T> intFunction)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectInt(intFunction);
        }
    }

    @Override
    public MutableLongList collectLong(LongFunction<? super T> longFunction)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectLong(longFunction);
        }
    }

    @Override
    public MutableShortList collectShort(ShortFunction<? super T> shortFunction)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectShort(shortFunction);
        }
    }

    @Override
    public <V> MutableList<V> collect(Function<? super T, ? extends V> function)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collect(function);
        }
    }

    @Override
    public <P, V> MutableList<V> collectWith(Function2<? super T, ? super P, ? extends V> function, P parameter)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectWith(function, parameter);
        }
    }

    @Override
    public <V> MutableList<V> collectIf(
            Predicate<? super T> predicate,
            Function<? super T, ? extends V> function)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().collectIf(predicate, function);
        }
    }

    @Override
    public <V> MutableList<V> flatCollect(Function<? super T, ? extends Iterable<V>> function)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().flatCollect(function);
        }
    }

    @Override
    public <V> MutableListMultimap<V, T> groupBy(Function<? super T, ? extends V> function)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().groupBy(function);
        }
    }

    @Override
    public <V> MutableListMultimap<V, T> groupByEach(Function<? super T, ? extends Iterable<V>> function)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().groupByEach(function);
        }
    }

    @Override
    public <S> MutableList<Pair<T, S>> zip(Iterable<S> that)
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().zip(that);
        }
    }

    @Override
    public MutableList<Pair<T, Integer>> zipWithIndex()
    {
        synchronized (this.getLock())
        {
            return this.getDelegate().zipWithIndex();
        }
    }

    @Override
    public MutableList<T> asUnmodifiable()
    {
        synchronized (this.getLock())
        {
            // Wraps this synchronized view, so reads stay synchronized and writes are rejected.
            return UnmodifiableMutableList.of(this);
        }
    }

    // Already synchronized — returning this avoids double wrapping.
    @Override
    public MutableList<T> asSynchronized()
    {
        return this;
    }
}
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.backends.lwjgl; import java.awt.Canvas; import java.util.HashMap; import java.util.Map; import org.lwjgl.LWJGLException; import org.lwjgl.opengl.Display; import com.badlogic.gdx.Application; import com.badlogic.gdx.ApplicationListener; import com.badlogic.gdx.Audio; import com.badlogic.gdx.Files; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Input; import com.badlogic.gdx.LifecycleListener; import com.badlogic.gdx.Net; import com.badlogic.gdx.Preferences; import com.badlogic.gdx.backends.openal.OpenALAudio; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.Clipboard; import com.badlogic.gdx.utils.GdxRuntimeException; /** An OpenGL surface fullscreen or in a lightweight window. 
*/
public class LwjglApplication implements Application {
    protected final LwjglGraphics graphics;
    // null when LwjglApplicationConfiguration.disableAudio is set.
    protected OpenALAudio audio;
    protected final LwjglFiles files;
    protected final LwjglInput input;
    protected final LwjglNet net;
    protected final ApplicationListener listener;
    // The dedicated render thread created by initialize(); joined by stop().
    protected Thread mainLoopThread;
    // Cleared by exit() (via a posted runnable) to terminate the main loop.
    protected boolean running = true;
    // Tasks posted from other threads; drained into executedRunnables under the
    // runnables lock, then run on the main loop thread without holding the lock.
    protected final Array<Runnable> runnables = new Array();
    protected final Array<Runnable> executedRunnables = new Array();
    // Guarded by synchronized (lifecycleListeners) everywhere it is touched.
    protected final Array<LifecycleListener> lifecycleListeners = new Array<LifecycleListener>();
    protected int logLevel = LOG_INFO;

    /** Creates a windowed application with an ad-hoc configuration (vsync enabled). */
    public LwjglApplication (ApplicationListener listener, String title, int width, int height, boolean useGL2) {
        this(listener, createConfig(title, width, height, useGL2));
    }

    /** Creates a 640x480 GL1.x application titled after the listener's class name. */
    public LwjglApplication (ApplicationListener listener) {
        this(listener, null, 640, 480, false);
    }

    public LwjglApplication (ApplicationListener listener, LwjglApplicationConfiguration config) {
        this(listener, config, new LwjglGraphics(config));
    }

    /** Renders into an AWT {@link Canvas} instead of creating a top-level window. */
    public LwjglApplication (ApplicationListener listener, boolean useGL2, Canvas canvas) {
        this(listener, new LwjglApplicationConfiguration(), new LwjglGraphics(canvas, useGL2));
    }

    public LwjglApplication (ApplicationListener listener, LwjglApplicationConfiguration config, Canvas canvas) {
        this(listener, config, new LwjglGraphics(canvas, config));
    }

    /**
     * Master constructor: loads natives, builds the backend modules, installs them
     * into the {@link Gdx} statics, and starts the main loop thread.
     * NOTE(review): the Gdx.* fields are global, so only one LwjglApplication can
     * meaningfully exist per JVM — presumably by design; confirm before reusing.
     */
    public LwjglApplication (ApplicationListener listener, LwjglApplicationConfiguration config, LwjglGraphics graphics) {
        LwjglNativesLoader.load();

        if (config.title == null) config.title = listener.getClass().getSimpleName();
        this.graphics = graphics;
        if (!LwjglApplicationConfiguration.disableAudio)
            audio = new OpenALAudio(config.audioDeviceSimultaneousSources, config.audioDeviceBufferCount,
                config.audioDeviceBufferSize);
        files = new LwjglFiles();
        input = new LwjglInput();
        net = new LwjglNet();
        this.listener = listener;

        // Publish the backend globally before the loop thread starts.
        Gdx.app = this;
        Gdx.graphics = graphics;
        Gdx.audio = audio;
        Gdx.files = files;
        Gdx.input = input;
        Gdx.net = net;
        initialize();
    }

    /** Builds the default configuration used by the convenience constructors. */
    private static LwjglApplicationConfiguration createConfig (String title, int width, int height, boolean useGL2) {
        LwjglApplicationConfiguration config = new LwjglApplicationConfiguration();
        config.title = title;
        config.width = width;
        config.height = height;
        config.useGL20 = useGL2;
        config.vSyncEnabled = true;
        return config;
    }

    /** Spawns the render thread. Any throwable escaping the loop disposes audio,
     * then is rethrown (wrapped in GdxRuntimeException unless already unchecked). */
    private void initialize () {
        mainLoopThread = new Thread("LWJGL Application") {
            @Override
            public void run () {
                graphics.setVSync(graphics.config.vSyncEnabled);
                try {
                    LwjglApplication.this.mainLoop();
                } catch (Throwable t) {
                    if (audio != null) audio.dispose();
                    if (t instanceof RuntimeException)
                        throw (RuntimeException)t;
                    else
                        throw new GdxRuntimeException(t);
                }
            }
        };
        mainLoopThread.start();
    }

    /**
     * The render loop, run entirely on {@link #mainLoopThread}.
     * Per iteration: pump OS messages, translate focus changes into pause/resume,
     * propagate resizes (canvas-embedded vs. top-level Display are handled
     * separately), run posted runnables, update input/audio, render if anything
     * requested it, and pace the loop with Display.sync.
     * On exit: pause+dispose listeners and the ApplicationListener, destroy the
     * display, dispose audio, and optionally System.exit.
     */
    void mainLoop () {
        Array<LifecycleListener> lifecycleListeners = this.lifecycleListeners;

        try {
            graphics.setupDisplay();
        } catch (LWJGLException e) {
            throw new GdxRuntimeException(e);
        }

        listener.create();
        listener.resize(graphics.getWidth(), graphics.getHeight());
        graphics.resize = false;

        int lastWidth = graphics.getWidth();
        int lastHeight = graphics.getHeight();

        graphics.lastTime = System.nanoTime();
        boolean wasActive = true;
        while (running) {
            Display.processMessages();
            if (Display.isCloseRequested()) exit();

            boolean isActive = Display.isActive();
            if (wasActive && !isActive) { // if it's just recently minimized from active state
                wasActive = false;
                // Lifecycle listeners are paused under the lock; the main listener after.
                synchronized (lifecycleListeners) {
                    for (LifecycleListener listener : lifecycleListeners)
                        listener.pause();
                }
                listener.pause();
            }
            if (!wasActive && isActive) { // if it's just recently focused from minimized state
                wasActive = true;
                // NOTE(review): resume order is main listener first here, the mirror
                // image of the pause path above — presumably intentional; confirm.
                listener.resume();
                synchronized (lifecycleListeners) {
                    for (LifecycleListener listener : lifecycleListeners)
                        listener.resume();
                }
            }

            boolean shouldRender = false;

            if (graphics.canvas != null) {
                // Embedded in an AWT canvas: track the canvas size ourselves.
                int width = graphics.canvas.getWidth();
                int height = graphics.canvas.getHeight();
                if (lastWidth != width || lastHeight != height) {
                    lastWidth = width;
                    lastHeight = height;
                    Gdx.gl.glViewport(0, 0, lastWidth, lastHeight);
                    listener.resize(lastWidth, lastHeight);
                    shouldRender = true;
                }
            } else {
                // Top-level window: mirror position/size into the config and react to resizes.
                graphics.config.x = Display.getX();
                graphics.config.y = Display.getY();
                if (graphics.resize || Display.wasResized() || Display.getWidth() != graphics.config.width
                    || Display.getHeight() != graphics.config.height) {
                    graphics.resize = false;
                    Gdx.gl.glViewport(0, 0, Display.getWidth(), Display.getHeight());
                    graphics.config.width = Display.getWidth();
                    graphics.config.height = Display.getHeight();
                    if (listener != null) listener.resize(Display.getWidth(), Display.getHeight());
                    graphics.requestRendering();
                }
            }

            if (executeRunnables()) shouldRender = true;

            // If one of the runnables set running to false, for example after an exit().
            if (!running) break;

            input.update();
            shouldRender |= graphics.shouldRender();
            input.processEvents();
            if (audio != null) audio.update();

            // backgroundFPS == -1 means "don't render at all while unfocused".
            if (!isActive && graphics.config.backgroundFPS == -1) shouldRender = false;
            int frameRate = isActive ? graphics.config.foregroundFPS : graphics.config.backgroundFPS;
            if (shouldRender) {
                graphics.updateTime();
                listener.render();
                Display.update(false);
            } else {
                // Sleeps to avoid wasting CPU in an empty loop.
                if (frameRate == -1) frameRate = 10;
                if (frameRate == 0) frameRate = graphics.config.backgroundFPS;
                if (frameRate == 0) frameRate = 30;
            }
            if (frameRate > 0) Display.sync(frameRate);
        }

        // Shutdown: pause+dispose everything, newest resources last.
        synchronized (lifecycleListeners) {
            for (LifecycleListener listener : lifecycleListeners) {
                listener.pause();
                listener.dispose();
            }
        }
        listener.pause();
        listener.dispose();
        Display.destroy();
        if (audio != null) audio.dispose();
        // NOTE(review): exits with status -1 even on a normal quit — confirm intended.
        if (graphics.config.forceExit) System.exit(-1);
    }

    /**
     * Drains the posted runnables (under the lock) and runs them on the calling
     * (main loop) thread without holding the lock, so runnables may post more work.
     * @return true if at least one runnable was executed
     */
    public boolean executeRunnables () {
        synchronized (runnables) {
            executedRunnables.addAll(runnables);
            runnables.clear();
        }
        if (executedRunnables.size == 0) return false;
        for (int i = 0; i < executedRunnables.size; i++)
            executedRunnables.get(i).run();
        executedRunnables.clear();
        return true;
    }

    @Override
    public ApplicationListener getApplicationListener () {
        return listener;
    }

    @Override
    public Audio getAudio () {
        return audio;
    }

    @Override
    public Files getFiles () {
        return files;
    }

    @Override
    public LwjglGraphics getGraphics () {
        return graphics;
    }

    @Override
    public Input getInput () {
        return input;
    }

    @Override
    public Net getNet () {
        return net;
    }

    @Override
    public ApplicationType getType () {
        return ApplicationType.Desktop;
    }

    @Override
    public int getVersion () {
        return 0;
    }

    /** Stops the main loop and blocks until the render thread has terminated.
     * An interrupt while joining is silently ignored. */
    public void stop () {
        running = false;
        try {
            mainLoopThread.join();
        } catch (Exception ex) {
        }
    }

    @Override
    public long getJavaHeap () {
        return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
    }

    /** No native-heap accounting on desktop; reports the Java heap instead. */
    @Override
    public long getNativeHeap () {
        return getJavaHeap();
    }

    // Cache of named preference instances; populated lazily by getPreferences.
    Map<String, Preferences> preferences = new HashMap<String, Preferences>();

    @Override
    public Preferences getPreferences (String name) {
        if (preferences.containsKey(name)) {
            return preferences.get(name);
        } else {
            Preferences prefs = new LwjglPreferences(name);
            preferences.put(name, prefs);
            return prefs;
        }
    }

    @Override
    public Clipboard getClipboard () {
        // A fresh wrapper per call; the underlying clipboard is the system one.
        return new LwjglClipboard();
    }

    /** Queues a runnable for execution on the main loop thread and wakes the
     * renderer so non-continuous rendering still processes it promptly. */
    @Override
    public void postRunnable (Runnable runnable) {
        synchronized (runnables) {
            runnables.add(runnable);
            Gdx.graphics.requestRendering();
        }
    }

    @Override
    public void debug (String tag, String message) {
        if (logLevel >= LOG_DEBUG) {
            System.out.println(tag + ": " + message);
        }
    }

    @Override
    public void debug (String tag, String message, Throwable exception) {
        if (logLevel >= LOG_DEBUG) {
            System.out.println(tag + ": " + message);
            exception.printStackTrace(System.out);
        }
    }

    @Override
    public void log (String tag, String message) {
        if (logLevel >= LOG_INFO) {
            System.out.println(tag + ": " + message);
        }
    }

    @Override
    public void log (String tag, String message, Exception exception) {
        if (logLevel >= LOG_INFO) {
            System.out.println(tag + ": " + message);
            exception.printStackTrace(System.out);
        }
    }

    @Override
    public void error (String tag, String message) {
        if (logLevel >= LOG_ERROR) {
            System.err.println(tag + ": " + message);
        }
    }

    @Override
    public void error (String tag, String message, Throwable exception) {
        if (logLevel >= LOG_ERROR) {
            System.err.println(tag + ": " + message);
            exception.printStackTrace(System.err);
        }
    }

    @Override
    public void setLogLevel (int logLevel) {
        this.logLevel = logLevel;
    }

    /** Requests a clean shutdown: the flag is cleared from inside the main loop
     * (via postRunnable) so the current frame finishes before teardown. */
    @Override
    public void exit () {
        postRunnable(new Runnable() {
            @Override
            public void run () {
                running = false;
            }
        });
    }

    @Override
    public void addLifecycleListener (LifecycleListener listener) {
        synchronized (lifecycleListeners) {
            lifecycleListeners.add(listener);
        }
    }

    @Override
    public void removeLifecycleListener (LifecycleListener listener) {
        synchronized (lifecycleListeners) {
            lifecycleListeners.removeValue(listener, true);
        }
    }
}
package se.z_app.zmote.gui;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Observable;
import java.util.Observer;

import com.actionbarsherlock.internal.nineoldandroids.animation.Animator;
import com.actionbarsherlock.internal.nineoldandroids.animation.Animator.AnimatorListener;
import com.actionbarsherlock.internal.nineoldandroids.animation.AnimatorSet;
import com.actionbarsherlock.internal.nineoldandroids.animation.ObjectAnimator;

import se.z_app.stb.Channel;
import se.z_app.stb.Program;
import se.z_app.stb.EPG;
import se.z_app.stb.api.RemoteControl;
import se.z_app.stb.api.RemoteControl.Button;
import se.z_app.zmote.epg.EPGContentHandler;
import se.z_app.zmote.epg.EPGQuery;
import android.content.Intent;
import android.gesture.GestureOverlayView;
import android.gesture.GestureOverlayView.OnGestureListener;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.RelativeLayout.LayoutParams;
import android.widget.TextView;

/**
 * A fragment containing the main view (the view where one can scroll between channels).
 * <p>
 * Channel logos are arranged as a five-slot carousel (leftleft, left, center, right,
 * rightright); swiping left/right rotates the carousel and zaps the set-top box,
 * swiping up/down changes volume. Channel data arrives asynchronously via
 * {@link AsyncDataLoader} and updates are pushed through the Observer interface.
 *
 * @author Rasmus Holm
 */
public class MainViewFragment extends Fragment implements OnGestureListener, Observer{
    private MainTabActivity main;
    // Root view inflated in onCreateView.
    private View v;
    // The five carousel slots; reassigned on every rotation.
    private ImageView left;
    private ImageView leftleft;
    private ImageView center;
    private ImageView right;
    private ImageView rightright;
    private int screenwidth;
    private int screenheight;
    // -1 means "not yet measured"; averaged over loaded channel icons.
    private int imagewidth = -1;
    private int imagehight = -1;
    // Container the carousel images are added to/removed from.
    private RelativeLayout r;
    // Alpha levels: center slot, its neighbours, and the outermost slots.
    private float alpha = 0.25F;
    private float middleAlpha = 0.5F;
    private float defaultAlpha = 0.7F;
    // True once setVariables() has captured the slot positions.
    private boolean posVar = false;
    // Cached layout positions/scales for each slot, captured once by setVariables().
    private float leftleftX;
    private float leftleftY;
    private float leftleftScale;
    private float leftX;
    private float leftY;
    private float leftScale;
    private float centerX;
    private float centerY;
    private float centerScale;
    private float rightX;
    private float rightY;
    private float rightScale;
    private float rightrightX;
    private float rightrightY;
    private float rightrightScale;
    // Guards against starting a rotation while one is in flight.
    private boolean isAnimationRunning = false;
    private int defaultAnimationDuration = 400;
    private int animationDuration;
    private ProgressBar programProgress;
    private TextView programText;
    private RelativeLayout programWrapper;
    private TextView channelName;
    // The program currently shown; used as the target for the social/chat button.
    private Program currentProgram;
    // Parallel lists: imageList.get(i) is the logo for channelList.get(i).
    private ArrayList<ImageView> imageList = new ArrayList<ImageView>();
    private ArrayList<Channel> channelList = new ArrayList<Channel>();
    private int currentChannelNr;
    // Timestamp of the last user-initiated zap; update() ignores pushes within 500 ms.
    private long lastChannelChange = System.currentTimeMillis();

    /**
     * Constructor for the MainViewFragment.
     * @param main the hosting tab activity (used for UI-thread dispatch and vibration)
     */
    public MainViewFragment(MainTabActivity main){
        this.main = main;
    }

    // Scratch field used to smuggle the loop index into the anonymous Runnable below.
    int tmp;

    /**
     * Observer callback: the set-top box reported a (possibly external) channel change.
     * Ignored if we zapped ourselves within the last 500 ms or an animation is running;
     * otherwise rotates the carousel to the reported channel on the UI thread
     * (at double speed when more than 2 slots away).
     */
    @Override
    public void update(Observable observable, Object data) {
        Channel channel = EPGContentHandler.instance().getCurrentChannel();
        long timeDiff = System.currentTimeMillis()-lastChannelChange;
        if(timeDiff > 500){
            if (!isAnimationRunning) {
                for(int i = 0; i< channelList.size(); i++){
                    // NOTE(review): substring match, and the reverse direction of the
                    // check in AsyncDataLoader (target.getUrl().contains(...)) — confirm
                    // URLs are exact enough that contains() can't match the wrong channel.
                    if(channelList.get(i).getUrl().contains(channel.getUrl())){
                        tmp = i;
                        main.runOnUiThread(new Runnable() {
                            int i = tmp;
                            @Override
                            public void run() {
                                if(Math.abs(currentChannelNr-i) < 3){
                                    rotateToChannel(i, defaultAnimationDuration);
                                }else{
                                    rotateToChannel(i, defaultAnimationDuration/2);
                                }
                            }
                        });
                        break;
                    }
                }
            }
        }
    }

    boolean clicked = false;
    // Minimum swipe distance (px) before a gesture is acted upon.
    float threshhold = 20;
    // True while a gesture that has not yet been consumed is in progress.
    boolean activeGest = false;

    /**
     * Handles the tactile gestures: a horizontal swipe past the threshold zaps
     * channel up/down, a vertical one sends volume up/down. The gesture is
     * consumed (activeGest cleared) on first recognition.
     * @param overlay
     * @param event
     */
    @Override
    public void onGesture(GestureOverlayView overlay, MotionEvent event) {
        if(activeGest){
            float currentX = event.getX();
            float currentY = event.getY();
            float dirX = x-currentX;
            float dirY = y-currentY;
            if(Math.abs(dirX) > threshhold || Math.abs(dirY) > threshhold ){
                activeGest = false;
                if(Math.abs(dirX) > Math.abs(dirY)){
                    //Changing Channel from swipe
                    if(dirX < 0 )
                        setChannel(currentChannelNr-1, defaultAnimationDuration);
                    else if (dirX > 0)
                        setChannel(currentChannelNr+1, defaultAnimationDuration);
                }else{
                    //Changing Volume from swipe
                    if(dirY < 0 )
                        RemoteControl.instance().sendButton(Button.VOLMINUS);
                    else if (dirY > 0)
                        RemoteControl.instance().sendButton(Button.VOLPLUS);
                }
            }
        }
    }

    /**
     * Gesture cancelled: stop tracking.
     * @param overlay
     * @param event
     */
    @Override
    public void onGestureCancelled(GestureOverlayView overlay, MotionEvent event) {
        activeGest = false;
    }

    /**
     * Gesture ended: stop tracking.
     * @param overlay
     * @param event
     */
    @Override
    public void onGestureEnded(GestureOverlayView overlay, MotionEvent event) {
        activeGest= false;
    }

    // Start coordinates of the gesture in progress.
    float x;
    float y;

    /**
     * Gesture started: record the origin for direction/threshold computation.
     * @param overlay
     * @param event
     */
    @Override
    public void onGestureStarted(GestureOverlayView overlay, MotionEvent event) {
        activeGest = true;
        x = event.getX();
        y = event.getY();
    }

    /**
     * Destroy of the view: unregister from the EPG observable to avoid leaks.
     */
    @Override
    public void onDestroy() {
        super.onDestroy();
        EPGContentHandler.instance().deleteObserver(this);
    }

    /**
     * Creation of the view: inflates the layout, hooks up gesture handling, and
     * kicks off the asynchronous EPG load.
     * @param inflater
     * @param container
     * @param savedInstanceState
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        EPGContentHandler.instance().addObserver(this);
        v = inflater.inflate(R.layout.fragment_main_view, null);
        r = (RelativeLayout)v.findViewById(R.id.rellativIconSpinner);
        GestureOverlayView gestures = (GestureOverlayView) v.findViewById(R.id.gestures);
        gestures.addOnGestureListener(this);
        new AsyncDataLoader().execute();
        return v;
    }

    /**
     * A setter for the channel, resolved by URL substring match against the
     * loaded channel list.
     * @param target The channel to be set
     * @return A boolean, false if animation is running (or no match found), true otherwise
     */
    public boolean setChannel(Channel target, int duration){
        if (!isAnimationRunning) {
            for(int i = 0; i< channelList.size(); i++){
                if(channelList.get(i).getUrl().contains(target.getUrl())){
                    setChannel(i, duration);
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * A setter for the channel: rotates the carousel and tells the set-top box
     * to launch the channel. Index wraps around; no-op if already current or
     * no channels are loaded.
     * @param channelNr The channelNr to be set
     * @param duration animation duration in ms
     */
    public void setChannel(int channelNr, int duration){
        lastChannelChange = System.currentTimeMillis();
        if(currentChannelNr == channelNr || imageList.size() == 0){
            return;
        }
        int size = imageList.size();
        channelNr = (channelNr+size)%size;
        rotateToChannel(channelNr, duration);
        Channel channel = channelList.get(channelNr);
        RemoteControl.instance().launch(channel);
    }

    /**
     * Rotates the view to the indicated channel, choosing the direction with the
     * fewest steps around the circular channel list (UI only — does not zap).
     * @param channelNr the channel number of the channel to rotate to
     * @param duration animation duration in ms per step
     */
    public void rotateToChannel(int channelNr, int duration){
        if(currentChannelNr == channelNr){
            return;
        }
        // Walk outward in both directions simultaneously; fin ends up as the signed
        // number of steps (positive = rotate left, negative = rotate right).
        int can1 = 0;
        int can2 = 0;
        int fin = 0;
        int size = imageList.size();
        channelNr = (channelNr+size)%size;
        for(int i = 0; i< size; i++){
            if((can1+currentChannelNr)%size == channelNr){
                fin = can1;
                break;
            }
            if((currentChannelNr-can2+size)%size == channelNr){
                fin = -can2;
                break;
            }
            can2++;
            can1++;
        }
        if(fin < 0){
            rotateRight(fin*(-1), duration);
        }else{
            rotateLeft(fin, duration);
        }
    }

    /**
     * The same as calling rotateRight(1) with the default duration.
     */
    private void rotateRight(){
        rotateRight(1, defaultAnimationDuration);
    }

    // Scratch field used to smuggle the remaining-turns count into the listener below.
    private int tmpInt;

    /**
     * Rotates the view one step to the right, then re-invokes itself from the
     * animation-end callback until the requested number of turns is done.
     * Each slot animates to the position/scale/alpha of its right-hand neighbour;
     * the image falling off the right edge is removed and a new one enters from
     * the left. The slot references are reassigned immediately after the
     * animations start.
     * @param turns the number of turns to rotate the view
     */
    private void rotateRight(int turns, int duration){
        animationDuration = duration;
        if(isAnimationRunning){
            return;
        }
        isAnimationRunning = true;
        if(!posVar){
            setVariables();
            posVar = true;
        }
        currentChannelNr = (currentChannelNr+imageList.size()-1)%imageList.size();
        ImageView newLeft = imageList.get((currentChannelNr+imageList.size()-2)%imageList.size());
        // Park the incoming image off-screen before making it visible.
        newLeft.setVisibility(View.INVISIBLE);
        ObjectAnimator.ofFloat(newLeft, "x", -300).setDuration(0).start();
        ObjectAnimator.ofFloat(newLeft, "y", -300).setDuration(0).start();
        newLeft.setVisibility(View.VISIBLE);
        // NOTE(review): addView throws if the view already has a parent; the catch
        // silently ignores that (best-effort re-add). Confirm this is intended.
        try{
            r.addView(newLeft);
        }catch(RuntimeException e){}
        AnimatorSet animatorSet = new AnimatorSet();
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(rightright, "x", rightrightX + 300),
                ObjectAnimator.ofFloat(rightright, "y", -300)
        );
        tmpInt = turns;
        animatorSet.addListener(new AnimatorListener() {
            // Captured at construction time: the outgoing view and remaining turns.
            ImageView tmp = rightright;
            int turns = tmpInt;
            @Override
            public void onAnimationStart(Animator animation) {
                isAnimationRunning = true;
                hideText();
            }
            @Override
            public void onAnimationRepeat(Animator animation) {}
            @Override
            public void onAnimationEnd(Animator animation) {
                r.removeView(tmp);
                isAnimationRunning = false;
                turns--;
                if(turns>0){
                    rotateRight(turns, animationDuration);
                }else{
                    showText();
                }
            }
            @Override
            public void onAnimationCancel(Animator animation) {
                r.removeView(tmp);
                isAnimationRunning = false;
                turns--;
                if(turns>0){
                    rotateRight(turns, animationDuration);
                }else{
                    showText();
                }
            }
        });
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(right, "x", rightrightX),
                ObjectAnimator.ofFloat(right, "y", rightrightY),
                ObjectAnimator.ofFloat(right, "scaleX", rightrightScale),
                ObjectAnimator.ofFloat(right, "scaleY", rightrightScale),
                ObjectAnimator.ofFloat(right, "alpha", defaultAlpha)
        );
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(center, "x", rightX),
                ObjectAnimator.ofFloat(center, "y", rightY),
                ObjectAnimator.ofFloat(center, "scaleX", rightScale),
                ObjectAnimator.ofFloat(center, "scaleY", rightScale),
                ObjectAnimator.ofFloat(center, "alpha", middleAlpha)
        );
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(left, "x", centerX),
                ObjectAnimator.ofFloat(left, "y", centerY),
                ObjectAnimator.ofFloat(left, "scaleX", centerScale),
                ObjectAnimator.ofFloat(left, "scaleY", centerScale),
                ObjectAnimator.ofFloat(left, "alpha", alpha)
        );
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(leftleft, "x", leftX),
                ObjectAnimator.ofFloat(leftleft, "y", leftY),
                ObjectAnimator.ofFloat(leftleft, "scaleX", leftScale),
                ObjectAnimator.ofFloat(leftleft, "scaleY", leftScale),
                ObjectAnimator.ofFloat(leftleft, "alpha", middleAlpha)
        );
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(newLeft, "x", leftleftX),
                ObjectAnimator.ofFloat(newLeft, "y", leftleftY),
                ObjectAnimator.ofFloat(newLeft, "scaleX", leftleftScale),
                ObjectAnimator.ofFloat(newLeft, "scaleY", leftleftScale),
                ObjectAnimator.ofFloat(newLeft, "alpha", defaultAlpha)
        );
        //Playing animation
        animatorSet.setDuration(animationDuration).start();
        // Shift the slot references one position to the right.
        rightright = right;
        right = center;
        center = left;
        left = leftleft;
        leftleft= newLeft;
    }

    /**
     * The same as calling rotateLeft(1) with the default duration.
     */
    private void rotateLeft(){
        rotateLeft(1, defaultAnimationDuration);
    }

    /**
     * Rotates the view one step to the left; mirror image of
     * {@link #rotateRight(int, int)} — see its comments for the mechanics.
     * @param turns the number of turns to rotate the view
     */
    private void rotateLeft(int turns, int duration){
        animationDuration = duration;
        if(isAnimationRunning){
            return;
        }
        isAnimationRunning = true;
        if(!posVar){
            setVariables();
            posVar = true;
        }
        currentChannelNr = (currentChannelNr+1)%imageList.size();
        ImageView newRight = imageList.get((currentChannelNr+2)%imageList.size());
        newRight.setVisibility(View.INVISIBLE);
        ObjectAnimator.ofFloat(newRight, "x", (rightX + 300)).setDuration(0).start();
        ObjectAnimator.ofFloat(newRight, "y", -300).setDuration(0).start();
        newRight.setVisibility(View.VISIBLE);
        try{
            r.addView(newRight);
        }catch(RuntimeException e){}
        AnimatorSet animatorSet = new AnimatorSet();
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(leftleft, "x", -300),
                ObjectAnimator.ofFloat(leftleft, "y", -300)
        );
        tmpInt = turns;
        animatorSet.addListener(new AnimatorListener() {
            ImageView tmp = leftleft;
            int turns = tmpInt;
            @Override
            public void onAnimationStart(Animator animation) {
                isAnimationRunning = true;
                hideText();
            }
            @Override
            public void onAnimationRepeat(Animator animation) {}
            @Override
            public void onAnimationEnd(Animator animation) {
                r.removeView(tmp);
                isAnimationRunning = false;
                turns--;
                if(turns>0){
                    rotateLeft(turns, animationDuration);
                }else{
                    showText();
                }
            }
            @Override
            public void onAnimationCancel(Animator animation) {
                r.removeView(tmp);
                isAnimationRunning = false;
                turns--;
                if(turns>0){
                    rotateLeft(turns, animationDuration);
                }else{
                    showText();
                }
            }
        });
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(left, "x", leftleftX),
                ObjectAnimator.ofFloat(left, "y", leftleftY),
                ObjectAnimator.ofFloat(left, "scaleX", leftleftScale),
                ObjectAnimator.ofFloat(left, "scaleY", leftleftScale),
                ObjectAnimator.ofFloat(left, "alpha", defaultAlpha)
        );
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(center, "x", leftX),
                ObjectAnimator.ofFloat(center, "y", leftY),
                ObjectAnimator.ofFloat(center, "scaleX", leftScale),
                ObjectAnimator.ofFloat(center, "scaleY", leftScale),
                ObjectAnimator.ofFloat(center, "alpha", middleAlpha)
        );
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(right, "x", centerX),
                ObjectAnimator.ofFloat(right, "y", centerY),
                ObjectAnimator.ofFloat(right, "scaleX", centerScale),
                ObjectAnimator.ofFloat(right, "scaleY", centerScale),
                ObjectAnimator.ofFloat(right, "alpha", alpha)
        );
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(rightright, "x", rightX),
                ObjectAnimator.ofFloat(rightright, "y", rightY),
                ObjectAnimator.ofFloat(rightright, "scaleX", rightScale),
                ObjectAnimator.ofFloat(rightright, "scaleY", rightScale),
                ObjectAnimator.ofFloat(rightright, "alpha", middleAlpha)
        );
        animatorSet.playTogether(
                ObjectAnimator.ofFloat(newRight, "x", rightrightX),
                ObjectAnimator.ofFloat(newRight, "y", rightrightY),
                ObjectAnimator.ofFloat(newRight, "scaleX", rightrightScale),
                ObjectAnimator.ofFloat(newRight, "scaleY", rightrightScale),
                ObjectAnimator.ofFloat(newRight, "alpha", defaultAlpha)
        );
        //Playing Animation
        animatorSet.setDuration(animationDuration).start();
        // Shift the slot references one position to the left.
        leftleft = left;
        left = center;
        center = right;
        right = rightright;
        rightright = newRight;
    }

    /**
     * Builds the view for the current channel: positions and scales the five
     * carousel slots around the measured container, then constructs the program
     * info panel (channel name, progress bar, program text, chat button).
     * Falls back to reloading data if the image list is empty.
     */
    private void buildForCurrentChannel(){
        // Guard against division by zero below; 96 px is a fallback icon width.
        if(imagewidth == 0){
            imagewidth = 96;
        }
        int padding = 10;
        if(imageList == null || imageList.isEmpty()){
            new AsyncDataLoader().execute();
            return;
        }
        leftleft = imageList.get((currentChannelNr+imageList.size()-2)%imageList.size());
        left = imageList.get((currentChannelNr+imageList.size()-1)%imageList.size());
        center = imageList.get(currentChannelNr);
        right = imageList.get((1+currentChannelNr)%imageList.size());
        rightright = imageList.get((2+currentChannelNr)%imageList.size());

        screenwidth = r.getMeasuredWidth();
        screenheight = r.getMeasuredHeight();
        Log.i("Screen", "Screen width = " + screenwidth);
        Log.i("Screen", "Screen height = " + screenheight);
        Log.i("Screen", "Image width = " + imagewidth);
        Log.i("Screen", "Image hight = " + imagehight);

        //LeftLeft
        // NOTE(review): integer division — the fractional part of the scale is
        // truncated here (unlike leftScale/rightScale below). Confirm intended.
        leftleftScale = 5*screenwidth/imagewidth/19; //screenheight/imagehight/6;
        LayoutParams params1 = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params1.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
        params1.setMargins((int)(imagewidth*(leftleftScale-1)/2), (int)(imagehight*(leftleftScale-1)/2), 0, 0);
        leftleft.setLayoutParams(params1);
        ObjectAnimator.ofFloat(leftleft, "scaleX", leftleftScale).setDuration(0).start();
        ObjectAnimator.ofFloat(leftleft, "scaleY", leftleftScale).setDuration(0).start();
        ObjectAnimator.ofFloat(leftleft, "alpha", defaultAlpha).setDuration(0).start();

        //Left
        leftScale = 3*(float)screenwidth/(float)imagewidth/(float)7;
        params1 = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params1.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
        params1.setMargins((int)(imagewidth*leftScale/4), (int)((imagehight*leftScale/2)+imagehight*leftleftScale/2+padding), 0, 0);
        left.setLayoutParams(params1);
        ObjectAnimator.ofFloat(left, "scaleX", leftScale).setDuration(0).start();
        ObjectAnimator.ofFloat(left, "scaleY", leftScale).setDuration(0).start();
        ObjectAnimator.ofFloat(left, "alpha", middleAlpha).setDuration(0).start();

        //Center
        centerScale = (float) (screenwidth/imagewidth);
        int centerHight = (int)(imagehight*centerScale);
        params1 = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params1.addRule(RelativeLayout.CENTER_HORIZONTAL);
        params1.addRule(RelativeLayout.ABOVE, R.id.imageVolDown);
        int voldownHight = v.findViewById(R.id.imageVolDown).getMeasuredHeight();
        params1.setMargins(0, 0, 0, (int)(voldownHight + (int)((imagehight*(centerScale)/4))));
        center.setLayoutParams(params1);
        ObjectAnimator.ofFloat(center, "scaleX", centerScale).setDuration(0).start();
        ObjectAnimator.ofFloat(center, "scaleY", centerScale).setDuration(0).start();
        ObjectAnimator.ofFloat(center, "alpha", alpha).setDuration(0).start();

        //RightRight
        rightrightScale = 5*screenwidth/imagewidth/19;
        params1 = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params1.setMargins(0, (int)(imagehight*(rightrightScale-1)/2), (int)(imagewidth*(rightrightScale-1)/2), 0);
        params1.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
        rightright.setLayoutParams(params1);
        ObjectAnimator.ofFloat(rightright, "scaleX", rightrightScale).setDuration(0).start();
        ObjectAnimator.ofFloat(rightright, "scaleY", rightrightScale ).setDuration(0).start();
        ObjectAnimator.ofFloat(rightright, "alpha", defaultAlpha).setDuration(0).start();

        //Right
        rightScale = 3*(float)screenwidth/(float)imagewidth/(float)7;
        params1 = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params1.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
        params1.setMargins(0, (int)((imagehight*rightScale/2)+imagehight*rightrightScale/2+padding), (int)(imagewidth*(rightScale-1)/2), 0);
        right.setLayoutParams(params1);
        ObjectAnimator.ofFloat(right, "scaleX", rightScale).setDuration(0).start();
        ObjectAnimator.ofFloat(right, "scaleY", rightScale ).setDuration(0).start();
        ObjectAnimator.ofFloat(right, "alpha", middleAlpha).setDuration(0).start();

        r.addView(leftleft);
        r.addView(left);
        r.addView(center);
        r.addView(right);
        r.addView(rightright);

        // Program info panel: channel name + progress bar + program text, with a
        // chat button anchored bottom-right.
        programWrapper = new RelativeLayout(r.getContext());
        params1 = new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
        params1.setMargins(20, screenheight-centerHight-voldownHight+2*padding, 20, voldownHight);
        programWrapper.setLayoutParams(params1);
        LinearLayout wrapper = new LinearLayout(r.getContext());
        wrapper.setOrientation(LinearLayout.VERTICAL);
        channelName = new TextView(v.getContext());
        channelName.setTextColor(0xFFFFFFFF);
        ImageButton socialIcon = new ImageButton(r.getContext());
        socialIcon.setBackgroundDrawable(r.getResources().getDrawable(R.drawable.social_icon_light));
        params1 = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params1.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
        params1.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
        socialIcon.setLayoutParams(params1);
        socialIcon.setOnClickListener(new View.OnClickListener() {
            // Opens the chat activity for the currently shown program.
            @Override
            public void onClick(View v) {
                if(currentProgram != null){
                    main.vibrate();
                    ZChatActivity.targetProgram = currentProgram;
                    Intent intent = new Intent(getActivity(), ZChatActivity.class);
                    getActivity().startActivity(intent);
                }
            }
        });
        programProgress = new ProgressBar(v.getContext(), null, android.R.attr.progressBarStyleHorizontal);
        programText = new TextView(v.getContext());
        programText.setTextColor(0xFFFFFFFF);
        showText();
        wrapper.addView(channelName);
        wrapper.addView(programProgress);
        wrapper.addView(programText);
        programWrapper.addView(wrapper);
        programWrapper.addView(socialIcon);
        r.addView(programWrapper);
    }

    /**
     * Hides the program info panel (used while a rotation animation runs).
     */
    private void hideText(){
        programWrapper.setVisibility(View.INVISIBLE);
    }

    /**
     * Shows the program info panel for the current channel: finds the program
     * airing now plus the next two by scanning the channel's schedule, fills the
     * text and the elapsed-time progress bar, and makes the panel visible.
     * Returns silently if no program has started yet.
     */
    private void showText(){
        String t = "";
        Channel channel = channelList.get(currentChannelNr);
        channelName.setText(channel.getName());
        Program currentProgram = null;
        Program nextProgram = null;
        Program nextNextProgram = null;
        Date now = new Date(System.currentTimeMillis());
        // Assumes the channel iterates programs in start-time order — TODO confirm.
        for (Program program : channel) {
            if(now.compareTo(program.getStart()) >= 0)
                currentProgram = program;
            else if(nextProgram == null){
                nextProgram = program;
            }else{
                nextNextProgram = program;
                break;
            }
        }
        if(currentProgram == null)
            return;
        this.currentProgram = currentProgram;
        String name = currentProgram.getName();
        String startTime = new SimpleDateFormat("HH:mm").format(currentProgram.getStart());
        String info = currentProgram.getLongText();
        name = trimString(name, 29);
        info = trimString(info, 260);
        t = "> " + startTime + " - " + name + "\n" ;
        t += info + "\n\n";
        if(nextProgram != null){
            String nextName = nextProgram.getName();
            String nextStartTime = new SimpleDateFormat("HH:mm").format(nextProgram.getStart());
            nextName = trimString(nextName, 29);
            t += "> " + nextStartTime + " - " + nextName + "\n";
        }
        if(nextNextProgram != null){
            String nextNextName = nextNextProgram.getName();
            String nextNextStartTime = new SimpleDateFormat("HH:mm").format(nextNextProgram.getStart());
            nextNextName = trimString(nextNextName, 29);
            t += "> " + nextNextStartTime + " - " + nextNextName;
        }
        programText.setText(t);
        // getDuration() is multiplied by 1000 — presumably seconds; confirm units.
        long startMilliTime = currentProgram.getStart().getTime();
        long endMilliTime = startMilliTime + currentProgram.getDuration()*1000;
        long nowMilliTime = System.currentTimeMillis();
        programProgress.setMax((int)(endMilliTime-startMilliTime));
        programProgress.setProgress((int)(nowMilliTime-startMilliTime));
        programWrapper.setVisibility(View.VISIBLE);
        programWrapper.bringToFront();
    }

    /**
     * Truncates a string to at most (max-1) characters, replacing the tail with
     * "..." when it exceeds max.
     * NOTE(review): substring(0, max-4) + "..." yields length max-1, not max —
     * confirm whether the off-by-one is intentional.
     */
    private String trimString(String s, int max){
        if(s.length() > max){
            s = s.substring(0, max-4) + "...";
        }
        return s;
    }

    /**
     * Captures the laid-out position of every carousel slot; called once before
     * the first rotation (the scale fields are set in buildForCurrentChannel).
     */
    private void setVariables(){
        leftleftX = leftleft.getLeft();
        leftleftY = leftleft.getTop();
        leftX = left.getLeft();
        leftY = left.getTop();
        centerX = center.getLeft();
        centerY = center.getTop();
        rightX = right.getLeft();
        rightY = right.getTop();
        rightrightX = rightright.getLeft();
        rightrightY = rightright.getTop();
    }

    /**
     * Asynchronous data loader: fetches the EPG off the UI thread, then (on the
     * UI thread) builds one ImageView per channel, tracks a running average of
     * icon dimensions, selects the set-top box's current channel, and builds
     * the carousel.
     */
    private class AsyncDataLoader extends AsyncTask<Integer, Integer, EPG>{
        @Override
        protected EPG doInBackground(Integer... params) {
            EPGQuery query = new EPGQuery();
            // NOTE(review): getEPG()/getCurrentChannel() called and discarded before
            // the final getEPG() — presumably warms a cache; confirm necessity.
            query.getEPG();
            query.getCurrentChannel();
            return query.getEPG();
        }

        @Override
        protected void onPostExecute(EPG epg) {
            if(epg == null){
                return;
            }
            v.findViewById(R.id.progressLoadingEPG).setVisibility(View.INVISIBLE);
            currentChannelNr = 0;
            for (Channel channel : epg) {
                Drawable draw = new BitmapDrawable(channel.getIcon());
                ImageView i = new ImageView(v.getContext());
                i.setImageDrawable(draw);
                i.setBackgroundColor(0xFFFFFFFF );
                i.setAdjustViewBounds(true);
                // Fold each icon's size into a running average, ignoring 0-sized icons.
                int width = draw.getMinimumWidth();
                if(imagewidth == -1 && width > 0){
                    imagewidth = width;
                }else if(width > 0){
                    imagewidth = (imagewidth+width)/2;
                }
                int hight = draw.getMinimumHeight();
                if(imagehight == -1 && hight > 0){
                    imagehight = hight;
                }else if(hight > 0){
                    imagehight = (imagehight+hight)/2;
                }
                i.invalidate();
                imageList.add(i);
                channelList.add(channel);
                i.setOnClickListener(new View.OnClickListener() {
                    // Snapshot of this channel's index at construction time.
                    int i = currentChannelNr;
                    @Override
                    public void onClick(View v) {
                        if(i != currentChannelNr && !isAnimationRunning){
                            main.vibrate();
                            setChannel(i, defaultAnimationDuration);
                        }
                    }
                });
                currentChannelNr++;
            }
            // Align the carousel with whatever channel the box is currently showing.
            EPGQuery query = new EPGQuery();
            Channel target = query.getCurrentChannel();
            currentChannelNr = 0;
            if(target != null){
                for(int i = 0; i< channelList.size(); i++){
                    if(channelList.get(i) != null)
                        if(channelList.get(i).getUrl() != null)
                            if(target.getUrl().contains(channelList.get(i).getUrl())){
                                currentChannelNr = i;
                                break;
                            }
                }
            }
            buildForCurrentChannel();
        }
    }
}
/*
 * Copyright 2001-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.axis.wsdl.toJava;

import org.apache.axis.utils.JavaUtils;
import org.apache.axis.utils.Messages;
import org.apache.axis.wsdl.symbolTable.TypeEntry;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Vector;

/**
 * This is Wsdl2java's Complex Type Writer.  It writes the &lt;typeName&gt;.java
 * file for an XML enumeration, generating a type-safe-enum style class whose
 * instances are interned in a static table and resolved via readResolve().
 */
public class JavaEnumTypeWriter extends JavaClassWriter {

    /** Base type (index 0) followed by the enumeration values (Strings). */
    private Vector elements;

    /** Symbol-table entry for the enumeration type being generated. */
    private TypeEntry type;

    /**
     * Constructor.
     *
     * @param emitter  the active emitter
     * @param type     the enumeration type to generate a class for
     * @param elements vector holding the base type (first index) and the
     *                 enumeration values (subsequent Strings)
     */
    protected JavaEnumTypeWriter(Emitter emitter, TypeEntry type,
                                 Vector elements) {
        super(emitter, type.getName(), "enumType");
        this.elements = elements;
        this.type = type;
    }    // ctor

    /**
     * Return "implements java.io.Serializable ".
     *
     * @return the implements clause of the generated class
     */
    protected String getImplementsText() {
        return "implements java.io.Serializable ";
    }    // getImplementsText

    /**
     * Map a (possibly primitive) base type name onto the class used for
     * table keys and parsing in the generated code.  Primitives map onto
     * their java.lang wrapper; anything else maps onto itself.
     *
     * @param baseType java name of the enumeration's base type
     * @return wrapper class name, or baseType unchanged if not primitive
     */
    private static String getBaseClass(String baseType) {
        if (baseType.startsWith("int")) {
            return "java.lang.Integer";
        } else if (baseType.startsWith("char")) {
            return "java.lang.Character";
        } else if (baseType.startsWith("short")) {
            return "java.lang.Short";
        } else if (baseType.startsWith("long")) {
            return "java.lang.Long";
        } else if (baseType.startsWith("double")) {
            return "java.lang.Double";
        } else if (baseType.startsWith("float")) {
            return "java.lang.Float";
        } else if (baseType.startsWith("byte")) {
            return "java.lang.Byte";
        }
        return baseType;
    }    // getBaseClass

    /**
     * Generate the binding for the given enumeration type.
     * The values vector contains the base type (first index) and
     * the values (subsequent Strings).
     *
     * @param pw destination for the generated source
     * @throws IOException on write failure
     */
    protected void writeFileBody(PrintWriter pw) throws IOException {

        // Get the java name of the type
        String javaName = getClassName();

        // The first index is the base type.
        // The base type could be a non-object; if so get the corresponding class.
        String baseType = ((TypeEntry) elements.get(0)).getName();
        String baseClass = getBaseClass(baseType);

        // Create a list of the literal values, converted into valid Java
        // literal/constructor expressions for the generated source.
        Vector values = new Vector();

        for (int i = 1; i < elements.size(); i++) {
            String value = (String) elements.get(i);

            if (baseClass.equals("java.lang.String")) {
                value = "\"" + value + "\"";    // Surround literal with double quotes
            } else if (baseClass.equals("java.lang.Character")) {
                value = "'" + value + "'";
            } else if (baseClass.equals("java.lang.Float")) {
                if (!value.endsWith("F") &&    // Indicate float literal so javac
                        !value.endsWith("f")) {    // doesn't complain about precision.
                    value += "F";
                }
            } else if (baseClass.equals("java.lang.Long")) {
                if (!value.endsWith("L") &&    // Indicate long literal so javac
                        !value.endsWith("l")) {    // doesn't complain about precision.
                    value += "L";
                }
            } else if (baseClass.equals("javax.xml.namespace.QName")) {
                // Resolve the prefixed name against the type's node, then
                // reconstruct it at runtime via QName.valueOf.
                value = org.apache.axis.wsdl.symbolTable.Utils
                        .getQNameFromPrefixedName(type.getNode(), value)
                        .toString();
                value = "javax.xml.namespace.QName.valueOf(\"" + value + "\")";
            } else if (baseClass.equals(baseType)) {
                // Construct baseClass object with literal string
                value = "new " + baseClass + "(\"" + value + "\")";
            }

            values.add(value);
        }

        // Create a list of ids
        Vector ids = getEnumValueIds(elements);

        // Each object has a private _value_ variable to store the base value
        pw.println(" private " + baseType + " _value_;");

        // The enumeration values are kept in a hashtable
        pw.println( " private static java.util.HashMap _table_ = new java.util.HashMap();");
        pw.println("");

        // A protected constructor is used to create the static enumeration values
        pw.println(" // " + Messages.getMessage("ctor00"));
        pw.println(" protected " + javaName + "(" + baseType + " value) {");
        pw.println(" _value_ = value;");

        if (baseClass.equals("java.lang.String") || baseClass.equals(baseType)) {
            pw.println(" _table_.put(_value_,this);");
        } else {
            pw.println(" _table_.put(new " + baseClass + "(_value_),this);");
        }

        pw.println(" }");
        pw.println("");

        // A public static variable of the base type is generated for each
        // enumeration value.  Each variable is preceded by an _.
        for (int i = 0; i < ids.size(); i++) {
            // Need to catch the checked MalformedURIException for URI base types
            if (baseType.equals("org.apache.axis.types.URI")) {
                pw.println(" public static final " + baseType + " _" + ids.get(i) + ";");
                pw.println(" static {");
                pw.println(" try {");
                pw.println(" _" + ids.get(i) + " = " + values.get(i) + ";");
                pw.println(" }");
                pw.println(" catch (org.apache.axis.types.URI.MalformedURIException mue) {");
                pw.println(" throw new java.lang.RuntimeException(mue.toString());");
                pw.println(" }");
                pw.println(" }");
                pw.println("");
            } else {
                pw.println(" public static final " + baseType + " _" + ids.get(i)
                        + " = " + values.get(i) + ";");
            }
        }

        // A public static variable is generated for each enumeration value.
        for (int i = 0; i < ids.size(); i++) {
            pw.println(" public static final " + javaName + " " + ids.get(i)
                    + " = new " + javaName + "(_" + ids.get(i) + ");");
        }

        // Getter that returns the base value of the enumeration value
        pw.println(" public " + baseType + " getValue() { return _value_;}");

        // FromValue returns the unique enumeration value object from the table
        pw.println(" public static " + javaName + " fromValue(" + baseType + " value)");
        pw.println(" throws java.lang.IllegalArgumentException {");
        pw.println(" " + javaName + " enumeration = (" + javaName + ")");

        if (baseClass.equals("java.lang.String") || baseClass.equals(baseType)) {
            pw.println(" _table_.get(value);");
        } else {
            pw.println(" _table_.get(new " + baseClass + "(value));");
        }

        pw.println( " if (enumeration==null) throw new java.lang.IllegalArgumentException();");
        pw.println(" return enumeration;");
        pw.println(" }");

        // FromString returns the unique enumeration value object from a string representation
        pw.println(" public static " + javaName + " fromString(java.lang.String value)");
        pw.println(" throws java.lang.IllegalArgumentException {");

        if (baseClass.equals("java.lang.String")) {
            pw.println(" return fromValue(value);");
        } else if (baseClass.equals("javax.xml.namespace.QName")) {
            pw.println(" try {");
            pw.println(" return fromValue(javax.xml.namespace.QName.valueOf" + "(value));");
            pw.println(" } catch (Exception e) {");
            pw.println( " throw new java.lang.IllegalArgumentException();");
            pw.println(" }");
        } else if (baseClass.equals(baseType)) {
            pw.println(" try {");
            pw.println(" return fromValue(new " + baseClass + "(value));");
            pw.println(" } catch (Exception e) {");
            pw.println( " throw new java.lang.IllegalArgumentException();");
            pw.println(" }");
        } else if (baseClass.equals("java.lang.Character")) {
            // BUG FIX: the original emitted "if (...);" with a trailing
            // semicolon, producing an empty if-statement followed by an
            // unconditional return and an unreachable throw -- the generated
            // class failed to compile (JLS 14.21, unreachable statements).
            // The return is now properly guarded by the if.
            pw.println(" if (value != null && value.length() == 1)");
            pw.println(" return fromValue(value.charAt(0));");
            pw.println( " throw new java.lang.IllegalArgumentException();");
        } else if (baseClass.equals("java.lang.Integer")) {
            pw.println(" try {");
            pw.println( " return fromValue(java.lang.Integer.parseInt(value));");
            pw.println(" } catch (Exception e) {");
            pw.println( " throw new java.lang.IllegalArgumentException();");
            pw.println(" }");
        } else {
            // Other wrappers (Short, Long, Double, Float, Byte) expose a
            // static parseXxx(String) named after the simple class name.
            String parse = "parse"
                    + baseClass.substring(baseClass.lastIndexOf(".") + 1);

            pw.println(" try {");
            pw.println(" return fromValue(" + baseClass + "." + parse + "(value));");
            pw.println(" } catch (Exception e) {");
            pw.println( " throw new java.lang.IllegalArgumentException();");
            pw.println(" }");
        }

        pw.println(" }");

        // Equals == to determine equality value.
        // Since enumeration values are singletons, == is appropriate for equals()
        pw.println( " public boolean equals(java.lang.Object obj) {return (obj == this);}");

        // Provide a reasonable hashCode method (hashCode of the string value of the enumeration)
        pw.println( " public int hashCode() { return toString().hashCode();}");

        // toString returns a string representation of the enumerated value
        if (baseClass.equals("java.lang.String")) {
            pw.println( " public java.lang.String toString() { return _value_;}");
        } else if (baseClass.equals(baseType)) {
            pw.println( " public java.lang.String toString() { return _value_.toString();}");
        } else {
            pw.println( " public java.lang.String toString() { return java.lang.String.valueOf(_value_);}");
        }

        // readResolve re-interns deserialized instances so == stays valid.
        pw.println( " public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}");

        // Static serializer/deserializer factory methods used by the Axis runtime.
        pw.println( " public static org.apache.axis.encoding.Serializer getSerializer(");
        pw.println(" java.lang.String mechType, ");
        pw.println(" java.lang.Class _javaType, ");
        pw.println(" javax.xml.namespace.QName _xmlType) {");
        pw.println(" return ");
        pw.println( " new org.apache.axis.encoding.ser.EnumSerializer(");
        pw.println(" _javaType, _xmlType);");
        pw.println(" }");
        pw.println( " public static org.apache.axis.encoding.Deserializer getDeserializer(");
        pw.println(" java.lang.String mechType, ");
        pw.println(" java.lang.Class _javaType, ");
        pw.println(" javax.xml.namespace.QName _xmlType) {");
        pw.println(" return ");
        pw.println( " new org.apache.axis.encoding.ser.EnumDeserializer(");
        pw.println(" _javaType, _xmlType);");
        pw.println(" }");

        // Type metadata used by the Axis type-mapping machinery.
        pw.println(" // " + Messages.getMessage("typeMeta"));
        pw.println( " private static org.apache.axis.description.TypeDesc typeDesc =");
        pw.println(" new org.apache.axis.description.TypeDesc("
                + Utils.getJavaLocalName(type.getName()) + ".class);");
        pw.println();
        pw.println(" static {");
        pw.println(" typeDesc.setXmlType(" + Utils.getNewQName(type.getQName()) + ");");
        pw.println(" }");
        pw.println(" /**");
        pw.println(" * " + Messages.getMessage("returnTypeMeta"));
        pw.println(" */");
        pw.println( " public static org.apache.axis.description.TypeDesc getTypeDesc() {");
        pw.println(" return typeDesc;");
        pw.println(" }");
        pw.println();
    }    // writeFileBody

    /**
     * Get the enumeration names for the values.
     * The name is affected by whether all of the values of the enumeration
     * can be expressed as valid java identifiers.
     *
     * @param bv Vector base and values vector from getEnumerationBaseAndValues
     * @return Vector names of enum value identifiers.
     */
    public static Vector getEnumValueIds(Vector bv) {

        boolean validJava = true;    // Assume all enum values are valid ids

        // Walk the values looking for invalid ids
        for (int i = 1; (i < bv.size()) && validJava; i++) {
            String value = (String) bv.get(i);

            if (!JavaUtils.isJavaId(value)) {
                validJava = false;
            }
        }

        // Build the vector of ids
        Vector ids = new Vector();

        for (int i = 1; i < bv.size(); i++) {
            // If any enum values are not valid java, then
            // all of the ids are of the form value<1..N>.
            if (!validJava) {
                ids.add("value" + i);
            } else {
                ids.add((String) bv.get(i));
            }
        }

        return ids;
    }    // getEnumValueIds

    /**
     * Generate a java source file for the enum class.
     * If the emitter works in deploy mode and the class already exists,
     * the source will not be generated.
     *
     * @throws IOException on write failure
     */
    public void generate() throws IOException {

        String fqcn = getPackage() + "." + getClassName();

        if (emitter.isDeploy()) {
            // In deploy mode, never overwrite an already-existing class.
            if (!emitter.doesExist(fqcn)) {
                super.generate();
            }
        } else {
            super.generate();
        }
    }
}    // class JavaEnumTypeWriter
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2018_07_01;

import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.management.network.v2018_07_01.implementation.ServiceEndpointPolicyDefinitionInner;
import com.microsoft.azure.arm.model.Indexable;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.network.v2018_07_01.implementation.NetworkManager;
import java.util.List;

/**
 * Type representing ServiceEndpointPolicyDefinition.
 *
 * NOTE: this file is generated by AutoRest; hand edits will be lost on the
 * next regeneration.  It follows the Azure fluent definition/update staged
 * builder pattern: {@code Definition} drives creation and {@code Update}
 * drives modification of an existing resource.
 */
public interface ServiceEndpointPolicyDefinition extends HasInner<ServiceEndpointPolicyDefinitionInner>, Indexable, Refreshable<ServiceEndpointPolicyDefinition>, Updatable<ServiceEndpointPolicyDefinition.Update>, HasManager<NetworkManager> {
    /**
     * @return the description value.
     */
    String description();

    /**
     * @return the etag value.
     */
    String etag();

    /**
     * @return the id value.
     */
    String id();

    /**
     * @return the name value.
     */
    String name();

    /**
     * @return the provisioningState value.
     */
    String provisioningState();

    /**
     * @return the service value.
     */
    String service();

    /**
     * @return the serviceResources value.
     */
    List<String> serviceResources();

    /**
     * The entirety of the ServiceEndpointPolicyDefinition definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithServiceEndpointPolicy, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of ServiceEndpointPolicyDefinition definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a ServiceEndpointPolicyDefinition definition.
         */
        interface Blank extends WithServiceEndpointPolicy {
        }

        /**
         * The stage of the serviceendpointpolicydefinition definition allowing to specify ServiceEndpointPolicy.
         */
        interface WithServiceEndpointPolicy {
           /**
            * Specifies resourceGroupName, serviceEndpointPolicyName.
            * @param resourceGroupName The name of the resource group
            * @param serviceEndpointPolicyName The name of the service endpoint policy
            * @return the next definition stage
            */
            WithCreate withExistingServiceEndpointPolicy(String resourceGroupName, String serviceEndpointPolicyName);
        }

        /**
         * The stage of the serviceendpointpolicydefinition definition allowing to specify Description.
         */
        interface WithDescription {
            /**
             * Specifies description.
             * @param description A description for this rule. Restricted to 140 chars
             * @return the next definition stage
             */
            WithCreate withDescription(String description);
        }

        /**
         * The stage of the serviceendpointpolicydefinition definition allowing to specify Etag.
         */
        interface WithEtag {
            /**
             * Specifies etag.
             * @param etag A unique read-only string that changes whenever the resource is updated
             * @return the next definition stage
             */
            WithCreate withEtag(String etag);
        }

        /**
         * The stage of the serviceendpointpolicydefinition definition allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next definition stage
             */
            WithCreate withId(String id);
        }

        /**
         * The stage of the serviceendpointpolicydefinition definition allowing to specify Name.
         */
        interface WithName {
            /**
             * Specifies name.
             * @param name The name of the resource that is unique within a resource group. This name can be used to access the resource
             * @return the next definition stage
             */
            WithCreate withName(String name);
        }

        /**
         * The stage of the serviceendpointpolicydefinition definition allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning state of the service end point policy definition. Possible values are: 'Updating', 'Deleting', and 'Failed'
             * @return the next definition stage
             */
            WithCreate withProvisioningState(String provisioningState);
        }

        /**
         * The stage of the serviceendpointpolicydefinition definition allowing to specify Service.
         */
        interface WithService {
            /**
             * Specifies service.
             * @param service service endpoint name
             * @return the next definition stage
             */
            WithCreate withService(String service);
        }

        /**
         * The stage of the serviceendpointpolicydefinition definition allowing to specify ServiceResources.
         */
        interface WithServiceResources {
            /**
             * Specifies serviceResources.
             * @param serviceResources A list of service resources
             * @return the next definition stage
             */
            WithCreate withServiceResources(List<String> serviceResources);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<ServiceEndpointPolicyDefinition>, DefinitionStages.WithDescription, DefinitionStages.WithEtag, DefinitionStages.WithId, DefinitionStages.WithName, DefinitionStages.WithProvisioningState, DefinitionStages.WithService, DefinitionStages.WithServiceResources {
        }
    }

    /**
     * The template for a ServiceEndpointPolicyDefinition update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<ServiceEndpointPolicyDefinition>, UpdateStages.WithDescription, UpdateStages.WithEtag, UpdateStages.WithId, UpdateStages.WithName, UpdateStages.WithProvisioningState, UpdateStages.WithService, UpdateStages.WithServiceResources {
    }

    /**
     * Grouping of ServiceEndpointPolicyDefinition update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the serviceendpointpolicydefinition update allowing to specify Description.
         */
        interface WithDescription {
            /**
             * Specifies description.
             * @param description A description for this rule. Restricted to 140 chars
             * @return the next update stage
             */
            Update withDescription(String description);
        }

        /**
         * The stage of the serviceendpointpolicydefinition update allowing to specify Etag.
         */
        interface WithEtag {
            /**
             * Specifies etag.
             * @param etag A unique read-only string that changes whenever the resource is updated
             * @return the next update stage
             */
            Update withEtag(String etag);
        }

        /**
         * The stage of the serviceendpointpolicydefinition update allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next update stage
             */
            Update withId(String id);
        }

        /**
         * The stage of the serviceendpointpolicydefinition update allowing to specify Name.
         */
        interface WithName {
            /**
             * Specifies name.
             * @param name The name of the resource that is unique within a resource group. This name can be used to access the resource
             * @return the next update stage
             */
            Update withName(String name);
        }

        /**
         * The stage of the serviceendpointpolicydefinition update allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning state of the service end point policy definition. Possible values are: 'Updating', 'Deleting', and 'Failed'
             * @return the next update stage
             */
            Update withProvisioningState(String provisioningState);
        }

        /**
         * The stage of the serviceendpointpolicydefinition update allowing to specify Service.
         */
        interface WithService {
            /**
             * Specifies service.
             * @param service service endpoint name
             * @return the next update stage
             */
            Update withService(String service);
        }

        /**
         * The stage of the serviceendpointpolicydefinition update allowing to specify ServiceResources.
         */
        interface WithServiceResources {
            /**
             * Specifies serviceResources.
             * @param serviceResources A list of service resources
             * @return the next update stage
             */
            Update withServiceResources(List<String> serviceResources);
        }
    }
}
package com.wistron.WiViewer;

import android.graphics.Color;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.style.AbsoluteSizeSpan;
import android.text.style.ForegroundColorSpan;

/**
 * Value holder for a single video's metadata, plus helpers that render the
 * metadata as styled {@link SpannableString}s for display.  Getters for the
 * core fields fall back to the literal string "Unknown" when unset.
 */
public class VideoInfo {
    // Core media-store metadata.
    private String m_title;
    private String m_album;
    private String m_artist;
    private String m_displayName;
    private String m_mimeType;
    private String m_path;
    private long m_size;      // file size in bytes; <= 0 means unknown
    private long m_duration;  // duration in milliseconds; <= 0 means unknown

    public String getM_date() {
        return m_date;
    }

    public String getM_format() {
        return m_format;
    }

    public String getM_videoquality() {
        return m_videoquality;
    }

    public String getM_videoSize() {
        return m_videoSize;
    }

    public String getM_framerate() {
        return m_framerate;
    }

    public String getM_AudioChannel() {
        return m_AudioChannel;
    }

    public void setM_date(String m_date) {
        this.m_date = m_date;
    }

    public void setM_format(String m_format) {
        this.m_format = m_format;
    }

    public void setM_videoquality(String m_videoquality) {
        this.m_videoquality = m_videoquality;
    }

    public void setM_videoSize(String m_videoSize) {
        this.m_videoSize = m_videoSize;
    }

    public void setM_framerate(String m_framerate) {
        this.m_framerate = m_framerate;
    }

    public void setM_AudioChannel(String m_AudioChannel) {
        this.m_AudioChannel = m_AudioChannel;
    }

    // Extended metadata (declared after their accessors in the original).
    private String m_date;
    private String m_format;
    private String m_videoquality;
    private String m_videoSize;
    private String m_framerate;
    private String m_AudioChannel;

    // Parallel arrays filled by setLable()/setInfo(): labels and their values.
    private String[] strings;
    private String[] stringsInfo;

    public VideoInfo() {
        super();
    }

    /** @return the title, or "Unknown" when unset. */
    public String getTitle() {
        if(this.m_title==null)
            return "Unknown";
        return m_title;
    }

    public void setTitle(String title) {
        this.m_title = title;
    }

    /** @return the album, or "Unknown" when unset. */
    public String getAlbum() {
        if(this.m_album==null)
            return "Unknown";
        return m_album;
    }

    public void setAlbum(String album) {
        this.m_album = album;
    }

    /** @return the artist, or "Unknown" when unset. */
    public String getArtist() {
        if(this.m_artist==null )
            return "Unknown";
        return m_artist;
    }

    public void setArtist(String artist) {
        this.m_artist = artist;
    }

    /** @return the display name, or "Unknown" when unset. */
    public String getDisplayName() {
        if(this.m_displayName==null )
            return "Unknown";
        return m_displayName;
    }

    public void setDisplayName(String displayName) {
        this.m_displayName = displayName;
    }

    public String getMimeType() {
        return m_mimeType;
    }

    public void setMimeType(String mimeType) {
        this.m_mimeType = mimeType;
    }

    /** @return the raw path, or "Unknown" when unset. */
    public String getPath() {
        if(this.m_path==null)
            return "Unknown";
        return this.m_path;
    }

    /**
     * Path formatted for display: truncated to 35 chars plus "..." when
     * longer than 37 chars (presumably sized to the info-panel width --
     * TODO confirm against the layout).
     */
    public String showPath() {
        String string=this.m_path;
        if(this.m_path==null)
            return "Unknown";
        if(string.length()>37){
            return string.substring(0,35)+"...";
        }
        else{
            return string;
        }
    }

    public void setPath(String path) {
        this.m_path = path;
    }

    /**
     * Human-readable file size ("...Byte", "...KB" or "...MB"), or "Unknown"
     * when the size is not positive.
     *
     * The fractional digits are extracted manually: String.valueOf of the
     * fraction yields "0.xxx...", "0000"/"000" is appended to guarantee
     * enough characters, and substring picks ".dd" (KB) or ".d" (MB) --
     * i.e. the fraction is truncated, not rounded.
     *
     * NOTE(review): the Byte branch concatenates a float, so it prints e.g.
     * "1000.0Byte"; and very small fractions that stringify in scientific
     * notation would garble the substring -- verify with real sizes.
     */
    public String getSize() {
        if(this.m_size<=0)
            return "Unknown";
        float size = m_size;
        if (size <= 1024)
            return size + "Byte";
        else if (size > 1024 && size <= 10 * 1024 * 1024) {
            // KB with two truncated decimal digits.
            String s=String.valueOf(size / 1024 - (int) (size / 1024))+"0000";
            return (int) (size / 1024) + s.substring(1, 4) + "KB";
        } else {
            // MB with one truncated decimal digit.
            String ss=String.valueOf( size / (1024 * 1024) - (int) (size / (1024 * 1024)))+"000";
            return (int) (size / (1024 * 1024)) + ss.substring(1,3) + "MB";
        }
    }

    public void setSize(long size) {
        this.m_size = size;
    }

    /**
     * Duration formatted as "HH:mm:ss" (milliseconds rounded up to whole
     * seconds via Math.ceil), or "Unknown" when not positive.
     */
    public String getDuration() {
        if(this.m_duration<=0)
            return "Unknown";
        double duration = 0;
        int minute = 0, second = 0, hour =0;
        duration=m_duration;
        duration=Math.ceil(duration/1000);
        hour=(int) (duration)/3600;
        minute = (int) (duration)/60-hour*60;
        second = (int) (duration) %60;
        return String.format("%02d:%02d:%02d",hour,minute,second);
    }

    public void setDuration(long duration) {
        this.m_duration = duration;
    }

    /** Populates the eight fixed display labels. */
    public void setLable() {
        strings=new String[8];
        strings[0]="Video Title";
        strings[1]="Duration";
        strings[2]="Creation Date & Time";
        strings[3]="Compression";
        strings[4]="Video Quality";
        strings[5]="Video Size";
        strings[6]="Frame Rate";
        strings[7]="Audio Channel";
    }

    /** Populates the eight values matching the labels from setLable(). */
    public void setInfo() {
        stringsInfo=new String[8];
        stringsInfo[0]= this.getTitle();
        stringsInfo[1]=this.getDuration();
        stringsInfo[2]=this.getM_date();
        stringsInfo[3]=this.getM_format();
        stringsInfo[4]=this.getM_videoquality();
        stringsInfo[5]=this.getSize();
        stringsInfo[6]=this.getM_framerate();
        stringsInfo[7]=this.getM_AudioChannel();
    }

    /**
     * One styled "label\nvalue" SpannableString per metadata entry: label in
     * white, value in teal.
     *
     * NOTE(review): the two ForegroundColorSpan instances are shared across
     * all eight SpannableStrings; Android span objects are generally expected
     * to be attached to a single Spannable at a time -- verify styling of all
     * rows renders correctly, or allocate spans per entry.
     */
    public SpannableString[] showInfoList() {
        SpannableString[] res=new SpannableString[8];
        setLable();
        setInfo();
        ForegroundColorSpan whiteColor=new ForegroundColorSpan(Color.WHITE);
        ForegroundColorSpan infoColor=new ForegroundColorSpan(Color.rgb(31, 234, 189));
        for(int i=0;i<res.length;i++)
        {
            int curindex=0;
            res[i]=new SpannableString(strings[i] + "\n"+ stringsInfo[i]);
            int curstring=strings[i].length();
            int curinfo= stringsInfo[i].length();
            // Label span, then advance past the label and its trailing '\n'.
            res[i].setSpan(whiteColor, curindex, curindex+curstring, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            curindex=curindex+curstring+1;
            res[i].setSpan(infoColor,curindex, curindex+curinfo, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }
        return res;
    }

    /**
     * All eight entries in one SpannableString, each as
     * "label\nvalue\n \n", with 20sp labels, 15sp values and a 5sp spacer
     * line.  Span offsets are walked in lock-step with the string layout.
     */
    public SpannableString showInfo() {
        setLable();
        setInfo();
        String string;
        string = strings[0] + "\n"+ stringsInfo[0]+ "\n"+ " \n" +
                strings[1]+"\n"+ stringsInfo[1]+ "\n"+ " \n" +
                strings[2]+"\n"+ stringsInfo[2]+ "\n"+ " \n" +
                strings[3]+"\n"+ stringsInfo[3]+ "\n"+ " \n" +
                strings[4] +"\n"+ stringsInfo[4] + "\n"+ " \n" +
                strings[5]+"\n"+ stringsInfo[5]+ "\n"+ " \n" +
                strings[6]+"\n"+ stringsInfo[6]+ "\n"+ " \n" +
                strings[7]+"\n"+ stringsInfo[7]+ "\n"+ " \n";
        SpannableString ss = new SpannableString(string);
        int curindex=0;
        int curstring=0;
        int curinfo=0;
        for(int i=0;i<strings.length;i++)
        {
            curstring=strings[i].length();
            curinfo= stringsInfo[i].length();
            // Label (includes its trailing '\n'), value, then " \n" spacer.
            ss.setSpan(new AbsoluteSizeSpan(20), curindex, curindex+curstring+1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            curindex=curindex+curstring+1;
            ss.setSpan(new AbsoluteSizeSpan(15),curindex, curindex+curinfo+1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            curindex=curindex+curinfo+1;
            ss.setSpan(new AbsoluteSizeSpan(5),curindex, curindex+2, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            curindex=curindex+2;
        }
        return ss;
    }

    /**
     * Variant of showInfo() without the spacer lines: "label\nvalue\n..."
     * with 20sp labels and 15sp values.
     */
    public SpannableString showInfo_() {
        setLable();
        setInfo();
        String string;
        string =strings[0]+"\n"+stringsInfo[0]+"\n" +
                strings[1]+"\n"+stringsInfo[1]+"\n" +
                strings[2]+"\n"+stringsInfo[2]+"\n" +
                strings[3]+"\n"+stringsInfo[3]+"\n" +
                strings[4]+"\n"+stringsInfo[4]+"\n" +
                strings[5]+"\n"+stringsInfo[5]+"\n" +
                strings[6]+"\n"+stringsInfo[6]+"\n" +
                strings[7]+"\n"+stringsInfo[7];
        SpannableString ss = new SpannableString(string);
        int curindex=0;
        int curstring=0;
        int curinfo=0;
        for(int i=0;i<strings.length;i++)
        {
            curstring=strings[i].length();
            curinfo= stringsInfo[i].length();
            ss.setSpan(new AbsoluteSizeSpan(20), curindex, curindex+curstring, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            curindex=curindex+curstring+1;
            ss.setSpan(new AbsoluteSizeSpan(15),curindex, curindex+curinfo, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            curindex=curindex+curinfo+1;
        }
        return ss;
    }

    /**
     * Variant with label and value on the same line ("labelvalue\n..."),
     * label in white and value in green.
     */
    public SpannableString showInfo__() {
        setLable();
        setInfo();
        String string;
        string= strings[0]+stringsInfo[0]+"\n" +
                strings[1]+stringsInfo[1]+"\n" +
                strings[2]+stringsInfo[2]+"\n" +
                strings[3]+stringsInfo[3]+"\n" +
                strings[4]+stringsInfo[4]+"\n" +
                strings[5]+stringsInfo[5]+"\n" +
                strings[6]+stringsInfo[6]+"\n" +
                strings[7]+stringsInfo[7];
        SpannableString ss = new SpannableString(string);
        int curindex=0;
        int curstring=0;
        int curinfo=0;
        for(int i=0;i<strings.length;i++)
        {
            curstring=strings[i].length();
            curinfo= stringsInfo[i].length();
            ss.setSpan(new ForegroundColorSpan(Color.WHITE), curindex, curindex+curstring, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            ss.setSpan(new ForegroundColorSpan(0xff88ee00), curindex+curstring, curindex+curstring+curinfo, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            curindex=curindex+curstring+curinfo+1;
        }
        return ss;
    }
}
/*
 * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.identity.integration.test.identity.mgt;

import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.identity.integration.common.clients.Idp.IdentityProviderMgtServiceClient;
import org.wso2.carbon.identity.application.common.model.idp.xsd.FederatedAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.idp.xsd.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.idp.xsd.JustInTimeProvisioningConfig;
import org.wso2.carbon.identity.application.common.model.idp.xsd.Property;
import org.wso2.carbon.identity.application.common.model.idp.xsd.ProvisioningConnectorConfig;
import org.wso2.identity.integration.common.utils.ISIntegrationTest;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Integration tests for the Identity Server's IdentityProviderMgtService admin service.
 *
 * The test methods are ORDER-DEPENDENT: they run by TestNG {@code priority}
 * (getResidentIdP -> addIdp -> getAllIdPs -> getEnabledAllIdPs -> updateIdP ->
 * connector/authenticator/claim lookups + updateResidentIdP -> deleteIdP).
 * Earlier tests create server-side state (a test IdP, captured resident-IdP
 * defaults) that later tests and {@link #atEnd()} consume, so methods must not
 * be run in isolation.
 */
public class IdentityProviderMgtServiceTestCase extends ISIntegrationTest {

    private static final Log log = LogFactory.getLog(IdentityProviderMgtServiceTestCase.class);

    // Admin-service client, created in testInit() from the session established by super.init().
    private IdentityProviderMgtServiceClient idpMgtServiceClient;
    // Certificate of the resident IdP, captured in testGetResidentIdP() and reused in testAddIdp().
    private String sampleCertificate;
    // Name of the IdP created by testAddIdp(); renamed by testUpdateIdP(), deleted by testDeleteIdP().
    private String testIdpName = "TestIDPProvider";
    private String updatedTestIdpName = "UpdatedTestIDPProvider";
    private String testFedAuthName = "OpenIDAuthenticator";
    //Resident idp default values
    // Captured in testUpdateResidentIdP() so atEnd() can restore the server to its original state.
    private boolean residentIdpEnable;
    private boolean residentIdpPrimary;
    private String residentIdpName;
    private String residentIDPDefaultRealm;
    private String defaultSamlSSOEntityID = "localhost";
    private final String SAML2SSO_NAME = "samlsso";
    private final String SAML2SSO_IDP_ENTITY_ID = "IdPEntityId";

    /**
     * Initializes the base integration-test context and builds the
     * IdentityProviderMgtService client for the logged-in admin session.
     */
    @BeforeClass(alwaysRun = true)
    public void testInit() throws Exception {
        super.init();
        ConfigurationContext configContext = ConfigurationContextFactory.createConfigurationContextFromFileSystem(null, null);
        idpMgtServiceClient = new IdentityProviderMgtServiceClient(sessionCookie, backendURL, configContext);
    }

    /**
     * Restores the resident IdP to the defaults captured in
     * testUpdateResidentIdP() so this suite leaves the server unchanged.
     * NOTE(review): if testUpdateResidentIdP never ran, the captured fields are
     * still at their Java defaults (false/null) — confirm that is acceptable.
     */
    @AfterClass(alwaysRun = true)
    public void atEnd() throws Exception {
        //Restore default values for changes made to resident IDP
        IdentityProvider residentProvider = idpMgtServiceClient.getResidentIdP();
        Assert.assertNotNull(residentProvider, "Resident idp retrieval failed to restoring");
        //restore resident identity provider
        IdentityProvider identityProvider = new IdentityProvider();
        identityProvider.setEnable(residentIdpEnable);
        identityProvider.setPrimary(residentIdpPrimary);
        identityProvider.setIdentityProviderName(residentIdpName);
        identityProvider.setHomeRealmId(residentIDPDefaultRealm);
        // Re-install the default SAML SSO entity id ("localhost").
        FederatedAuthenticatorConfig samlFedAuthn = new FederatedAuthenticatorConfig();
        samlFedAuthn.setName(SAML2SSO_NAME);
        Property[] properties = new Property[1];
        Property property = new Property();
        property.setName(SAML2SSO_IDP_ENTITY_ID);
        property.setValue(defaultSamlSSOEntityID);
        properties[0] = property;
        samlFedAuthn.setProperties(properties);
        FederatedAuthenticatorConfig[] federatedAuthenticators = new FederatedAuthenticatorConfig[1];
        federatedAuthenticators[0] = samlFedAuthn;
        identityProvider.setFederatedAuthenticatorConfigs(federatedAuthenticators);
        idpMgtServiceClient.updateResidentIdP(identityProvider);
        log.info("resident idp restored");
    }

    /**
     * Verifies the resident ("LOCAL") IdP exposes the expected default
     * federated authenticators and provisioning connectors, and captures its
     * certificate for reuse in {@link #testAddIdp()}.
     */
    @Test(priority = 1, groups = "wso2.is", description = "Test getResidentIdP operation")
    public void testGetResidentIdP() throws Exception {
        log.info("Retrieving resident identity provide");
        IdentityProvider idProvider = idpMgtServiceClient.getResidentIdP();
        Assert.assertNotNull(idProvider, "Resident identity provider retrieval failed");
        sampleCertificate = idProvider.getCertificate();
        //Extract authenticator configurations
        FederatedAuthenticatorConfig[] authConfigs = idProvider.getFederatedAuthenticatorConfigs();
        log.info("Authenticator configs : " + authConfigs.length);
        //Extract provisioning configurations
        ProvisioningConnectorConfig[] provisioningConfigs = idProvider.getProvisioningConnectorConfigs();
        log.info("Provisioning configs : " + provisioningConfigs.length);
        //check default identity provider name
        Assert.assertEquals(idProvider.getIdentityProviderName(), "LOCAL", "Default resident identity provider name changed");
        //check Default number of authentication configurations
        Map<String, FederatedAuthenticatorConfig> fedAuthConfigMap = new HashMap<String, FederatedAuthenticatorConfig>();
        for (FederatedAuthenticatorConfig config : authConfigs) {
            fedAuthConfigMap.put(config.getName(), config);
        }
        Assert.assertEquals(fedAuthConfigMap.containsKey("openidconnect"), true, "Default auth config not found");
        Assert.assertEquals(fedAuthConfigMap.containsKey("samlsso"), true, "Default auth config not found");
        Assert.assertEquals(fedAuthConfigMap.containsKey("openid"), true, "Default auth config not found");
        Assert.assertEquals(fedAuthConfigMap.containsKey("passivests"), true, "Default auth config not found");
        //check Default number of provisioning configurations
        Map<String, ProvisioningConnectorConfig> provisioningConfigMap = new HashMap<String, ProvisioningConnectorConfig>();
        for (ProvisioningConnectorConfig config : provisioningConfigs) {
            provisioningConfigMap.put(config.getName(), config);
        }
        Assert.assertEquals(provisioningConfigMap.containsKey("scim"), true, "Default provisioning config not found");
    }

    /**
     * Adds a fully-populated test IdP (OpenID federated authenticator with three
     * properties, SCIM provisioning connector with three properties, JIT
     * provisioning enabled) and reads it back, asserting every field survived
     * the round trip.
     */
    @Test(priority = 2, groups = "wso2.is", description = "Test addIdp operation")
    public void testAddIdp() throws Exception {
        String testIdpDescription = "This is test identity provider";
        String testIdpRealmId = "localhost";
        String testFedAuthDispName = "openid";
        String testFedAuthPropName = "OpenIdUrl";
        String testFedAuthPropValue = "https://testDomain:9853/openid";
        String testFedAuthPropName2 = "IsUserIdInClaims";
        String testFedAuthPropValue2 = "false";
        String testFedAuthPropName3 = "RealmId";
        String testFedAuthPropValue3 = "localhost";
        String testProvisionConfName = "scim";
        String testProvisionPropName = "scim-user-ep";
        String testProvisionPropDisplayName = "userEndPoint";
        String testProvisionPropValue = "https://localhost:9853/testProvisionLink";
        String testProvisionPropName2 = "scim-username";
        String testProvisionPropDisplayName2 = "userName";
        String testProvisionPropValue2 = "admin";
        String testProvisionPropName3 = "scim-password";
        String testProvisionPropDisplayName3 = "userPassword";
        String testProvisionPropValue3 = "admin";
        IdentityProvider idProvider = new IdentityProvider();
        FederatedAuthenticatorConfig[] fedAuthConfigs = new FederatedAuthenticatorConfig[1];
        //set idp information
        idProvider.setHomeRealmId(testIdpRealmId);
        idProvider.setEnable(true);
        idProvider.setIdentityProviderDescription(testIdpDescription);
        idProvider.setIdentityProviderName(testIdpName);
        // Reuses the resident IdP's certificate captured in testGetResidentIdP().
        idProvider.setCertificate(sampleCertificate);
        idProvider.setFederationHub(false);
        idProvider.setPrimary(false);
        //Add federated authentication configuration
        FederatedAuthenticatorConfig authConfig = new FederatedAuthenticatorConfig();
        authConfig.setDisplayName(testFedAuthDispName);
        authConfig.setEnabled(true);
        authConfig.setName(testFedAuthName);
        //set properties
        //property 1
        Property fedProp = new Property();
        fedProp.setName(testFedAuthPropName);
        fedProp.setValue(testFedAuthPropValue);
        //property 2
        Property fedProp2 = new Property();
        fedProp2.setName(testFedAuthPropName2);
        fedProp2.setValue(testFedAuthPropValue2);
        //property 3
        Property fedProp3 = new Property();
        fedProp3.setName(testFedAuthPropName3);
        fedProp3.setValue(testFedAuthPropValue3);
        Property[] props = new Property[3];
        props[0] = fedProp;
        props[1] = fedProp2;
        props[2] = fedProp3;
        authConfig.setProperties(props);
        fedAuthConfigs[0] = authConfig;
        idProvider.setFederatedAuthenticatorConfigs(fedAuthConfigs);
        //Set JIT config
        JustInTimeProvisioningConfig jitConfig = new JustInTimeProvisioningConfig();
        jitConfig.setProvisioningEnabled(true);
        idProvider.setJustInTimeProvisioningConfig(jitConfig);
        ProvisioningConnectorConfig provisioningConfig = new ProvisioningConnectorConfig();
        provisioningConfig.setName(testProvisionConfName);
        provisioningConfig.setValid(false);
        provisioningConfig.setBlocking(false);
        provisioningConfig.setEnabled(true);
        //set provisioning properties
        Property provisionProp = new Property();
        provisionProp.setName(testProvisionPropName);
        provisionProp.setDisplayName(testProvisionPropDisplayName);
        provisionProp.setValue(testProvisionPropValue);
        Property provisionProp2 = new Property();
        provisionProp2.setName(testProvisionPropName2);
        provisionProp2.setDisplayName(testProvisionPropDisplayName2);
        provisionProp2.setValue(testProvisionPropValue2);
        Property provisionProp3 = new Property();
        provisionProp3.setName(testProvisionPropName3);
        provisionProp3.setDisplayName(testProvisionPropDisplayName3);
        provisionProp3.setValue(testProvisionPropValue3);
        Property[] provisionProps = new Property[3];
        provisionProps[0] = provisionProp;
        provisionProps[1] = provisionProp2;
        provisionProps[2] = provisionProp3;
        provisioningConfig.setProvisioningProperties(provisionProps);
        ProvisioningConnectorConfig[] provisionConfigs = new ProvisioningConnectorConfig[1];
        provisionConfigs[0] = provisioningConfig;
        idProvider.setProvisioningConnectorConfigs(provisionConfigs);
        //add new identity provider
        idpMgtServiceClient.addIdP(idProvider);
        //check adding idp success
        IdentityProvider addedIdp = idpMgtServiceClient.getIdPByName(testIdpName);
        Assert.assertNotNull(addedIdp, "addIdP or getIdPByName failed");
        Assert.assertEquals(addedIdp.getHomeRealmId(), testIdpRealmId, "addIdP : setting home realm failed");
        Assert.assertEquals(addedIdp.getCertificate(), sampleCertificate, "addIdP : setting certificate failed");
        //idp auto enabled
        Assert.assertEquals(addedIdp.getEnable(), true, "addIdP : idp enable failed");
        Assert.assertEquals(addedIdp.getIdentityProviderDescription(), testIdpDescription, "addIdP : setting description failed");
        Assert.assertEquals(addedIdp.getFederationHub(), false, "addIdP : setting federation hub status failed");
        Assert.assertEquals(addedIdp.getPrimary(), false, "addIdP : setting primary status failed");
        //Check added federated authenticator configs
        FederatedAuthenticatorConfig[] addedFedAuth = addedIdp.getFederatedAuthenticatorConfigs();
        Assert.assertNotNull(addedFedAuth, "federated authenticator not found");
        Assert.assertEquals(addedFedAuth.length, 1, "addIdP : deviation from expected number of federated authenticators");
        Assert.assertEquals(addedFedAuth[0].getName(), testFedAuthName, "addIdP : federated authenticator name setting failed");
        Property[] fedAuthProps = addedFedAuth[0].getProperties();
        Assert.assertNotNull(fedAuthProps, "addIdP : federated authenticator properties not found");
        Assert.assertEquals(fedAuthProps.length, 3, "addIdP : Deviation of expected number of authenticator properties");
        Map<String, Property> propertyMap = new HashMap<String, Property>();
        for (Property fedAuthProp : fedAuthProps) {
            propertyMap.put(fedAuthProp.getName(), fedAuthProp);
        }
        Assert.assertEquals(propertyMap.containsKey(testFedAuthPropName), true, "addIdP : federated authenticator property not found");
        Assert.assertEquals(propertyMap.get(testFedAuthPropName).getValue(), testFedAuthPropValue, "Deviation of federated authenticator property value");
        Assert.assertEquals(propertyMap.containsKey(testFedAuthPropName2), true, "addIdP : federated authenticator property not found");
        Assert.assertEquals(propertyMap.get(testFedAuthPropName2).getValue(), testFedAuthPropValue2, "Deviation of federated authenticator property value");
        Assert.assertEquals(propertyMap.containsKey(testFedAuthPropName3), true, "addIdP : federated authenticator property not found");
        Assert.assertEquals(propertyMap.get(testFedAuthPropName3).getValue(), testFedAuthPropValue3, "Deviation of federated authenticator property value");
        // Map is reused below for the provisioning properties.
        propertyMap.clear();
        //check provisioning connector configs
        ProvisioningConnectorConfig[] provisioningConfigs = addedIdp.getProvisioningConnectorConfigs();
        Assert.assertNotNull(provisioningConfigs, "addIdP : provisioning connector not found");
        Assert.assertEquals(provisioningConfigs.length, 1, "addIdP : Provisioning configuration property adding failed");
        Assert.assertEquals(provisioningConfigs[0].getName(), testProvisionConfName, "addIdP : Provisioning configuration name setting failed");
        Property[] provisioningProps = provisioningConfigs[0].getProvisioningProperties();
        Assert.assertNotNull(provisioningProps, "addIdP : provisioning property not found");
        Assert.assertEquals(provisioningProps.length, 3, "addIdP :Provisioning configuration property setting failed");
        for (Property provisioningProp : provisioningProps) {
            propertyMap.put(provisioningProp.getName(), provisioningProp);
        }
        Assert.assertEquals(propertyMap.containsKey(testProvisionPropName), true, "addIdP : Provisioning configuration property not found : " + testProvisionPropName);
        Assert.assertEquals(propertyMap.get(testProvisionPropName).getValue(), testProvisionPropValue, "addIdP : Provisioning configuration property value failed : " + testProvisionPropName);
        Assert.assertEquals(propertyMap.containsKey(testProvisionPropName2), true, "addIdP : Provisioning configuration property not found : " + testProvisionPropName2);
        Assert.assertEquals(propertyMap.get(testProvisionPropName2).getValue(), testProvisionPropValue2, "addIdP : Provisioning configuration property value failed : " + testProvisionPropName2);
        Assert.assertEquals(propertyMap.containsKey(testProvisionPropName3), true, "addIdP : Provisioning configuration property not found : " + testProvisionPropName3);
        Assert.assertEquals(propertyMap.get(testProvisionPropName3).getValue(), testProvisionPropValue3, "addIdP : Provisioning configuration property value failed : " + testProvisionPropName3);
        //check jit
        Assert.assertEquals(addedIdp.getJustInTimeProvisioningConfig().getProvisioningEnabled(), true, "addIdP : JIT enabling failed");
    }

    /**
     * Verifies the IdP created in {@link #testAddIdp()} appears in the full
     * provider listing.
     */
    @Test(priority = 3, groups = "wso2.is", description = "test getAllIdPs operation")
    public void testGetAllIdPs() throws Exception {
        List<IdentityProvider> providers = idpMgtServiceClient.getIdPs();
        Assert.assertNotNull(providers);
        log.info("All idp list : " + providers.size());
        //added test IDP included in the list
        if (providers.size() > 0) {
            IdentityProvider addedProvider = null;
            for (IdentityProvider provider : providers) {
                if (provider.getIdentityProviderName().equals(testIdpName)) {
                    addedProvider = provider;
                }
            }
            Assert.assertNotNull(addedProvider, "Added new test idp not found in the idp list");
        } else {
            Assert.fail("Unable to find added identity provider");
        }
    }

    /**
     * Verifies the test IdP (added enabled) appears in the enabled-only
     * provider listing.
     */
    @Test(priority = 4, groups = "wso2.is", description = "test getEnabledAllIdPs operation")
    public void testGetEnabledAllIdPs() throws Exception {
        List<IdentityProvider> idpList = idpMgtServiceClient.getEnabledIdPs();
        Assert.assertNotNull(idpList, "Enabled idp retrieval failed");
        if (idpList.size() > 0) {
            IdentityProvider addedProvider = null;
            for (IdentityProvider provider : idpList) {
                if (provider.getIdentityProviderName().equals(testIdpName)) {
                    addedProvider = provider;
                }
            }
            Assert.assertNotNull(addedProvider, "Added new test idp not found in the idp list : " + testIdpName);
        } else {
            Assert.fail("Unable to find added identity provider");
        }
    }

    /**
     * Renames, disables, and re-describes the test IdP (and toggles its
     * federated-authenticator flags), then reads it back under the new name to
     * verify the update. Later tests reference the IdP by updatedTestIdpName.
     */
    @Test(priority = 5, groups = "wso2.is", description = "test UpdateIdP operation")
    public void testUpdateIdP() throws Exception {
        String updatedTestIdpDescription = "This is Updated test identity provider";
        IdentityProvider idProvider = idpMgtServiceClient.getIdPByName(testIdpName);
        Assert.assertNotNull(idProvider, "Idp retrieval failed");
        //update description
        idProvider.setIdentityProviderDescription(updatedTestIdpDescription);
        //update idp name
        idProvider.setIdentityProviderName(updatedTestIdpName);
        //disable idp
        idProvider.setEnable(false);
        //update federated auth configurations
        idProvider.getFederatedAuthenticatorConfigs()[0].setEnabled(false);
        idProvider.getFederatedAuthenticatorConfigs()[0].setValid(true);
        idpMgtServiceClient.updateIdP(testIdpName, idProvider);
        //Check update
        IdentityProvider updatedProvider = idpMgtServiceClient.getIdPByName(updatedTestIdpName);
        Assert.assertNotNull(updatedProvider, "Idp update failed");
        Assert.assertEquals(updatedProvider.getIdentityProviderDescription(), updatedTestIdpDescription, "IDP description update failed");
        Assert.assertEquals(updatedProvider.getIdentityProviderName(), updatedTestIdpName, "IDP name update failed");
        Assert.assertEquals(updatedProvider.getEnable(), false, "idp disabling failed");
        Assert.assertNotNull(updatedProvider.getFederatedAuthenticatorConfigs(), "Federated authenticator retrieval failed");
        Assert.assertEquals(updatedProvider.getFederatedAuthenticatorConfigs().length, 1, "Deviation of expected number of federated authenticators");
        Assert.assertEquals(updatedProvider.getFederatedAuthenticatorConfigs()[0].getName(), testFedAuthName, "Incorrect federated authenticated received");
        Assert.assertEquals(updatedProvider.getFederatedAuthenticatorConfigs()[0].getEnabled(), false, "federated authenticator enabling failed");
        Assert.assertEquals(updatedProvider.getFederatedAuthenticatorConfigs()[0].getValid(), true, "Set validate status failed");
    }

    /**
     * Checks that the server advertises the out-of-the-box provisioning
     * connectors (googleapps, salesforce, scim, spml).
     */
    @Test(priority = 6, groups = "wso2.is", description = "test getAllProvisioningConnectors operation")
    public void testGetAllProvisioningConnectors() throws Exception {
        Map<String, ProvisioningConnectorConfig> provisioningCons = idpMgtServiceClient.getAllProvisioningConnectors();
        Assert.assertNotNull(provisioningCons, "getAllProvisioningConnectors retrieval failed");
        log.info("Available provisioning connectors : " + provisioningCons.size());
        if (provisioningCons.size() < 1) {
            Assert.fail("Default provisioning connectors not available");
        }
        //check current default provisioning connectors
        Assert.assertEquals(provisioningCons.containsKey("googleapps"), true, "Default provisioning connector googleapps not found");
        Assert.assertEquals(provisioningCons.containsKey("salesforce"), true, "Default provisioning connector salesforce not found");
        Assert.assertEquals(provisioningCons.containsKey("scim"), true, "Default provisioning connector scim not found");
        Assert.assertEquals(provisioningCons.containsKey("spml"), true, "Default provisioning connector spml not found");
    }

    /**
     * Checks that the server advertises the out-of-the-box federated
     * authenticators (Facebook, OpenID Connect, Windows Live, OpenID, Yahoo,
     * SAML SSO, Google OAuth2/OpenID).
     */
    @Test(priority = 7, groups = "wso2.is", description = "test getAllFederatedAuthenticators operation")
    public void testGetAllFederatedAuthenticators() throws Exception {
        Map<String, FederatedAuthenticatorConfig> allFedAuthenticators = idpMgtServiceClient.getAllAvailableFederatedAuthenticators();
        Assert.assertNotNull(allFedAuthenticators, "getAllFederatedAuthenticators retrieval failed");
        log.info("Available federated authenticators : " + allFedAuthenticators.size());
        //check current default federated authenticators
        Assert.assertEquals(allFedAuthenticators.containsKey("FacebookAuthenticator"), true, "Default federated authenticator FacebookAuthenticator not found");
        Assert.assertEquals(allFedAuthenticators.containsKey("OpenIDConnectAuthenticator"), true, "Default federated authenticator OpenIDConnectAuthenticator not found");
        Assert.assertEquals(allFedAuthenticators.containsKey("MicrosoftWindowsLive"), true, "Default federated authenticator MicrosoftWindowsLive not found");
        Assert.assertEquals(allFedAuthenticators.containsKey("OpenIDAuthenticator"), true, "Default federated authenticator OpenIDAuthenticator not found");
        Assert.assertEquals(allFedAuthenticators.containsKey("YahooOpenIDAuthenticator"), true, "Default federated authenticator YahooOpenIDAuthenticator not found");
        Assert.assertEquals(allFedAuthenticators.containsKey("SAMLSSOAuthenticator"), true, "Default federated authenticator SAMLSSOAuthenticator not found");
        Assert.assertEquals(allFedAuthenticators.containsKey("GoogleOAUth2OpenIDAuthenticator"), true, "Default federated authenticator GoogleOAUth2OpenIDAuthenticator not found");
    }

    /**
     * Sanity-checks the local claim URI listing.
     * NOTE(review): the threshold 28 encodes the default claim count of the
     * tested IS version — confirm when the product's default claims change.
     */
    @Test(priority = 7, groups = "wso2.is", description = "test getAllLocalClaimUris operation")
    public void testGetAllLocalClaimUris() throws Exception {
        String[] claimUris = idpMgtServiceClient.getAllLocalClaimUris();
        Assert.assertNotNull(claimUris, "claim uri retrieval failed");
        log.info("Local claim uris:");
        //check for default claim uris
        if (claimUris.length < 28) {
            Assert.fail("Claim uri retrieval failed");
        }
    }

    /**
     * Updates the resident IdP's realm, enable/primary flags, and SAML SSO
     * entity id, after first capturing the current values so {@link #atEnd()}
     * can restore them.
     */
    @Test(priority = 7, groups = "wso2.is", description = "test updateResidentIdP operation")
    public void testUpdateResidentIdP() throws Exception {
        String samlEntityId = "samlssoIdp";
        String residentIdpRealm = "testHomeRealm";
        IdentityProvider residentProvider = idpMgtServiceClient.getResidentIdP();
        //get default value
        residentIdpEnable = residentProvider.getEnable();
        residentIdpPrimary = residentProvider.getPrimary();
        residentIdpName = residentProvider.getIdentityProviderName();
        residentIDPDefaultRealm = residentProvider.getHomeRealmId();
        IdentityProvider identityProvider = new IdentityProvider();
        identityProvider.setEnable(true);
        identityProvider.setPrimary(true);
        identityProvider.setIdentityProviderName("LOCAL");
        // NOTE(review): literal duplicates residentIdpRealm declared above —
        // consider using the variable so the value and the assertion below can't drift.
        identityProvider.setHomeRealmId("testHomeRealm");
        FederatedAuthenticatorConfig samlFedAuthn = new FederatedAuthenticatorConfig();
        samlFedAuthn.setName(SAML2SSO_NAME);
        Property[] properties = new Property[1];
        Property property = new Property();
        property.setName(SAML2SSO_IDP_ENTITY_ID);
        property.setValue(samlEntityId);
        properties[0] = property;
        samlFedAuthn.setProperties(properties);
        FederatedAuthenticatorConfig[] federatedAuthenticators = new FederatedAuthenticatorConfig[1];
        federatedAuthenticators[0] = samlFedAuthn;
        identityProvider.setFederatedAuthenticatorConfigs(federatedAuthenticators);
        idpMgtServiceClient.updateResidentIdP(identityProvider);
        //check changes
        IdentityProvider changedResidentIdp = idpMgtServiceClient.getResidentIdP();
        Assert.assertNotNull(changedResidentIdp, "Resident idp retrieval failed");
        Assert.assertEquals(changedResidentIdp.getHomeRealmId(), residentIdpRealm);
        Assert.assertEquals(changedResidentIdp.getEnable(), true, "Resident idp enable failed");
        Assert.assertEquals(changedResidentIdp.getPrimary(), true, "Resident idp primary failed");
        boolean found = false;
        // Locate the samlsso authenticator and confirm its IdPEntityId property took the new value.
        for (FederatedAuthenticatorConfig fedConfig : changedResidentIdp.getFederatedAuthenticatorConfigs()) {
            if (fedConfig.getName().equals(SAML2SSO_NAME)) {
                for (Property prop : fedConfig.getProperties()) {
                    if (prop.getName().equals(SAML2SSO_IDP_ENTITY_ID)) {
                        found = true;
                        Assert.assertEquals(prop.getValue(), samlEntityId, "Updating federated authenticator property failed");
                        break;
                    }
                }
                break;
            }
        }
        Assert.assertTrue(found, "Resident idp saml sso properties not found");
    }

    /**
     * Deletes the (renamed) test IdP and verifies it can no longer be fetched,
     * leaving the server without suite-created providers.
     */
    @Test(priority = 8, groups = "wso2.is", description = "test deleteIdP operation")
    public void testDeleteIdP() throws Exception {
        idpMgtServiceClient.deleteIdP(updatedTestIdpName);
        IdentityProvider idp = idpMgtServiceClient.getIdPByName(updatedTestIdpName);
        Assert.assertNull(idp, "Deleting idp failed");
    }
}
package pl.pft.addressbook.model;

import com.google.gson.annotations.Expose;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamOmitField;
import org.hibernate.annotations.Type;

import javax.persistence.*;
import java.io.File;

/**
 * Model of a single address-book contact, mapped to the {@code addressbook}
 * table via JPA/Hibernate, serialized to XML via XStream ({@code <contact>})
 * and to JSON via Gson ({@code @Expose} fields only).
 *
 * Instances are built fluently through the {@code with*} methods. Equality and
 * hashing cover id, names, address, the three emails, and the three phones;
 * the transient aggregates (allEmails, allPhones), group, and photo are
 * deliberately excluded, matching the original contract.
 */
@XStreamAlias("contact")
@Entity
@Table(name = "addressbook")
public class ContactData {

    // MAX_VALUE acts as the "no database id assigned yet" sentinel.
    @XStreamOmitField
    @Id
    @Column(name = "id")
    private int id = Integer.MAX_VALUE;

    @Expose
    @Column(name = "firstname")
    private String firstname;

    @Expose
    @Column(name = "lastname")
    private String lastname;

    @Expose
    @Column(name = "address")
    @Type(type = "text")
    private String address;

    @Expose
    @Column(name = "email")
    @Type(type = "text")
    private String email;

    @Expose
    @Column(name = "email2")
    @Type(type = "text")
    private String email2;

    @Expose
    @Column(name = "email3")
    @Type(type = "text")
    private String email3;

    // UI-level aggregate of email/email2/email3; not persisted.
    @Transient
    private String allEmails;

    @Expose
    @Column(name = "home")
    @Type(type = "text")
    private String homePhone;

    @Expose
    @Column(name = "mobile")
    @Type(type = "text")
    private String mobilePhone;

    @Expose
    @Column(name = "work")
    @Type(type = "text")
    private String workPhone;

    // UI-level aggregate of the three phone numbers; not persisted.
    @Transient
    private String allPhones;

    @Expose
    @Transient
    private String group;

    // Stored as a path string; exposed as java.io.File through getPhoto().
    @Column(name = "photo")
    @Type(type = "text")
    private String photo;

    public ContactData withId(int id) {
        this.id = id;
        return this;
    }

    public ContactData withFirstname(String firstname) {
        this.firstname = firstname;
        return this;
    }

    public ContactData withLastname(String lastname) {
        this.lastname = lastname;
        return this;
    }

    public ContactData withAddress(String address) {
        this.address = address;
        return this;
    }

    public ContactData withEmail(String email) {
        this.email = email;
        return this;
    }

    public ContactData withEmail2(String email2) {
        this.email2 = email2;
        return this;
    }

    public ContactData withEmail3(String email3) {
        this.email3 = email3;
        return this;
    }

    public ContactData withAllEmails(String allEmails) {
        this.allEmails = allEmails;
        return this;
    }

    public ContactData withHomePhone(String homePhone) {
        this.homePhone = homePhone;
        return this;
    }

    public ContactData withMobilePhone(String mobilePhone) {
        this.mobilePhone = mobilePhone;
        return this;
    }

    public ContactData withWorkPhone(String workPhone) {
        this.workPhone = workPhone;
        return this;
    }

    public ContactData withAllPhones(String allPhones) {
        this.allPhones = allPhones;
        return this;
    }

    public ContactData withGroup(String group) {
        this.group = group;
        return this;
    }

    /**
     * Stores the photo's path. Requires a non-null {@code photo}
     * (throws NullPointerException otherwise, as before).
     */
    public ContactData withPhoto(File photo) {
        this.photo = photo.getPath();
        return this;
    }

    public int getId() {
        return id;
    }

    public String getFirstname() {
        return firstname;
    }

    public String getLastname() {
        return lastname;
    }

    public String getAddress() {
        return address;
    }

    public String getEmail() {
        return email;
    }

    public String getEmail2() {
        return email2;
    }

    public String getEmail3() {
        return email3;
    }

    public String getAllEmails() {
        return allEmails;
    }

    public String getHomePhone() {
        return homePhone;
    }

    public String getMobilePhone() {
        return mobilePhone;
    }

    public String getWorkPhone() {
        return workPhone;
    }

    public String getAllPhones() {
        return allPhones;
    }

    public String getGroup() {
        return group;
    }

    /** @return the photo as a File, or null when no photo path is stored. */
    public File getPhoto() {
        if (photo == null) {
            return null;
        } else {
            return new File(photo);
        }
    }

    @Override
    public String toString() {
        return "ContactData{" +
                "id=" + id +
                ", firstname='" + firstname + '\'' +
                ", lastname='" + lastname + '\'' +
                ", address='" + address + '\'' +
                ", email='" + email + '\'' +
                ", email2='" + email2 + '\'' +
                ", email3='" + email3 + '\'' +
                ", homePhone='" + homePhone + '\'' +
                ", mobilePhone='" + mobilePhone + '\'' +
                ", workPhone='" + workPhone + '\'' +
                ", group='" + group + '\'' +
                '}';
    }

    /**
     * Equality over the persisted identity fields only (id, names, address,
     * emails, phones) — same field set as before, now expressed with the
     * null-safe java.util.Objects helpers instead of ternary chains.
     * Objects is used fully qualified to avoid touching the import block.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        ContactData that = (ContactData) o;

        return id == that.id
                && java.util.Objects.equals(firstname, that.firstname)
                && java.util.Objects.equals(lastname, that.lastname)
                && java.util.Objects.equals(address, that.address)
                && java.util.Objects.equals(email, that.email)
                && java.util.Objects.equals(email2, that.email2)
                && java.util.Objects.equals(email3, that.email3)
                && java.util.Objects.equals(homePhone, that.homePhone)
                && java.util.Objects.equals(mobilePhone, that.mobilePhone)
                && java.util.Objects.equals(workPhone, that.workPhone);
    }

    /**
     * Hash over the same fields as equals(). Objects.hashCode(x) returns 0 for
     * null, so the values produced are identical to the original manual
     * ternary implementation.
     */
    @Override
    public int hashCode() {
        int result = id;
        result = 31 * result + java.util.Objects.hashCode(firstname);
        result = 31 * result + java.util.Objects.hashCode(lastname);
        result = 31 * result + java.util.Objects.hashCode(address);
        result = 31 * result + java.util.Objects.hashCode(email);
        result = 31 * result + java.util.Objects.hashCode(email2);
        result = 31 * result + java.util.Objects.hashCode(email3);
        result = 31 * result + java.util.Objects.hashCode(homePhone);
        result = 31 * result + java.util.Objects.hashCode(mobilePhone);
        result = 31 * result + java.util.Objects.hashCode(workPhone);
        return result;
    }
}
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.certificatemanager; import org.w3c.dom.*; import java.net.*; import java.util.*; import java.util.Map.Entry; import org.apache.commons.logging.*; import com.amazonaws.*; import com.amazonaws.auth.*; import com.amazonaws.handlers.*; import com.amazonaws.http.*; import com.amazonaws.internal.*; import com.amazonaws.metrics.*; import com.amazonaws.regions.*; import com.amazonaws.transform.*; import com.amazonaws.util.*; import com.amazonaws.util.json.*; import com.amazonaws.util.AWSRequestMetrics.Field; import com.amazonaws.annotation.ThreadSafe; import com.amazonaws.services.certificatemanager.model.*; import com.amazonaws.services.certificatemanager.model.transform.*; /** * Client for accessing ACM. All service calls made using this client are * blocking, and will not return until the service call completes. * <p> * <fullname>AWS Certificate Manager</fullname> * <p> * Welcome to the AWS Certificate Manager (ACM) Command Reference. This guide * provides descriptions, syntax, and usage examples for each ACM command. You * can use AWS Certificate Manager to request ACM Certificates for your * AWS-based websites and applications. For general information about using ACM * and for more information about using the console, see the <a * href="http://docs.aws.amazon.com/acm/latest/userguide/acm-overview.html">AWS * Certificate Manager User Guide</a>. 
For more information about using the ACM * API, see the <a * href="http://docs.aws.amazon.com/acm/latest/APIReference/Welcome.html"> AWS * Certificate Manager API Reference</a>. * </p> */ @ThreadSafe public class AWSCertificateManagerClient extends AmazonWebServiceClient implements AWSCertificateManager { /** Provider for AWS credentials. */ private AWSCredentialsProvider awsCredentialsProvider; private static final Log log = LogFactory .getLog(AWSCertificateManager.class); /** Default signing name for the service. */ private static final String DEFAULT_SIGNING_NAME = "acm"; /** The region metadata service name for computing region endpoints. */ private static final String DEFAULT_ENDPOINT_PREFIX = "acm"; /** * Client configuration factory providing ClientConfigurations tailored to * this client */ protected static final ClientConfigurationFactory configFactory = new ClientConfigurationFactory(); /** * List of exception unmarshallers for all ACM exceptions. */ protected List<JsonErrorUnmarshallerV2> jsonErrorUnmarshallers = new ArrayList<JsonErrorUnmarshallerV2>(); /** * Constructs a new client to invoke service methods on ACM. A credentials * provider chain will be used that searches for credentials in this order: * <ul> * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li> * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li> * <li>Instance profile credentials delivered through the Amazon EC2 * metadata service</li> * </ul> * * <p> * All service calls made using this new client object are blocking, and * will not return until the service call completes. * * @see DefaultAWSCredentialsProviderChain */ public AWSCertificateManagerClient() { this(new DefaultAWSCredentialsProviderChain(), configFactory .getConfig()); } /** * Constructs a new client to invoke service methods on ACM. 
A credentials * provider chain will be used that searches for credentials in this order: * <ul> * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li> * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li> * <li>Instance profile credentials delivered through the Amazon EC2 * metadata service</li> * </ul> * * <p> * All service calls made using this new client object are blocking, and * will not return until the service call completes. * * @param clientConfiguration * The client configuration options controlling how this client * connects to ACM (ex: proxy settings, retry counts, etc.). * * @see DefaultAWSCredentialsProviderChain */ public AWSCertificateManagerClient(ClientConfiguration clientConfiguration) { this(new DefaultAWSCredentialsProviderChain(), clientConfiguration); } /** * Constructs a new client to invoke service methods on ACM using the * specified AWS account credentials. * * <p> * All service calls made using this new client object are blocking, and * will not return until the service call completes. * * @param awsCredentials * The AWS credentials (access key ID and secret key) to use when * authenticating with AWS services. */ public AWSCertificateManagerClient(AWSCredentials awsCredentials) { this(awsCredentials, configFactory.getConfig()); } /** * Constructs a new client to invoke service methods on ACM using the * specified AWS account credentials and client configuration options. * * <p> * All service calls made using this new client object are blocking, and * will not return until the service call completes. * * @param awsCredentials * The AWS credentials (access key ID and secret key) to use when * authenticating with AWS services. * @param clientConfiguration * The client configuration options controlling how this client * connects to ACM (ex: proxy settings, retry counts, etc.). 
     */
    public AWSCertificateManagerClient(AWSCredentials awsCredentials,
            ClientConfiguration clientConfiguration) {
        super(clientConfiguration);
        // Wrap the static credentials so the rest of the client can treat all
        // credential sources uniformly as a provider.
        this.awsCredentialsProvider = new StaticCredentialsProvider(
                awsCredentials);
        init();
    }

    /**
     * Constructs a new client to invoke service methods on ACM using the
     * specified AWS account credentials provider and the default client
     * configuration.
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     */
    public AWSCertificateManagerClient(
            AWSCredentialsProvider awsCredentialsProvider) {
        this(awsCredentialsProvider, configFactory.getConfig());
    }

    /**
     * Constructs a new client to invoke service methods on ACM using the
     * specified AWS account credentials provider and client configuration
     * options.
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     * @param clientConfiguration
     *        The client configuration options controlling how this client
     *        connects to ACM (ex: proxy settings, retry counts, etc.).
     */
    public AWSCertificateManagerClient(
            AWSCredentialsProvider awsCredentialsProvider,
            ClientConfiguration clientConfiguration) {
        this(awsCredentialsProvider, clientConfiguration, null);
    }

    /**
     * Constructs a new client to invoke service methods on ACM using the
     * specified AWS account credentials provider, client configuration
     * options, and request metric collector.
     *
     * <p>
     * All service calls made using this new client object are blocking, and
     * will not return until the service call completes.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     * @param clientConfiguration
     *        The client configuration options controlling how this client
     *        connects to ACM (ex: proxy settings, retry counts, etc.).
     * @param requestMetricCollector
     *        optional request metric collector
     */
    public AWSCertificateManagerClient(
            AWSCredentialsProvider awsCredentialsProvider,
            ClientConfiguration clientConfiguration,
            RequestMetricCollector requestMetricCollector) {
        super(clientConfiguration, requestMetricCollector);
        this.awsCredentialsProvider = awsCredentialsProvider;
        init();
    }

    /**
     * Shared constructor tail: registers one unmarshaller per modeled ACM
     * exception (matched by error code), wires the signing/endpoint metadata,
     * and installs any classpath-declared request handler chains.
     */
    private void init() {
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.InvalidTagException.class,
                        "InvalidTagException"));
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.TooManyTagsException.class,
                        "TooManyTagsException"));
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.ResourceInUseException.class,
                        "ResourceInUseException"));
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.ResourceNotFoundException.class,
                        "ResourceNotFoundException"));
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.RequestInProgressException.class,
                        "RequestInProgressException"));
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.InvalidStateException.class,
                        "InvalidStateException"));
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.InvalidDomainValidationOptionsException.class,
                        "InvalidDomainValidationOptionsException"));
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.InvalidArnException.class,
                        "InvalidArnException"));
        jsonErrorUnmarshallers
                .add(new JsonErrorUnmarshallerV2(
                        com.amazonaws.services.certificatemanager.model.LimitExceededException.class,
                        "LimitExceededException"));
        // Catch-all for error codes with no modeled exception; must be last.
        jsonErrorUnmarshallers
                .add(JsonErrorUnmarshallerV2.DEFAULT_UNMARSHALLER);

        setServiceNameIntern(DEFAULT_SIGNING_NAME);
        setEndpointPrefix(DEFAULT_ENDPOINT_PREFIX);
        // calling this.setEndPoint(...) will also modify the signer accordingly
        setEndpoint("https://acm.us-east-1.amazonaws.com");
        HandlerChainFactory chainFactory = new HandlerChainFactory();
        requestHandler2s
                .addAll(chainFactory
                        .newRequestHandlerChain("/com/amazonaws/services/certificatemanager/request.handlers"));
        requestHandler2s
                .addAll(chainFactory
                        .newRequestHandler2Chain("/com/amazonaws/services/certificatemanager/request.handler2s"));
    }

    /**
     * <p>
     * Adds one or more tags to an ACM Certificate. Tags are labels that you
     * can use to identify and organize your AWS resources. Each tag consists
     * of a <code>key</code> and an optional <code>value</code>. You specify
     * the certificate on input by its Amazon Resource Name (ARN), and the tag
     * by a key-value pair.
     * </p>
     * <p>
     * You can apply the same tag to multiple certificates or resources to
     * express a shared characteristic or relationship among them — for
     * example, tagging an ACM Certificate and an Elastic Load Balancing load
     * balancer used by the same website. For more information, see <a
     * href="http://docs.aws.amazon.com/acm/latest/userguide/tags.html">Tagging
     * ACM Certificates</a>.
     * </p>
     * <p>
     * To remove one or more tags, use the <a>RemoveTagsFromCertificate</a>
     * action. To view all of the tags that have been applied to the
     * certificate, use the <a>ListTagsForCertificate</a> action.
* </p> * * @param addTagsToCertificateRequest * @throws ResourceNotFoundException * The specified certificate cannot be found in the caller's * account, or the caller's account cannot be found. * @throws InvalidArnException * The requested Amazon Resource Name (ARN) does not refer to an * existing resource. * @throws InvalidTagException * One or both of the values that make up the key-value pair is not * valid. For example, you cannot specify a tag value that begins * with <code>aws:</code>. * @throws TooManyTagsException * The request contains too many tags. Try the request again with * fewer tags. * @sample AWSCertificateManager.AddTagsToCertificate */ @Override public void addTagsToCertificate( AddTagsToCertificateRequest addTagsToCertificateRequest) { ExecutionContext executionContext = createExecutionContext(addTagsToCertificateRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<AddTagsToCertificateRequest> request = null; Response<Void> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new AddTagsToCertificateRequestMarshaller() .marshall(super .beforeMarshalling(addTagsToCertificateRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } JsonResponseHandler<Void> responseHandler = SdkJsonProtocolFactory .createResponseHandler(null, false); responseHandler.setIsPayloadJson(true); invoke(request, responseHandler, executionContext); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Deletes an ACM Certificate and its associated private key. 
     * If this action succeeds, the certificate no longer appears in the list
     * of ACM Certificates that can be displayed by calling the
     * <a>ListCertificates</a> action or be retrieved by calling the
     * <a>GetCertificate</a> action. The certificate will not be available for
     * use by other AWS services.
     * </p>
     * <note>You cannot delete an ACM Certificate that is being used by
     * another AWS service. To delete a certificate that is in use, the
     * certificate association must first be removed.</note>
     *
     * @param deleteCertificateRequest
     * @throws ResourceNotFoundException
     *         The specified certificate cannot be found in the caller's
     *         account, or the caller's account cannot be found.
     * @throws ResourceInUseException
     *         The certificate is in use by another AWS service in the
     *         caller's account. Remove the association and try again.
     * @throws InvalidArnException
     *         The requested Amazon Resource Name (ARN) does not refer to an
     *         existing resource.
     * @sample AWSCertificateManager.DeleteCertificate
     */
    @Override
    public void deleteCertificate(
            DeleteCertificateRequest deleteCertificateRequest) {
        ExecutionContext executionContext = createExecutionContext(deleteCertificateRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext
                .getAwsRequestMetrics();
        // ClientExecuteTime spans marshalling + transport; closed in finally.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteCertificateRequest> request = null;
        Response<Void> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteCertificateRequestMarshaller()
                        .marshall(super
                                .beforeMarshalling(deleteCertificateRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            // Void operation: no unmarshaller, response payload is discarded.
            JsonResponseHandler<Void> responseHandler = SdkJsonProtocolFactory
                    .createResponseHandler(null, false);
            responseHandler.setIsPayloadJson(true);
            invoke(request, responseHandler, executionContext);

        } finally {
            endClientExecution(awsRequestMetrics, request, response);
        }
    }

    /**
     * <p>
     * Returns a list of the fields contained in the specified ACM
     * Certificate. For example, this action returns the certificate status, a
     * flag that indicates whether the certificate is associated with any
     * other AWS service, and the date at which the certificate request was
     * created. You specify the ACM Certificate on input by its Amazon
     * Resource Name (ARN).
     * </p>
     *
     * @param describeCertificateRequest
     * @return Result of the DescribeCertificate operation returned by the
     *         service.
     * @throws ResourceNotFoundException
     *         The specified certificate cannot be found in the caller's
     *         account, or the caller's account cannot be found.
     * @throws InvalidArnException
     *         The requested Amazon Resource Name (ARN) does not refer to an
     *         existing resource.
     * @sample AWSCertificateManager.DescribeCertificate
     */
    @Override
    public DescribeCertificateResult describeCertificate(
            DescribeCertificateRequest describeCertificateRequest) {
        ExecutionContext executionContext = createExecutionContext(describeCertificateRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext
                .getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DescribeCertificateRequest> request = null;
        Response<DescribeCertificateResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DescribeCertificateRequestMarshaller()
                        .marshall(super
                                .beforeMarshalling(describeCertificateRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            JsonResponseHandler<DescribeCertificateResult> responseHandler = SdkJsonProtocolFactory
                    .createResponseHandler(
                            new DescribeCertificateResultJsonUnmarshaller(),
                            false);
            responseHandler.setIsPayloadJson(true);
            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            endClientExecution(awsRequestMetrics, request, response);
        }
    }

    /**
     * <p>
     * Retrieves an ACM Certificate and certificate chain for the certificate
     * specified by an ARN. The chain is an ordered list of certificates that
     * contains the root certificate, intermediate certificates of subordinate
     * CAs, and the ACM Certificate. The certificate and certificate chain are
     * base64 encoded. If you want to decode the certificate chain to see the
     * individual certificate fields, you can use OpenSSL.
     * </p>
     * <note>Currently, ACM Certificates can be used only with Elastic Load
     * Balancing and Amazon CloudFront.</note>
     *
     * @param getCertificateRequest
     * @return Result of the GetCertificate operation returned by the service.
     * @throws ResourceNotFoundException
     *         The specified certificate cannot be found in the caller's
     *         account, or the caller's account cannot be found.
     * @throws RequestInProgressException
     *         The certificate request is in process and the certificate in
     *         your account has not yet been issued.
     * @throws InvalidArnException
     *         The requested Amazon Resource Name (ARN) does not refer to an
     *         existing resource.
* @sample AWSCertificateManager.GetCertificate */ @Override public GetCertificateResult getCertificate( GetCertificateRequest getCertificateRequest) { ExecutionContext executionContext = createExecutionContext(getCertificateRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<GetCertificateRequest> request = null; Response<GetCertificateResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new GetCertificateRequestMarshaller().marshall(super .beforeMarshalling(getCertificateRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } JsonResponseHandler<GetCertificateResult> responseHandler = SdkJsonProtocolFactory .createResponseHandler( new GetCertificateResultJsonUnmarshaller(), false); responseHandler.setIsPayloadJson(true); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Retrieves a list of the ACM Certificate ARNs, and the domain name for * each ARN, owned by the calling account. You can filter the list based on * the <code>CertificateStatuses</code> parameter, and you can display up to * <code>MaxItems</code> certificates at one time. If you have more than * <code>MaxItems</code> certificates, use the <code>NextToken</code> marker * from the response object in your next call to the * <code>ListCertificates</code> action to retrieve the next set of * certificate ARNs. * </p> * * @param listCertificatesRequest * @return Result of the ListCertificates operation returned by the service. 
* @sample AWSCertificateManager.ListCertificates */ @Override public ListCertificatesResult listCertificates( ListCertificatesRequest listCertificatesRequest) { ExecutionContext executionContext = createExecutionContext(listCertificatesRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListCertificatesRequest> request = null; Response<ListCertificatesResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListCertificatesRequestMarshaller() .marshall(super .beforeMarshalling(listCertificatesRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } JsonResponseHandler<ListCertificatesResult> responseHandler = SdkJsonProtocolFactory .createResponseHandler( new ListCertificatesResultJsonUnmarshaller(), false); responseHandler.setIsPayloadJson(true); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists the tags that have been applied to the ACM Certificate. Use the * certificate ARN to specify the certificate. To add a tag to an ACM * Certificate, use the <a>AddTagsToCertificate</a> action. To delete a tag, * use the <a>RemoveTagsFromCertificate</a> action. * </p> * * @param listTagsForCertificateRequest * @return Result of the ListTagsForCertificate operation returned by the * service. * @throws ResourceNotFoundException * The specified certificate cannot be found in the caller's * account, or the caller's account cannot be found. * @throws InvalidArnException * The requested Amazon Resource Name (ARN) does not refer to an * existing resource. 
* @sample AWSCertificateManager.ListTagsForCertificate */ @Override public ListTagsForCertificateResult listTagsForCertificate( ListTagsForCertificateRequest listTagsForCertificateRequest) { ExecutionContext executionContext = createExecutionContext(listTagsForCertificateRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListTagsForCertificateRequest> request = null; Response<ListTagsForCertificateResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListTagsForCertificateRequestMarshaller() .marshall(super .beforeMarshalling(listTagsForCertificateRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } JsonResponseHandler<ListTagsForCertificateResult> responseHandler = SdkJsonProtocolFactory .createResponseHandler( new ListTagsForCertificateResultJsonUnmarshaller(), false); responseHandler.setIsPayloadJson(true); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Remove one or more tags from an ACM Certificate. A tag consists of a * key-value pair. If you do not specify the value portion of the tag when * calling this function, the tag will be removed regardless of value. If * you specify a value, the tag is removed only if it is associated with the * specified value. * </p> * <p> * To add tags to a certificate, use the <a>AddTagsToCertificate</a> action. * To view all of the tags that have been applied to a specific ACM * Certificate, use the <a>ListTagsForCertificate</a> action. 
* </p> * * @param removeTagsFromCertificateRequest * @throws ResourceNotFoundException * The specified certificate cannot be found in the caller's * account, or the caller's account cannot be found. * @throws InvalidArnException * The requested Amazon Resource Name (ARN) does not refer to an * existing resource. * @throws InvalidTagException * One or both of the values that make up the key-value pair is not * valid. For example, you cannot specify a tag value that begins * with <code>aws:</code>. * @sample AWSCertificateManager.RemoveTagsFromCertificate */ @Override public void removeTagsFromCertificate( RemoveTagsFromCertificateRequest removeTagsFromCertificateRequest) { ExecutionContext executionContext = createExecutionContext(removeTagsFromCertificateRequest); AWSRequestMetrics awsRequestMetrics = executionContext .getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<RemoveTagsFromCertificateRequest> request = null; Response<Void> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new RemoveTagsFromCertificateRequestMarshaller() .marshall(super .beforeMarshalling(removeTagsFromCertificateRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } JsonResponseHandler<Void> responseHandler = SdkJsonProtocolFactory .createResponseHandler(null, false); responseHandler.setIsPayloadJson(true); invoke(request, responseHandler, executionContext); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Requests an ACM Certificate for use with other AWS services. To request * an ACM Certificate, you must specify the fully qualified domain name * (FQDN) for your site. You can also specify additional FQDNs if users can * reach your site by using other names. 
     * For each domain name you specify, email is sent to the domain owner to
     * request approval to issue the certificate. After receiving approval
     * from the domain owner, the ACM Certificate is issued. For more
     * information, see the <a
     * href="http://docs.aws.amazon.com/acm/latest/userguide/overview.html">
     * AWS Certificate Manager User Guide</a>.
     * </p>
     *
     * @param requestCertificateRequest
     * @return Result of the RequestCertificate operation returned by the
     *         service.
     * @throws LimitExceededException
     *         An ACM limit has been exceeded. For example, you may have input
     *         more domains than are allowed or you've requested too many
     *         certificates for your account. See the exception message
     *         returned by ACM to determine which limit you have violated. For
     *         more information about ACM limits, see the <a href=
     *         "http://docs.aws.amazon.com/acm/latest/userguide/acm-limits.html"
     *         >Limits</a> topic.
     * @throws InvalidDomainValidationOptionsException
     *         One or more values in the <a>DomainValidationOption</a>
     *         structure is incorrect.
     * @sample AWSCertificateManager.RequestCertificate
     */
    @Override
    public RequestCertificateResult requestCertificate(
            RequestCertificateRequest requestCertificateRequest) {
        ExecutionContext executionContext = createExecutionContext(requestCertificateRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext
                .getAwsRequestMetrics();
        // ClientExecuteTime spans marshalling + transport; closed in finally.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<RequestCertificateRequest> request = null;
        Response<RequestCertificateResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new RequestCertificateRequestMarshaller()
                        .marshall(super
                                .beforeMarshalling(requestCertificateRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            JsonResponseHandler<RequestCertificateResult> responseHandler = SdkJsonProtocolFactory
                    .createResponseHandler(
                            new RequestCertificateResultJsonUnmarshaller(),
                            false);
            responseHandler.setIsPayloadJson(true);
            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            endClientExecution(awsRequestMetrics, request, response);
        }
    }

    /**
     * <p>
     * Resends the email that requests domain ownership validation. The domain
     * owner or an authorized representative must approve the ACM Certificate
     * before it can be issued. The certificate can be approved by clicking a
     * link in the mail to navigate to the Amazon certificate approval website
     * and then clicking <b>I Approve</b>. However, the validation email can
     * be blocked by spam filters. Therefore, if you do not receive the
     * original mail, you can request that the mail be resent within 72 hours
     * of requesting the ACM Certificate. If more than 72 hours have elapsed
     * since your original request or since your last attempt to resend
     * validation mail, you must request a new certificate.
     * </p>
     *
     * @param resendValidationEmailRequest
     * @throws ResourceNotFoundException
     *         The specified certificate cannot be found in the caller's
     *         account, or the caller's account cannot be found.
     * @throws InvalidStateException
     *         Processing has reached an invalid state. For example, this
     *         exception can occur if the specified domain is not using email
     *         validation, or the current certificate status does not permit
     *         the requested operation. See the exception message returned by
     *         ACM to determine which state is not valid.
     * @throws InvalidArnException
     *         The requested Amazon Resource Name (ARN) does not refer to an
     *         existing resource.
     * @throws InvalidDomainValidationOptionsException
     *         One or more values in the <a>DomainValidationOption</a>
     *         structure is incorrect.
     * @sample AWSCertificateManager.ResendValidationEmail
     */
    @Override
    public void resendValidationEmail(
            ResendValidationEmailRequest resendValidationEmailRequest) {
        ExecutionContext executionContext = createExecutionContext(resendValidationEmailRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext
                .getAwsRequestMetrics();
        // ClientExecuteTime spans marshalling + transport; closed in finally.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<ResendValidationEmailRequest> request = null;
        Response<Void> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new ResendValidationEmailRequestMarshaller()
                        .marshall(super
                                .beforeMarshalling(resendValidationEmailRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            // Void operation: no unmarshaller, response payload is discarded.
            JsonResponseHandler<Void> responseHandler = SdkJsonProtocolFactory
                    .createResponseHandler(null, false);
            responseHandler.setIsPayloadJson(true);
            invoke(request, responseHandler, executionContext);

        } finally {
            endClientExecution(awsRequestMetrics, request, response);
        }
    }

    /**
     * Returns additional metadata for a previously executed successful
     * request, typically used for debugging issues where a service isn't
     * acting as expected. This data isn't considered part of the result data
     * returned by an operation, so it's available through this separate,
     * diagnostic interface.
     * <p>
     * Response metadata is only cached for a limited period of time, so if
     * you need to access this extra diagnostic information for an executed
     * request, you should use this method to retrieve it as soon as possible
     * after executing the request.
     *
     * @param request
     *        The originally executed request
     *
     * @return The response metadata for the specified request, or null if
     *         none is available.
     */
    public ResponseMetadata getCachedResponseMetadata(
            AmazonWebServiceRequest request) {
        return client.getResponseMetadataForRequest(request);
    }

    /**
     * Normal invoke with authentication. Credentials are required and may be
     * overridden at the request level.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(
            Request<Y> request,
            HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext) {
        // Request-level credentials, if present, win over the client's provider.
        executionContext.setCredentialsProvider(CredentialUtils
                .getCredentialsProvider(request.getOriginalRequest(),
                        awsCredentialsProvider));

        return doInvoke(request, responseHandler, executionContext);
    }

    /**
     * Invoke with no authentication. Credentials are not required and any
     * credentials set on the client or request will be ignored for this
     * operation.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> anonymousInvoke(
            Request<Y> request,
            HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext) {

        return doInvoke(request, responseHandler, executionContext);
    }

    /**
     * Invoke the request using the http client. Assumes credentials (or lack
     * thereof) have been configured in the ExecutionContext beforehand.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> doInvoke(
            Request<Y> request,
            HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext) {
        request.setEndpoint(endpoint);
        request.setTimeOffset(timeOffset);

        JsonErrorResponseHandlerV2 errorResponseHandler = SdkJsonProtocolFactory
                .createErrorResponseHandler(jsonErrorUnmarshallers, false);

        return client.execute(request, responseHandler, errorResponseHandler,
                executionContext);
    }

}
/* * The MIT License * * Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi, * Seiji Sogabe, Stephen Connolly * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson.model; import com.infradna.tool.bridge_method_injector.WithBridgeMethods; import hudson.Extension; import hudson.ExtensionPoint; import hudson.FilePath; import hudson.FileSystemProvisioner; import hudson.Launcher; import hudson.Util; import hudson.model.Descriptor.FormException; import hudson.model.Queue.Task; import hudson.model.labels.LabelAtom; import hudson.model.queue.CauseOfBlockage; import hudson.remoting.Callable; import hudson.remoting.VirtualChannel; import hudson.security.ACL; import hudson.security.AccessControlled; import hudson.security.Permission; import hudson.slaves.Cloud; import hudson.slaves.ComputerListener; import hudson.slaves.NodeDescriptor; import hudson.slaves.NodeProperty; import hudson.slaves.NodePropertyDescriptor; import hudson.slaves.OfflineCause; import hudson.util.ClockDifference; import hudson.util.DescribableList; import hudson.util.EnumConverter; import hudson.util.TagCloud; import hudson.util.TagCloud.WeightFunction; import java.io.IOException; import java.lang.reflect.Type; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.logging.Logger; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import jenkins.model.Jenkins; import jenkins.util.SystemProperties; import jenkins.util.io.OnMaster; import net.sf.json.JSONObject; import org.acegisecurity.Authentication; import org.jvnet.localizer.Localizable; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.ProtectedExternally; import org.kohsuke.stapler.BindInterceptor; import org.kohsuke.stapler.Stapler; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.export.Exported; import org.kohsuke.stapler.export.ExportedBean; /** * Base type of Jenkins agents (although in practice, you probably extend {@link Slave} to define a new agent type). 
* * <p> * As a special case, {@link Jenkins} extends from here. * * <p> * Nodes are persisted objects that capture user configurations, and instances get thrown away and recreated whenever * the configuration changes. Running state of nodes are captured by {@link Computer}s. * * <p> * There is no URL binding for {@link Node}. {@link Computer} and {@link TransientComputerActionFactory} must * be used to associate new {@link Action}s to agents. * * @author Kohsuke Kawaguchi * @see NodeDescriptor * @see Computer */ @ExportedBean public abstract class Node extends AbstractModelObject implements ReconfigurableDescribable<Node>, ExtensionPoint, AccessControlled, OnMaster, Saveable { private static final Logger LOGGER = Logger.getLogger(Node.class.getName()); /** @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-46652">JENKINS-46652</a> */ public static /* not final */ boolean SKIP_BUILD_CHECK_ON_FLYWEIGHTS = SystemProperties.getBoolean(Node.class.getName() + ".SKIP_BUILD_CHECK_ON_FLYWEIGHTS", true); /** * Newly copied agents get this flag set, so that Jenkins doesn't try to start/remove this node until its configuration * is saved once. */ protected volatile transient boolean holdOffLaunchUntilSave; public String getDisplayName() { return getNodeName(); // default implementation } public String getSearchUrl() { Computer c = toComputer(); if (c != null) { return c.getUrl(); } return "computer/" + Util.rawEncode(getNodeName()); } public boolean isHoldOffLaunchUntilSave() { return holdOffLaunchUntilSave; } /** * {@inheritDoc} * @since 1.635. 
*/ @Override public void save() throws IOException { // this should be a no-op unless this node instance is the node instance in Jenkins' list of nodes // thus where Jenkins.getInstance() == null there is no list of nodes, so we do a no-op // Nodes.updateNode(n) will only persist the node record if the node instance is in the list of nodes // so either path results in the same behaviour: the node instance is only saved if it is in the list of nodes // for all other cases we do not know where to persist the node record and hence we follow the default // no-op of a Saveable.NOOP final Jenkins jenkins = Jenkins.getInstanceOrNull(); if (jenkins != null) { jenkins.updateNode(this); } } /** * Name of this node. * * @return * "" if this is master */ @Exported(visibility=999) @Nonnull public abstract String getNodeName(); /** * When the user clones a {@link Node}, Hudson uses this method to change the node name right after * the cloned {@link Node} object is instantiated. * * <p> * This method is never used for any other purpose, and as such for all practical intents and purposes, * the node name should be treated like immutable. * * @deprecated to indicate that this method isn't really meant to be called by random code. */ @Deprecated public abstract void setNodeName(String name); /** * Human-readable description of this node. */ @Exported public abstract String getNodeDescription(); /** * Returns a {@link Launcher} for executing programs on this node. * * <p> * The callee must call {@link Launcher#decorateFor(Node)} before returning to complete the decoration. */ public abstract Launcher createLauncher(TaskListener listener); /** * Returns the number of {@link Executor}s. * * This may be different from <code>getExecutors().size()</code> * because it takes time to adjust the number of executors. 
*/ @Exported public abstract int getNumExecutors(); /** * Returns {@link Mode#EXCLUSIVE} if this node is only available * for those jobs that exclusively specifies this node * as the assigned node. */ @Exported public abstract Mode getMode(); /** * Gets the corresponding {@link Computer} object. * * @return * this method can return null if there's no {@link Computer} object for this node, * such as when this node has no executors at all. */ @CheckForNull public final Computer toComputer() { AbstractCIBase ciBase = Jenkins.getInstance(); return ciBase.getComputer(this); } /** * Gets the current channel, if the node is connected and online, or null. * * This is just a convenience method for {@link Computer#getChannel()} with null check. */ @CheckForNull public final VirtualChannel getChannel() { Computer c = toComputer(); return c==null ? null : c.getChannel(); } /** * Creates a new {@link Computer} object that acts as the UI peer of this {@link Node}. * * Nobody but {@link Jenkins#updateComputerList()} should call this method. * @return Created instance of the computer. * Can be {@code null} if the {@link Node} implementation does not support it (e.g. {@link Cloud} agent). */ @CheckForNull @Restricted(ProtectedExternally.class) protected abstract Computer createComputer(); /** * Returns {@code true} if the node is accepting tasks. Needed to allow agents programmatic suspension of task * scheduling that does not overlap with being offline. Called by {@link Computer#isAcceptingTasks()}. * This method is distinct from {@link Computer#isAcceptingTasks()} as sometimes the {@link Node} concrete * class may not have control over the {@link hudson.model.Computer} concrete class associated with it. * * @return {@code true} if the node is accepting tasks. * @see Computer#isAcceptingTasks() * @since 1.586 */ public boolean isAcceptingTasks() { return true; } /** * Let Nodes be aware of the lifecycle of their own {@link Computer}. 
*/ @Extension public static class InternalComputerListener extends ComputerListener { @Override public void onOnline(Computer c, TaskListener listener) { Node node = c.getNode(); // At startup, we need to restore any previously in-effect temp offline cause. // We wait until the computer is started rather than getting the data to it sooner // so that the normal computer start up processing works as expected. if (node!= null && node.temporaryOfflineCause != null && node.temporaryOfflineCause != c.getOfflineCause()) { c.setTemporarilyOffline(true, node.temporaryOfflineCause); } } } private OfflineCause temporaryOfflineCause; /** * Enable a {@link Computer} to inform its node when it is taken * temporarily offline. */ void setTemporaryOfflineCause(OfflineCause cause) { try { if (temporaryOfflineCause != cause) { temporaryOfflineCause = cause; save(); } } catch (java.io.IOException e) { LOGGER.warning("Unable to complete save, temporary offline status will not be persisted: " + e.getMessage()); } } /** * Return the possibly empty tag cloud for the labels of this node. */ public TagCloud<LabelAtom> getLabelCloud() { return new TagCloud<LabelAtom>(getAssignedLabels(),new WeightFunction<LabelAtom>() { public float weight(LabelAtom item) { return item.getTiedJobCount(); } }); } /** * Returns the possibly empty set of labels that are assigned to this node, * including the automatic {@link #getSelfLabel() self label}, manually * assigned labels and dynamically assigned labels via the * {@link LabelFinder} extension point. * * This method has a side effect of updating the hudson-wide set of labels * and should be called after events that will change that - e.g. a agent * connecting. */ @Exported public Set<LabelAtom> getAssignedLabels() { Set<LabelAtom> r = Label.parse(getLabelString()); r.add(getSelfLabel()); r.addAll(getDynamicLabels()); return Collections.unmodifiableSet(r); } /** * Return all the labels assigned dynamically to this node. 
* This calls all the LabelFinder implementations with the node converts * the results into Labels. * @return HashSet<Label>. */ private HashSet<LabelAtom> getDynamicLabels() { HashSet<LabelAtom> result = new HashSet<LabelAtom>(); for (LabelFinder labeler : LabelFinder.all()) { // Filter out any bad(null) results from plugins // for compatibility reasons, findLabels may return LabelExpression and not atom. for (Label label : labeler.findLabels(this)) if (label instanceof LabelAtom) result.add((LabelAtom)label); } return result; } /** * Returns the manually configured label for a node. The list of assigned * and dynamically determined labels is available via * {@link #getAssignedLabels()} and includes all labels that have been * manually configured. * * Mainly for form binding. */ public abstract String getLabelString(); /** * Sets the label string for a node. This value will be returned by {@link #getLabelString()}. * * @param labelString * The new label string to use. * @since 1.477 */ public void setLabelString(String labelString) throws IOException { throw new UnsupportedOperationException(); } /** * Gets the special label that represents this node itself. */ @Nonnull @WithBridgeMethods(Label.class) public LabelAtom getSelfLabel() { return LabelAtom.get(getNodeName()); } /** * Called by the {@link Queue} to determine whether or not this node can * take the given task. The default checks include whether or not this node * is part of the task's assigned label, whether this node is in * {@link Mode#EXCLUSIVE} mode if it is not in the task's assigned label, * and whether or not any of this node's {@link NodeProperty}s say that the * task cannot be run. * * @since 1.360 * @deprecated as of 1.413 * Use {@link #canTake(Queue.BuildableItem)} */ @Deprecated public CauseOfBlockage canTake(Task task) { return null; } /** * Called by the {@link Queue} to determine whether or not this node can * take the given task. 
The default checks include whether or not this node * is part of the task's assigned label, whether this node is in * {@link Mode#EXCLUSIVE} mode if it is not in the task's assigned label, * and whether or not any of this node's {@link NodeProperty}s say that the * task cannot be run. * * @since 1.413 */ public CauseOfBlockage canTake(Queue.BuildableItem item) { Label l = item.getAssignedLabel(); if(l!=null && !l.contains(this)) return CauseOfBlockage.fromMessage(Messages._Node_LabelMissing(getDisplayName(), l)); // the task needs to be executed on label that this node doesn't have. if(l==null && getMode()== Mode.EXCLUSIVE) { // flyweight tasks need to get executed somewhere, if every node if (!(item.task instanceof Queue.FlyweightTask && ( this instanceof Jenkins || Jenkins.getInstance().getNumExecutors() < 1 || Jenkins.getInstance().getMode() == Mode.EXCLUSIVE) )) { return CauseOfBlockage.fromMessage(Messages._Node_BecauseNodeIsReserved(getDisplayName())); // this node is reserved for tasks that are tied to it } } Authentication identity = item.authenticate(); if (!(SKIP_BUILD_CHECK_ON_FLYWEIGHTS && item.task instanceof Queue.FlyweightTask) && !hasPermission(identity, Computer.BUILD)) { // doesn't have a permission return CauseOfBlockage.fromMessage(Messages._Node_LackingBuildPermission(identity.getName(), getDisplayName())); } // Check each NodeProperty to see whether they object to this node // taking the task for (NodeProperty prop: getNodeProperties()) { CauseOfBlockage c = prop.canTake(item); if (c!=null) return c; } if (!isAcceptingTasks()) { return new CauseOfBlockage.BecauseNodeIsNotAcceptingTasks(this); } // Looks like we can take the task return null; } /** * Returns a "workspace" directory for the given {@link TopLevelItem}. * * <p> * Workspace directory is usually used for keeping out the checked out * source code, but it can be used for anything. 
* * @return * null if this node is not connected hence the path is not available */ // TODO: should this be modified now that getWorkspace is moved from AbstractProject to AbstractBuild? public abstract @CheckForNull FilePath getWorkspaceFor(TopLevelItem item); /** * Gets the root directory of this node. * * <p> * Hudson always owns a directory on every node. This method * returns that. * * @return * null if the node is offline and hence the {@link FilePath} * object is not available. */ public abstract @CheckForNull FilePath getRootPath(); /** * Gets the {@link FilePath} on this node. */ public @CheckForNull FilePath createPath(String absolutePath) { VirtualChannel ch = getChannel(); if(ch==null) return null; // offline return new FilePath(ch,absolutePath); } public FileSystemProvisioner getFileSystemProvisioner() { // TODO: make this configurable or auto-detectable or something else return FileSystemProvisioner.DEFAULT; } /** * Gets the {@link NodeProperty} instances configured for this {@link Node}. */ public abstract @Nonnull DescribableList<NodeProperty<?>, NodePropertyDescriptor> getNodeProperties(); /** * Gets the specified property or null if the property is not configured for this Node. * * @param clazz the type of the property * * @return null if the property is not configured * * @since 2.37 */ @CheckForNull public <T extends NodeProperty> T getNodeProperty(Class<T> clazz) { for (NodeProperty p: getNodeProperties()) { if (clazz.isInstance(p)) { return clazz.cast(p); } } return null; } /** * Gets the property from the given classname or null if the property * is not configured for this Node. 
* * @param className The classname of the property * * @return null if the property is not configured * * @since 2.37 */ @CheckForNull public NodeProperty getNodeProperty(String className) { for (NodeProperty p: getNodeProperties()) { if (p.getClass().getName().equals(className)) { return p; } } return null; } // used in the Jelly script to expose descriptors public List<NodePropertyDescriptor> getNodePropertyDescriptors() { return NodeProperty.for_(this); } public ACL getACL() { return Jenkins.getInstance().getAuthorizationStrategy().getACL(this); } public Node reconfigure(final StaplerRequest req, JSONObject form) throws FormException { if (form==null) return null; final JSONObject jsonForProperties = form.optJSONObject("nodeProperties"); final AtomicReference<BindInterceptor> old = new AtomicReference<>(); old.set(req.setBindListener(new BindInterceptor() { @Override public Object onConvert(Type targetType, Class targetTypeErasure, Object jsonSource) { if (jsonForProperties != jsonSource) { return old.get().onConvert(targetType, targetTypeErasure, jsonSource); } try { DescribableList<NodeProperty<?>, NodePropertyDescriptor> tmp = new DescribableList<NodeProperty<?>, NodePropertyDescriptor>(Saveable.NOOP,getNodeProperties().toList()); tmp.rebuild(req, jsonForProperties, NodeProperty.all()); return tmp.toList(); } catch (FormException e) { throw new IllegalArgumentException(e); } catch (IOException e) { throw new IllegalArgumentException(e); } } })); try { return getDescriptor().newInstance(req, form); } finally { req.setBindListener(old.get()); } } public abstract NodeDescriptor getDescriptor(); /** * Estimates the clock difference with this agent. * * @return * always non-null. * @throws InterruptedException * if the operation is aborted. 
*/ public ClockDifference getClockDifference() throws IOException, InterruptedException { VirtualChannel channel = getChannel(); if(channel==null) throw new IOException(getNodeName()+" is offline"); return channel.call(getClockDifferenceCallable()); } /** * Returns a {@link Callable} that when run on the channel, estimates the clock difference. * * @return * always non-null. * @since 1.522 */ public abstract Callable<ClockDifference,IOException> getClockDifferenceCallable(); /** * Constants that control how Hudson allocates jobs to agents. */ public enum Mode { NORMAL(Messages._Node_Mode_NORMAL()), EXCLUSIVE(Messages._Node_Mode_EXCLUSIVE()); private final Localizable description; public String getDescription() { return description.toString(); } public String getName() { return name(); } Mode(Localizable description) { this.description = description; } static { Stapler.CONVERT_UTILS.register(new EnumConverter(), Mode.class); } } }
package org.edumo.gui.button;

import processing.core.PApplet;
import processing.core.PFont;
import processing.core.PGraphics;
import processing.core.PVector;

/**
 * Renders a clickable text button (optionally with a background box and a
 * placeholder shown while the label is empty) on a Processing canvas.
 *
 * @author edumo
 */
public class ButtonText extends AbstractButton {

	// Text shown on the button; null disables drawing entirely (see drawUndecorated).
	private String label = null;
	// Text shown instead of an empty label, when renderPlaceHolder is true.
	private String placeholder = null;
	private boolean renderPlaceHolder = true;
	// When true, a filled rectangle is drawn behind the text.
	private boolean withRectBox = false;
	// Background box color; -1 means "not set" (but see the fill logic in drawUndecorated,
	// which special-cases 255, not -1 — NOTE(review): looks inconsistent, confirm intent).
	private int rectBoxColor = -1;
	private int textColor = 0;
	private int textSize = 12;
	private PFont font;
	// Extra translation applied to the label only (not the box), set via setTextOffset.
	private PVector textOffset = new PVector();
	private int maxLetters = 0;
	// Canvas captured by init(...); used by setLabel to measure whether a new label fits.
	private PGraphics canvas;

	// private int xOffset = 0;
	// private int yOffset = 0;

	// Padding fraction of the button size (0.1 = 10%; original comment said 15%).
	private float padding = .1f;// 15%

	// public int getxOffset() {
	// return xOffset;
	// }
	//
	// public void setxOffset(int xOffset) {
	// this.xOffset = xOffset;
	// }
	//
	// public int getyOffset() {
	// return yOffset;
	// }
	//
	// public void setyOffset(int yOffset) {
	// this.yOffset = yOffset;
	// }

	public PFont getFont() {
		return font;
	}

	public int getMaxLetters() {
		return maxLetters;
	}

	public void setMaxLetters(int maxLetters) {
		this.maxLetters = maxLetters;
	}

	public boolean isRenderPlaceHolder() {
		return renderPlaceHolder;
	}

	public void setRenderPlaceHolder(boolean renderPlaceHolder) {
		this.renderPlaceHolder = renderPlaceHolder;
	}

	public void setFont(PFont font) {
		this.font = font;
	}

	public int getTextColor() {
		return textColor;
	}

	public void setTextColor(int textColor) {
		this.textColor = textColor;
	}

	public int getTextSize() {
		return textSize;
	}

	public boolean isWithRectBox() {
		return withRectBox;
	}

	public void setWithRectBox(boolean withRectBox) {
		this.withRectBox = withRectBox;
	}

	public int getRectBoxColor() {
		return rectBoxColor;
	}

	/** Sets the box color and implicitly enables the background box. */
	public void setRectBoxColor(int rectBoxColor) {
		this.rectBoxColor = rectBoxColor;
		setWithRectBox(true);
	}

	public void setTextSize(int textSize) {
		this.textSize = textSize;
	}

	public String getPlaceholder() {
		return placeholder;
	}

	public void setPlaceholder(String placeholder) {
		this.placeholder = placeholder;
	}

	public int getTextAlign() {
		return textAlign;
	}

	public void setTextAlign(int textAlign) {
		this.textAlign = textAlign;
	}

	public void setRealTextWidth(float realTextWidth) {
		this.realTextWidth = realTextWidth;
	}

	// Measured pixel width of the label as last drawn (updated by drawUndecorated).
	float realTextWidth = 0;
	// Processing alignment constant (e.g. PApplet.CENTER / PApplet.LEFT).
	int textAlign = 0;

	public float getRealTextWidth() {
		return realTextWidth;
	}

	/** Convenience overload: initializes with CENTER alignment. */
	public void init(PGraphics canvas, String name, String action, PVector pos,
			int textSize, int textColor) {
		init(canvas, name, action, pos, textSize, textColor, PApplet.CENTER);
	}

	/**
	 * Initializes the button, sizing it from the rendered width of its label
	 * text on the given canvas. (Original comment was in Spanish.)
	 *
	 * @param cavnas    canvas used to measure the label (note: parameter name is
	 *                  a typo for "canvas" kept for compatibility)
	 * @param name      button label
	 * @param action    action identifier passed to AbstractButton.init
	 * @param pos       button position
	 * @param textSize  text size in pixels; also used as the button height
	 * @param textColor label color
	 * @param textAlign Processing alignment constant
	 */
	public void init(PGraphics cavnas, String name, String action, PVector pos,
			int textSize, int textColor, int textAlign) {
		this.textColor = textColor;
		label = name;
		this.canvas = cavnas;
		// Width follows the measured text width; height follows the text size.
		width = (int) cavnas.textWidth(label);
		height = textSize;
		this.textSize = textSize;
		super.init(action, pos);
		this.textAlign = textAlign;
		super.posTarget = pos.get();
	}

	/** Convenience overload: initializes with CENTER alignment. */
	public void init(String name, String action, PVector pos, int w, int h,
			int textSize) {
		init(name, action, pos, w, h, textSize, PApplet.CENTER);
	}

	/**
	 * Initializes the button with an explicit size. (Original comment was in
	 * Spanish.)
	 *
	 * @param name      button label
	 * @param action    action identifier passed to AbstractButton.init
	 * @param pos       button position
	 * @param w         button width in pixels
	 * @param h         button height in pixels
	 * @param textSize  text size in pixels
	 * @param textAlign Processing alignment constant
	 */
	public void init(String name, String action, PVector pos, int w, int h,
			int textSize, int textAlign) {
		label = name;
		this.width = w;
		this.textSize = textSize;
		this.height = h;
		this.textAlign = textAlign;
		super.init(action, pos);
	}

	/**
	 * This init creates a button without text, i.e. an empty clickable area.
	 * (Original comment was in Spanish.)
	 *
	 * @param cavnas   unused here beyond matching the other overloads' shape
	 * @param width    clickable area width
	 * @param action   action identifier passed to AbstractButton.init
	 * @param pos      button position
	 * @param textSize used as the area height
	 */
	public void init(PGraphics cavnas, int width, String action, PVector pos,
			int textSize) {
		label = "";
		this.width = width;
		this.height = textSize;
		this.textSize = textSize;
		super.init(action, pos);
	}

	/**
	 * Draws the button (optional background box, then label or placeholder).
	 * Returns null always; the String return type comes from AbstractButton —
	 * NOTE(review): presumably an action id in other subclasses, confirm.
	 */
	@Override
	public String drawUndecorated(PGraphics canvas) {
		if (label == null)
			return null;

		canvas.pushMatrix();
		canvas.pushStyle();
		canvas.translate(pos.x, pos.y);

		updateRealPos(canvas);

		if (withRectBox) {
			// if (rectBoxColor > -1)
			// NOTE(review): the special case tests 255, not the -1 "unset" default.
			if (rectBoxColor != 255)
				canvas.fill(rectBoxColor, 100); // semi-transparent box
			else
				canvas.fill(rectBoxColor);
			canvas.rectMode(PApplet.CENTER);
			canvas.rect(0, 0, getWidth(), getHeight());
		}

		canvas.noFill();
		canvas.strokeWeight(2);
		canvas.stroke(255, 0, 0);
		canvas.textAlign(textAlign);

		if (pressed) {
			// Pressed state: no fill set, so text keeps the previous fill color.
			canvas.textSize(textSize /** 1.1f */
			);
		} else {
			canvas.fill(textColor);
			canvas.textSize(textSize);
		}

		if (font != null) {
			canvas.textFont(font, textSize);
		}

		// canvas.noFill();
		// if (textAlign == PApplet.CENTER)
		// canvas.rect(-width / 2, -height / 2, width, height);
		// else
		// canvas.rect(0, 0, width, height);

		// if (textAlign == PApplet.CENTER)
		// canvas.text(label, width / 2 + xOffset, height / 2 + textSize / 4
		// + yOffset);
		// else
		// canvas.text(label, xOffset, textSize + yOffset);

		// if (textAlign == PApplet.CENTER)
		// canvas.text(label, width / 2, height / 2 + textSize / 4);
		// else

		// Fall back to the placeholder when the label is empty.
		String tempLabel = null;
		if (label != null && !label.equals("")) {
			tempLabel = label;
		} else {
			if (placeholder != null && renderPlaceHolder) {
				tempLabel = placeholder;
			} else {
				tempLabel = label;
			}
		}

		// Draw label
		canvas.pushMatrix();
		// Apply offset if needed
		canvas.translate(textOffset.x, textOffset.y);
		// Label position depends on the inherited imageMode and the text alignment.
		if (imageMode == PApplet.CORNER) {
			if (textAlign == PApplet.CENTER) {
				canvas.text(tempLabel, width / 2, height / 2);
			} else {
				canvas.text(tempLabel, 0, height / 2);
			}
		} else {
			if (textAlign == PApplet.CENTER) {
				canvas.text(tempLabel, 0, textSize / 2);
			} else {
				canvas.text(tempLabel, -width / 2 + 10, textSize / 2);
			}
		}
		// NOTE(review): measures label, not tempLabel — differs when the
		// placeholder was drawn; confirm whether that is intended.
		realTextWidth = canvas.textWidth(label);
		canvas.popMatrix();
		// canvas.textAlign(PApplet.CORNER);

		canvas.popMatrix();
		canvas.popStyle();
		return null;
	}

	/**
	 * Point-in-polygon test: sums the signed angles subtended by each polygon
	 * edge as seen from v; the total is ~±TWO_PI when v is inside.
	 */
	public boolean inPolyCheck(PVector v, PVector[] p) {
		float a = 0;
		for (int i = 0; i < p.length - 1; ++i) {
			PVector v1 = p[i].get();
			PVector v2 = p[i + 1].get();
			a += vAtan2cent180(v, v1, v2);
		}
		// Close the polygon: edge from the last vertex back to the first.
		PVector v1 = p[p.length - 1].get();
		PVector v2 = p[0].get();
		a += vAtan2cent180(v, v1, v2);
		// if (a < 0.001) println(degrees(a));
		if (PApplet.abs(PApplet.abs(a) - PApplet.TWO_PI) < 0.01)
			return true;
		else
			return false;
	}

	/**
	 * Signed angle in (-PI, PI] subtended at cent by the segment v1-v2;
	 * helper for inPolyCheck.
	 */
	float vAtan2cent180(PVector cent, PVector v2, PVector v1) {
		PVector vA = v1.get();
		PVector vB = v2.get();
		vA.sub(cent);
		vB.sub(cent);
		vB.mult(-1);
		float ang = PApplet.atan2(vB.x, vB.y) - PApplet.atan2(vA.x, vA.y);
		if (ang < 0)
			ang = PApplet.TWO_PI + ang;
		ang -= PApplet.PI;
		return ang;
	}

	/**
	 * Hit test. Uses the quad realPos1..realPos4 when available (transformed
	 * button), otherwise an axis-aligned box around realPos whose extents
	 * depend on the text alignment.
	 */
	@Override
	public boolean isOver(PVector pos) {
		boolean over = false;
		if (pos == null) {
			return false;
		}
		if (realPos1 != null) {
			PVector[] pp = { realPos1, realPos2, realPos3, realPos4 };// new
			// PVector[4];
			over = inPolyCheck(pos, pp);
		} else if (textAlign == PApplet.CENTER) {
			if (realPos != null && pos.x > realPos.x - width / 2
					&& pos.x < realPos.x + width) {
				if (pos.y > realPos.y - height / 2
						&& pos.y < realPos.y + height / 2) {
					over = true;
				}
			}
		} else {
			if (realPos != null && pos.x > realPos.x - width / 2
					&& pos.x < realPos.x + width) {
				if (pos.y + height / 2 > realPos.y
						&& pos.y < realPos.y + height / 2) {
					over = true;
				}
			}
		}
		// NOTE(review): debug print left in; fires on every hit test.
		System.out.println("is over " + over);
		return over;
	}

	public String getLabel() {
		return label;
	}

	/**
	 * Sets the label, but reverts to the previous one when the new text would
	 * not fit the current button width (measured on the stored canvas).
	 */
	public void setLabel(String label) {
		String lastLAbel = this.label;
		if (label != null)
			this.label = label;
		if (canvas != null) {
			canvas.textSize(textSize);
			float w = canvas.textWidth(label);
			float w2 = canvas.textWidth(lastLAbel);
			if (w > width - textSize / 2 && w > w2) {
				// Keep the old label; the new one does not fit.
				System.out.println("tenemos un problema no cabe");
				this.label = lastLAbel;
			}
		}
		// checked above whether the new label fits (original comment in Spanish)
	}

	/** Sets label, size and alignment in one call. */
	public void setText(String label, int textSize, int align) {
		setLabel(label);
		setTextSize(textSize);
		setTextAlign(align);
	}

	/** Sets label, size, alignment and color in one call. */
	public void setText(String label, int textSize, int align, int color) {
		setTextColor(color);
		setText(label, textSize, align);
	}

	/** Sets the extra translation applied to the label when drawing. */
	public void setTextOffset(float x, float y) {
		textOffset.x = x;
		textOffset.y = y;
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.jmh.fetcher; import kafka.api.ApiVersion; import kafka.api.ApiVersion$; import kafka.cluster.BrokerEndPoint; import kafka.cluster.DelayedOperations; import kafka.cluster.IsrChangeListener; import kafka.cluster.Partition; import kafka.log.CleanerConfig; import kafka.log.Defaults; import kafka.log.LogAppendInfo; import kafka.log.LogConfig; import kafka.log.LogManager; import kafka.server.AlterIsrManager; import kafka.server.BrokerTopicStats; import kafka.server.FailedPartitions; import kafka.server.InitialFetchState; import kafka.server.KafkaConfig; import kafka.server.LogDirFailureChannel; import kafka.server.MetadataCache; import kafka.server.OffsetAndEpoch; import kafka.server.OffsetTruncationState; import kafka.server.QuotaFactory; import kafka.server.ReplicaFetcherThread; import kafka.server.ReplicaManager; import kafka.server.ReplicaQuota; import kafka.server.builders.LogManagerBuilder; import kafka.server.builders.ReplicaManagerBuilder; import kafka.server.checkpoints.OffsetCheckpoints; import kafka.server.metadata.MockConfigRepository; import kafka.server.metadata.ZkMetadataCache; import kafka.utils.KafkaScheduler; import kafka.utils.MockTime; import kafka.utils.Pool; 
import kafka.utils.TestUtils; import kafka.zk.KafkaZkClient; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.TopicIdPartition; import org.apache.kafka.common.Uuid; import org.apache.kafka.common.message.FetchResponseData; import org.apache.kafka.common.message.LeaderAndIsrRequestData; import org.apache.kafka.common.message.OffsetForLeaderEpochRequestData.OffsetForLeaderPartition; import org.apache.kafka.common.message.OffsetForLeaderEpochResponseData.EpochEndOffset; import org.apache.kafka.common.message.UpdateMetadataRequestData; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.protocol.ApiKeys; import org.apache.kafka.common.protocol.Errors; import org.apache.kafka.common.record.BaseRecords; import org.apache.kafka.common.record.RecordsSend; import org.apache.kafka.common.requests.FetchRequest; import org.apache.kafka.common.requests.FetchResponse; import org.apache.kafka.common.requests.UpdateMetadataRequest; import org.apache.kafka.common.utils.Time; import org.apache.kafka.common.utils.Utils; import org.mockito.Mockito; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Level; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.TearDown; import org.openjdk.jmh.annotations.Warmup; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Properties; import java.util.UUID; import java.util.concurrent.TimeUnit; import scala.Option; 
import scala.collection.Iterator;
import scala.collection.Map;

/**
 * JMH benchmark measuring the steady-state cost of {@code ReplicaFetcherThread.doWork()}
 * for a varying number of follower partitions. Setup builds a real LogManager and
 * ReplicaManager (with mocked ZK/quota/ISR collaborators), makes every partition a
 * follower, and primes the fetcher's incremental fetch session before measurement.
 */
@State(Scope.Benchmark)
@Fork(value = 1)
@Warmup(iterations = 5)
@Measurement(iterations = 15)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public class ReplicaFetcherThreadBenchmark {
    @Param({"100", "500", "1000", "5000"})
    private int partitionCount;

    private ReplicaFetcherBenchThread fetcher;
    private LogManager logManager;
    // Fresh per-trial log directory under the system temp dir; deleted in tearDown.
    private File logDir = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    private KafkaScheduler scheduler = new KafkaScheduler(1, "scheduler", true);
    private Pool<TopicPartition, Partition> pool = new Pool<TopicPartition, Partition>(Option.empty());
    private Metrics metrics = new Metrics();
    private ReplicaManager replicaManager;
    private Option<Uuid> topicId = Option.apply(Uuid.randomUuid());

    /**
     * Per-trial setup: creates the log dir, builds LogManager/ReplicaManager,
     * registers {@code partitionCount} follower partitions, and runs one fetch
     * pass so the measured iterations see only steady-state work.
     */
    @Setup(Level.Trial)
    public void setup() throws IOException {
        if (!logDir.mkdir())
            throw new IOException("error creating test directory");

        scheduler.startup();

        Properties props = new Properties();
        // ZK address is never contacted (ZK client is mocked below); the config just needs a value.
        props.put("zookeeper.connect", "127.0.0.1:9999");
        KafkaConfig config = new KafkaConfig(props);
        LogConfig logConfig = createLogConfig();

        BrokerTopicStats brokerTopicStats = new BrokerTopicStats();
        LogDirFailureChannel logDirFailureChannel = Mockito.mock(LogDirFailureChannel.class);
        List<File> logDirs = Collections.singletonList(logDir);
        logManager = new LogManagerBuilder().
            setLogDirs(logDirs).
            setInitialOfflineDirs(Collections.emptyList()).
            setConfigRepository(new MockConfigRepository()).
            setInitialDefaultConfig(logConfig).
            setCleanerConfig(new CleanerConfig(0, 0, 0, 0, 0, 0.0, 0, false, "MD5")).
            setRecoveryThreadsPerDataDir(1).
            setFlushCheckMs(1000L).
            setFlushRecoveryOffsetCheckpointMs(10000L).
            setFlushStartOffsetCheckpointMs(10000L).
            setRetentionCheckMs(1000L).
            setMaxPidExpirationMs(60000).
            setInterBrokerProtocolVersion(ApiVersion.latestVersion()).
            setScheduler(scheduler).
            setBrokerTopicStats(brokerTopicStats).
            setLogDirFailureChannel(logDirFailureChannel).
            setTime(Time.SYSTEM).
            setKeepPartitionMetadataFile(true).
            build();

        LinkedHashMap<TopicIdPartition, FetchResponseData.PartitionData> initialFetched = new LinkedHashMap<>();
        HashMap<String, Uuid> topicIds = new HashMap<>();
        scala.collection.mutable.Map<TopicPartition, InitialFetchState> initialFetchStates = new scala.collection.mutable.HashMap<>();
        List<UpdateMetadataRequestData.UpdateMetadataPartitionState> updatePartitionState = new ArrayList<>();
        for (int i = 0; i < partitionCount; i++) {
            TopicPartition tp = new TopicPartition("topic", i);

            List<Integer> replicas = Arrays.asList(0, 1, 2);
            // Leader is broker 0; this (benchmarked) broker acts as a follower.
            LeaderAndIsrRequestData.LeaderAndIsrPartitionState partitionState = new LeaderAndIsrRequestData.LeaderAndIsrPartitionState()
                    .setControllerEpoch(0)
                    .setLeader(0)
                    .setLeaderEpoch(0)
                    .setIsr(replicas)
                    .setZkVersion(1)
                    .setReplicas(replicas)
                    .setIsNew(true);
            IsrChangeListener isrChangeListener = Mockito.mock(IsrChangeListener.class);
            OffsetCheckpoints offsetCheckpoints = Mockito.mock(OffsetCheckpoints.class);
            Mockito.when(offsetCheckpoints.fetch(logDir.getAbsolutePath(), tp)).thenReturn(Option.apply(0L));
            AlterIsrManager isrChannelManager = Mockito.mock(AlterIsrManager.class);
            Partition partition = new Partition(tp, 100, ApiVersion$.MODULE$.latestVersion(),
                    0, Time.SYSTEM, isrChangeListener, new DelayedOperationsMock(tp),
                    Mockito.mock(MetadataCache.class), logManager, isrChannelManager);

            // Order matters: make the partition a follower before exposing it via the pool.
            partition.makeFollower(partitionState, offsetCheckpoints, topicId);
            pool.put(tp, partition);
            initialFetchStates.put(tp, new InitialFetchState(topicId, new BrokerEndPoint(3, "host", 3000), 0, 0));
            // Empty records payload: the benchmark exercises fetch bookkeeping, not record handling.
            BaseRecords fetched = new BaseRecords() {
                @Override
                public int sizeInBytes() {
                    return 0;
                }

                @Override
                public RecordsSend<? extends BaseRecords> toSend() {
                    return null;
                }
            };
            initialFetched.put(new TopicIdPartition(topicId.get(), tp), new FetchResponseData.PartitionData()
                    .setPartitionIndex(tp.partition())
                    .setLastStableOffset(0)
                    .setLogStartOffset(0)
                    .setRecords(fetched));
            updatePartitionState.add(
                    new UpdateMetadataRequestData.UpdateMetadataPartitionState()
                        .setTopicName("topic")
                        .setPartitionIndex(i)
                        .setControllerEpoch(0)
                        .setLeader(0)
                        .setLeaderEpoch(0)
                        .setIsr(replicas)
                        .setZkVersion(1)
                        .setReplicas(replicas));
        }
        UpdateMetadataRequest updateMetadataRequest = new UpdateMetadataRequest.Builder(ApiKeys.UPDATE_METADATA.latestVersion(),
                0, 0, 0, updatePartitionState, Collections.emptyList(), topicIds).build();

        // TODO: fix to support raft
        ZkMetadataCache metadataCache = new ZkMetadataCache(0);
        metadataCache.updateMetadata(0, updateMetadataRequest);

        replicaManager = new ReplicaManagerBuilder().
            setConfig(config).
            setMetrics(metrics).
            setTime(new MockTime()).
            setZkClient(Mockito.mock(KafkaZkClient.class)).
            setScheduler(scheduler).
            setLogManager(logManager).
            setQuotaManagers(Mockito.mock(QuotaFactory.QuotaManagers.class)).
            setBrokerTopicStats(brokerTopicStats).
            setMetadataCache(metadataCache).
            setLogDirFailureChannel(new LogDirFailureChannel(logDirs.size())).
            setAlterIsrManager(TestUtils.createAlterIsrManager()).
            build();

        fetcher = new ReplicaFetcherBenchThread(config, replicaManager, pool);
        fetcher.addPartitions(initialFetchStates);
        // force a pass to move partitions to fetching state. We do this in the setup phase
        // so that we do not measure this time as part of the steady state work
        fetcher.doWork();
        // handle response to engage the incremental fetch session handler
        fetcher.fetchSessionHandler().handleResponse(FetchResponse.of(Errors.NONE, 0, 999, initialFetched),
                ApiKeys.FETCH.latestVersion());
    }

    /** Per-trial teardown: shuts down all components and removes the temp log dir. */
    @TearDown(Level.Trial)
    public void tearDown() throws IOException {
        metrics.close();
        replicaManager.shutdown(false);
        logManager.shutdown();
        scheduler.shutdown();
        Utils.delete(logDir);
    }

    /**
     * Measured operation: one fetcher work loop iteration. Returns the request-rate
     * count so the JIT cannot eliminate the work as dead code.
     */
    @Benchmark
    public long testFetcher() {
        fetcher.doWork();
        return fetcher.fetcherStats().requestRate().count();
    }

    // avoid mocked DelayedOperations to avoid mocked class affecting benchmark results
    private static class DelayedOperationsMock extends DelayedOperations {
        DelayedOperationsMock(TopicPartition topicPartition) {
            super(topicPartition, null, null, null);
        }

        @Override
        public int numDelayedDelete() {
            return 0;
        }
    }

    /** Builds the LogConfig used for every partition, mirroring Kafka's defaults. */
    private static LogConfig createLogConfig() {
        Properties logProps = new Properties();
        logProps.put(LogConfig.SegmentMsProp(), Defaults.SegmentMs());
        logProps.put(LogConfig.SegmentBytesProp(), Defaults.SegmentSize());
        logProps.put(LogConfig.RetentionMsProp(), Defaults.RetentionMs());
        logProps.put(LogConfig.RetentionBytesProp(), Defaults.RetentionSize());
        logProps.put(LogConfig.SegmentJitterMsProp(), Defaults.SegmentJitterMs());
        logProps.put(LogConfig.CleanupPolicyProp(), Defaults.CleanupPolicy());
        logProps.put(LogConfig.MaxMessageBytesProp(), Defaults.MaxMessageSize());
        logProps.put(LogConfig.IndexIntervalBytesProp(), Defaults.IndexInterval());
        logProps.put(LogConfig.SegmentIndexBytesProp(), Defaults.MaxIndexSize());
        logProps.put(LogConfig.FileDeleteDelayMsProp(), Defaults.FileDeleteDelayMs());
        return LogConfig.apply(logProps, new scala.collection.immutable.HashSet<>());
    }

    /**
     * Fetcher subclass that stubs out leader interaction (epochs, offsets,
     * truncation, fetches) so doWork() measures only the fetcher's own
     * bookkeeping, never network or log I/O.
     */
    static class ReplicaFetcherBenchThread extends ReplicaFetcherThread {
        private final Pool<TopicPartition, Partition> pool;

        ReplicaFetcherBenchThread(KafkaConfig config,
                                  ReplicaManager replicaManager,
                                  Pool<TopicPartition, Partition> partitions) {
            super("name",
                    3,
                    new BrokerEndPoint(3, "host", 3000),
                    config,
                    new FailedPartitions(),
                    replicaManager,
                    new Metrics(),
                    Time.SYSTEM,
                    // Quota that never throttles, so quota checks never block the loop.
                    new ReplicaQuota() {
                        @Override
                        public boolean isQuotaExceeded() {
                            return false;
                        }

                        @Override
                        public void record(long value) {
                        }

                        @Override
                        public boolean isThrottled(TopicPartition topicPartition) {
                            return false;
                        }
                    },
                    Option.empty());
            pool = partitions;
        }

        @Override
        public Option<Object> latestEpoch(TopicPartition topicPartition) {
            return Option.apply(0);
        }

        @Override
        public long logStartOffset(TopicPartition topicPartition) {
            return pool.get(topicPartition).localLogOrException().logStartOffset();
        }

        @Override
        public long logEndOffset(TopicPartition topicPartition) {
            return 0;
        }

        @Override
        public void truncate(TopicPartition tp, OffsetTruncationState offsetTruncationState) {
            // pretend to truncate to move to Fetching state
        }

        @Override
        public Option<OffsetAndEpoch> endOffsetForEpoch(TopicPartition topicPartition, int epoch) {
            return Option.apply(new OffsetAndEpoch(0, 0));
        }

        @Override
        public Option<LogAppendInfo> processPartitionData(TopicPartition topicPartition,
                                                          long fetchOffset,
                                                          FetchResponseData.PartitionData partitionData) {
            // Drop fetched data: record processing is out of scope for this benchmark.
            return Option.empty();
        }

        @Override
        public long fetchEarliestOffsetFromLeader(TopicPartition topicPartition, int currentLeaderEpoch) {
            return 0;
        }

        @Override
        public Map<TopicPartition, EpochEndOffset> fetchEpochEndOffsets(Map<TopicPartition, OffsetForLeaderPartition> partitions) {
            // Answer every epoch query locally with a fixed end offset of 100.
            scala.collection.mutable.Map<TopicPartition, EpochEndOffset> endOffsets = new scala.collection.mutable.HashMap<>();
            Iterator<TopicPartition> iterator = partitions.keys().iterator();
            while (iterator.hasNext()) {
                TopicPartition tp = iterator.next();
                endOffsets.put(tp, new EpochEndOffset()
                    .setPartition(tp.partition())
                    .setErrorCode(Errors.NONE.code())
                    .setLeaderEpoch(0)
                    .setEndOffset(100));
            }
            return endOffsets;
        }

        @Override
        public Map<TopicPartition, FetchResponseData.PartitionData> fetchFromLeader(FetchRequest.Builder fetchRequest) {
            // No leader to talk to: report an empty fetch result.
            return new scala.collection.mutable.HashMap<>();
        }
    }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.s3csvinput; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.text.DecimalFormat; import com.amazonaws.services.s3.model.Bucket; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; 
import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.TransPreviewFactory; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.steps.textfileinput.TextFileInput; import org.pentaho.di.trans.steps.textfileinput.TextFileInputField; import org.pentaho.di.trans.steps.textfileinput.TextFileInputMeta; import org.pentaho.di.ui.core.dialog.EnterNumberDialog; import org.pentaho.di.ui.core.dialog.EnterSelectionDialog; import org.pentaho.di.ui.core.dialog.EnterTextDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.dialog.PreviewRowsDialog; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.ComboValuesSelectionListener; import org.pentaho.di.ui.core.widget.PasswordTextVar; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.dialog.TransPreviewProgressDialog; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.di.ui.trans.steps.textfileinput.TextFileCSVImportProgressDialog; public class S3CsvInputDialog extends BaseStepDialog implements StepDialogInterface { private S3CsvInputMeta inputMeta; private TextVar wAccessKey; private TextVar wSecretKey; private TextVar wBucket; private Button wbBucket; // browse for a bucket. 
// --- dialog widgets -------------------------------------------------------
private TextVar wFilename;
private CCombo wFilenameField;
private Button wbbFilename; // Browse for a file
private Button wIncludeFilename;
private TextVar wRowNumField;
private Button wbDelimiter;
private TextVar wDelimiter;
private TextVar wEnclosure;
private TextVar wMaxLineSize;
private Button wLazyConversion;
private Button wHeaderPresent;
private TableView wFields;
// true when a previous step feeds rows into this one; open() then shows a
// filename-field combo instead of a fixed filename widget.
private boolean isReceivingInput;
private Button wRunningInParallel;

public S3CsvInputDialog( Shell parent, Object in, TransMeta tr, String sname ) {
  super( parent, (BaseStepMeta) in, tr, sname );
  inputMeta = (S3CsvInputMeta) in;
}

/**
 * Builds the dialog, runs its event loop, and blocks until it is closed.
 *
 * @return the step name, or null when the dialog was cancelled
 */
@Override
public String open() {
  Shell parent = getParent();
  Display display = parent.getDisplay();

  shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX );
  props.setLook( shell );
  setShellImage( shell, inputMeta );

  // Any edit in a widget marks the step metadata as changed.
  ModifyListener lsMod = new ModifyListener() {
    @Override
    public void modifyText( ModifyEvent e ) {
      inputMeta.setChanged();
    }
  };
  changed = inputMeta.hasChanged();

  FormLayout formLayout = new FormLayout();
  formLayout.marginWidth = Const.FORM_MARGIN;
  formLayout.marginHeight = Const.FORM_MARGIN;

  shell.setLayout( formLayout );
  shell.setText( Messages.getString( "S3CsvInputDialog.Shell.Title" ) ); //$NON-NLS-1$

  int middle = props.getMiddlePct();
  int margin = Const.MARGIN;

  // Step name line
  //
  wlStepname = new Label( shell, SWT.RIGHT );
  wlStepname.setText( Messages.getString( "S3CsvInputDialog.Stepname.Label" ) ); //$NON-NLS-1$
  props.setLook( wlStepname );
  fdlStepname = new FormData();
  fdlStepname.left = new FormAttachment( 0, 0 );
  fdlStepname.right = new FormAttachment( middle, -margin );
  fdlStepname.top = new FormAttachment( 0, margin );
  wlStepname.setLayoutData( fdlStepname );
  wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wStepname );
  wStepname.addModifyListener( lsMod );
  fdStepname = new FormData();
  fdStepname.left = new FormAttachment( middle, 0 );
  fdStepname.top = new FormAttachment( 0, margin );
  fdStepname.right = new FormAttachment( 100, 0 );
  wStepname.setLayoutData( fdStepname );
  Control lastControl = wStepname;

  // Access key (masked input)
  Label wlAccessKey = new Label( shell, SWT.RIGHT );
  wlAccessKey.setText( Messages.getString( "S3CsvInputDialog.AccessKey.Label" ) ); //$NON-NLS-1$
  props.setLook( wlAccessKey );
  FormData fdlAccessKey = new FormData();
  fdlAccessKey.top = new FormAttachment( lastControl, margin );
  fdlAccessKey.left = new FormAttachment( 0, 0 );
  fdlAccessKey.right = new FormAttachment( middle, -margin );
  wlAccessKey.setLayoutData( fdlAccessKey );
  wAccessKey = new PasswordTextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wAccessKey );
  wAccessKey.addModifyListener( lsMod );
  FormData fdAccessKey = new FormData();
  fdAccessKey.top = new FormAttachment( lastControl, margin );
  fdAccessKey.left = new FormAttachment( middle, 0 );
  fdAccessKey.right = new FormAttachment( 100, 0 );
  wAccessKey.setLayoutData( fdAccessKey );
  lastControl = wAccessKey;

  // Secret key (masked input)
  Label wlSecretKey = new Label( shell, SWT.RIGHT );
  wlSecretKey.setText( Messages.getString( "S3CsvInputDialog.SecretKey.Label" ) ); //$NON-NLS-1$
  props.setLook( wlSecretKey );
  FormData fdlSecretKey = new FormData();
  fdlSecretKey.top = new FormAttachment( lastControl, margin );
  fdlSecretKey.left = new FormAttachment( 0, 0 );
  fdlSecretKey.right = new FormAttachment( middle, -margin );
  wlSecretKey.setLayoutData( fdlSecretKey );
  wSecretKey = new PasswordTextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wSecretKey );
  wSecretKey.addModifyListener( lsMod );
  FormData fdSecretKey = new FormData();
  fdSecretKey.top = new FormAttachment( lastControl, margin );
  fdSecretKey.left = new FormAttachment( middle, 0 );
  fdSecretKey.right = new FormAttachment( 100, 0 );
  wSecretKey.setLayoutData( fdSecretKey );
  lastControl = wSecretKey;

  // Bucket name
  Label wlBucket = new Label( shell, SWT.RIGHT );
  wlBucket.setText( Messages.getString( "S3CsvInputDialog.Bucket.Label" ) ); //$NON-NLS-1$
  props.setLook( wlBucket );
  FormData fdlBucket = new FormData();
  fdlBucket.top = new FormAttachment( lastControl, margin );
  fdlBucket.left = new FormAttachment( 0, 0 );
  fdlBucket.right = new FormAttachment( middle, -margin );
  wlBucket.setLayoutData( fdlBucket );
  wbBucket = new Button( shell, SWT.PUSH | SWT.CENTER );
  props.setLook( wbBucket );
  wbBucket.setText( Messages.getString( "S3CsvInputDialog.Bucket.Button" ) );
  FormData fdbBucket = new FormData();
  fdbBucket.top = new FormAttachment( lastControl, margin );
  fdbBucket.right = new FormAttachment( 100, 0 );
  wbBucket.setLayoutData( fdbBucket );
  wBucket = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wBucket );
  wBucket.addModifyListener( lsMod );
  FormData fdBucket = new FormData();
  fdBucket.top = new FormAttachment( lastControl, margin );
  fdBucket.left = new FormAttachment( middle, 0 );
  fdBucket.right = new FormAttachment( wbBucket, -margin );
  wBucket.setLayoutData( fdBucket );
  lastControl = wBucket;

  // See if the step receives input. If so, we don't ask for the filename, but for the filename field.
  //
  isReceivingInput = transMeta.findNrPrevSteps( stepMeta ) > 0;
  if ( isReceivingInput ) {
    RowMetaInterface previousFields;
    try {
      previousFields = transMeta.getPrevStepFields( stepMeta );
    } catch ( KettleStepException e ) {
      new ErrorDialog( shell,
        Messages.getString( "S3CsvInputDialog.ErrorDialog.UnableToGetInputFields.Title" ),
        Messages.getString( "S3CsvInputDialog.ErrorDialog.UnableToGetInputFields.Message" ), e );
      previousFields = new RowMeta();
    }

    // The filename field ...
    //
    Label wlFilename = new Label( shell, SWT.RIGHT );
    wlFilename.setText( Messages.getString( "S3CsvInputDialog.FilenameField.Label" ) ); //$NON-NLS-1$
    props.setLook( wlFilename );
    FormData fdlFilename = new FormData();
    fdlFilename.top = new FormAttachment( lastControl, margin );
    fdlFilename.left = new FormAttachment( 0, 0 );
    fdlFilename.right = new FormAttachment( middle, -margin );
    wlFilename.setLayoutData( fdlFilename );
    wFilenameField = new CCombo( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wFilenameField.setItems( previousFields.getFieldNames() );
    props.setLook( wFilenameField );
    wFilenameField.addModifyListener( lsMod );
    FormData fdFilename = new FormData();
    fdFilename.top = new FormAttachment( lastControl, margin );
    fdFilename.left = new FormAttachment( middle, 0 );
    fdFilename.right = new FormAttachment( 100, 0 );
    wFilenameField.setLayoutData( fdFilename );
    lastControl = wFilenameField;

    // Checkbox to include the filename in the output...
    //
    Label wlIncludeFilename = new Label( shell, SWT.RIGHT );
    wlIncludeFilename.setText( Messages.getString( "S3CsvInputDialog.IncludeFilenameField.Label" ) ); //$NON-NLS-1$
    props.setLook( wlIncludeFilename );
    FormData fdlIncludeFilename = new FormData();
    fdlIncludeFilename.top = new FormAttachment( lastControl, margin );
    fdlIncludeFilename.left = new FormAttachment( 0, 0 );
    fdlIncludeFilename.right = new FormAttachment( middle, -margin );
    wlIncludeFilename.setLayoutData( fdlIncludeFilename );
    wIncludeFilename = new Button( shell, SWT.CHECK );
    props.setLook( wIncludeFilename );
    // NOTE(review): this registers lsMod on wFilenameField a second time instead of adding any
    // listener on the wIncludeFilename checkbox — looks like a copy/paste slip, so toggling the
    // checkbox may not mark the step as changed. Confirm before fixing.
    wFilenameField.addModifyListener( lsMod );
    FormData fdIncludeFilename = new FormData();
    fdIncludeFilename.top = new FormAttachment( lastControl, margin );
    fdIncludeFilename.left = new FormAttachment( middle, 0 );
    fdIncludeFilename.right = new FormAttachment( 100, 0 );
    wIncludeFilename.setLayoutData( fdIncludeFilename );
    lastControl = wIncludeFilename;
  } else {
    // Filename...
    //
    // The filename browse button
    //
    wbbFilename = new Button( shell, SWT.PUSH | SWT.CENTER );
    props.setLook( wbbFilename );
    wbbFilename.setText( Messages.getString( "System.Button.Browse" ) );
    wbbFilename.setToolTipText( Messages.getString( "System.Tooltip.BrowseForFileOrDirAndAdd" ) );
    FormData fdbFilename = new FormData();
    fdbFilename.top = new FormAttachment( lastControl, margin );
    fdbFilename.right = new FormAttachment( 100, 0 );
    wbbFilename.setLayoutData( fdbFilename );

    // The field itself...
    //
    Label wlFilename = new Label( shell, SWT.RIGHT );
    wlFilename.setText( Messages.getString( "S3CsvInputDialog.Filename.Label" ) ); //$NON-NLS-1$
    props.setLook( wlFilename );
    FormData fdlFilename = new FormData();
    fdlFilename.top = new FormAttachment( lastControl, margin );
    fdlFilename.left = new FormAttachment( 0, 0 );
    fdlFilename.right = new FormAttachment( middle, -margin );
    wlFilename.setLayoutData( fdlFilename );
    wFilename = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wFilename );
    wFilename.addModifyListener( lsMod );
    FormData fdFilename = new FormData();
    fdFilename.top = new FormAttachment( lastControl, margin );
    fdFilename.left = new FormAttachment( middle, 0 );
    fdFilename.right = new FormAttachment( wbbFilename, -margin );
    wFilename.setLayoutData( fdFilename );
    lastControl = wFilename;
  }

  // delimiter
  Label wlDelimiter = new Label( shell, SWT.RIGHT );
  wlDelimiter.setText( Messages.getString( "S3CsvInputDialog.Delimiter.Label" ) ); //$NON-NLS-1$
  props.setLook( wlDelimiter );
  FormData fdlDelimiter = new FormData();
  fdlDelimiter.top = new FormAttachment( lastControl, margin );
  fdlDelimiter.left = new FormAttachment( 0, 0 );
  fdlDelimiter.right = new FormAttachment( middle, -margin );
  wlDelimiter.setLayoutData( fdlDelimiter );
  // Button that inserts a tab character (wired up further below).
  wbDelimiter = new Button( shell, SWT.PUSH | SWT.CENTER );
  props.setLook( wbDelimiter );
  wbDelimiter.setText( Messages.getString( "S3CsvInputDialog.Delimiter.Button" ) );
  FormData fdbDelimiter = new FormData();
  fdbDelimiter.top = new FormAttachment( lastControl, margin );
  fdbDelimiter.right = new FormAttachment( 100, 0 );
  wbDelimiter.setLayoutData( fdbDelimiter );
  wDelimiter = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wDelimiter );
  wDelimiter.addModifyListener( lsMod );
  FormData fdDelimiter = new FormData();
  fdDelimiter.top = new FormAttachment( lastControl, margin );
  fdDelimiter.left = new FormAttachment( middle, 0 );
  fdDelimiter.right = new FormAttachment( wbDelimiter, -margin );
  wDelimiter.setLayoutData( fdDelimiter );
  lastControl = wDelimiter;

  // enclosure
  Label wlEnclosure = new Label( shell, SWT.RIGHT );
  wlEnclosure.setText( Messages.getString( "S3CsvInputDialog.Enclosure.Label" ) ); //$NON-NLS-1$
  props.setLook( wlEnclosure );
  FormData fdlEnclosure = new FormData();
  fdlEnclosure.top = new FormAttachment( lastControl, margin );
  fdlEnclosure.left = new FormAttachment( 0, 0 );
  fdlEnclosure.right = new FormAttachment( middle, -margin );
  wlEnclosure.setLayoutData( fdlEnclosure );
  wEnclosure = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wEnclosure );
  wEnclosure.addModifyListener( lsMod );
  FormData fdEnclosure = new FormData();
  fdEnclosure.top = new FormAttachment( lastControl, margin );
  fdEnclosure.left = new FormAttachment( middle, 0 );
  fdEnclosure.right = new FormAttachment( 100, 0 );
  wEnclosure.setLayoutData( fdEnclosure );
  lastControl = wEnclosure;

  // Max line size
  //
  Label wlMaxLineSize = new Label( shell, SWT.RIGHT );
  wlMaxLineSize.setText( Messages.getString( "S3CsvInputDialog.MaxLineSize.Label" ) ); //$NON-NLS-1$
  props.setLook( wlMaxLineSize );
  FormData fdlMaxLineSize = new FormData();
  fdlMaxLineSize.top = new FormAttachment( lastControl, margin );
  fdlMaxLineSize.left = new FormAttachment( 0, 0 );
  fdlMaxLineSize.right = new FormAttachment( middle, -margin );
  wlMaxLineSize.setLayoutData( fdlMaxLineSize );
  wMaxLineSize = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wMaxLineSize );
  wMaxLineSize.addModifyListener( lsMod );
  FormData fdMaxLineSize = new FormData();
  fdMaxLineSize.top = new FormAttachment( lastControl, margin );
  fdMaxLineSize.left = new FormAttachment( middle, 0 );
  fdMaxLineSize.right = new FormAttachment( 100, 0 );
  wMaxLineSize.setLayoutData( fdMaxLineSize );
  lastControl = wMaxLineSize;

  // performingLazyConversion?
  //
  Label wlLazyConversion = new Label( shell, SWT.RIGHT );
  wlLazyConversion.setText( Messages.getString( "S3CsvInputDialog.LazyConversion.Label" ) ); //$NON-NLS-1$
  props.setLook( wlLazyConversion );
  FormData fdlLazyConversion = new FormData();
  fdlLazyConversion.top = new FormAttachment( lastControl, margin );
  fdlLazyConversion.left = new FormAttachment( 0, 0 );
  fdlLazyConversion.right = new FormAttachment( middle, -margin );
  wlLazyConversion.setLayoutData( fdlLazyConversion );
  wLazyConversion = new Button( shell, SWT.CHECK );
  props.setLook( wLazyConversion );
  FormData fdLazyConversion = new FormData();
  fdLazyConversion.top = new FormAttachment( lastControl, margin );
  fdLazyConversion.left = new FormAttachment( middle, 0 );
  fdLazyConversion.right = new FormAttachment( 100, 0 );
  wLazyConversion.setLayoutData( fdLazyConversion );
  lastControl = wLazyConversion;

  // header row?
  //
  Label wlHeaderPresent = new Label( shell, SWT.RIGHT );
  wlHeaderPresent.setText( Messages.getString( "S3CsvInputDialog.HeaderPresent.Label" ) ); //$NON-NLS-1$
  props.setLook( wlHeaderPresent );
  FormData fdlHeaderPresent = new FormData();
  fdlHeaderPresent.top = new FormAttachment( lastControl, margin );
  fdlHeaderPresent.left = new FormAttachment( 0, 0 );
  fdlHeaderPresent.right = new FormAttachment( middle, -margin );
  wlHeaderPresent.setLayoutData( fdlHeaderPresent );
  wHeaderPresent = new Button( shell, SWT.CHECK );
  props.setLook( wHeaderPresent );
  FormData fdHeaderPresent = new FormData();
  fdHeaderPresent.top = new FormAttachment( lastControl, margin );
  fdHeaderPresent.left = new FormAttachment( middle, 0 );
  fdHeaderPresent.right = new FormAttachment( 100, 0 );
  wHeaderPresent.setLayoutData( fdHeaderPresent );
  lastControl = wHeaderPresent;

  // The row-number field name...
  //
  Label wlRowNumField = new Label( shell, SWT.RIGHT );
  wlRowNumField.setText( Messages.getString( "S3CsvInputDialog.RowNumField.Label" ) ); //$NON-NLS-1$
  props.setLook( wlRowNumField );
  FormData fdlRowNumField = new FormData();
  fdlRowNumField.top = new FormAttachment( lastControl, margin );
  fdlRowNumField.left = new FormAttachment( 0, 0 );
  fdlRowNumField.right = new FormAttachment( middle, -margin );
  wlRowNumField.setLayoutData( fdlRowNumField );
  wRowNumField = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wRowNumField );
  wRowNumField.addModifyListener( lsMod );
  FormData fdRowNumField = new FormData();
  fdRowNumField.top = new FormAttachment( lastControl, margin );
  fdRowNumField.left = new FormAttachment( middle, 0 );
  fdRowNumField.right = new FormAttachment( 100, 0 );
  wRowNumField.setLayoutData( fdRowNumField );
  lastControl = wRowNumField;

  // running in parallel?
  //
  Label wlRunningInParallel = new Label( shell, SWT.RIGHT );
  wlRunningInParallel.setText( Messages.getString( "S3CsvInputDialog.RunningInParallel.Label" ) ); //$NON-NLS-1$
  props.setLook( wlRunningInParallel );
  FormData fdlRunningInParallel = new FormData();
  fdlRunningInParallel.top = new FormAttachment( lastControl, margin );
  fdlRunningInParallel.left = new FormAttachment( 0, 0 );
  fdlRunningInParallel.right = new FormAttachment( middle, -margin );
  wlRunningInParallel.setLayoutData( fdlRunningInParallel );
  wRunningInParallel = new Button( shell, SWT.CHECK );
  props.setLook( wRunningInParallel );
  FormData fdRunningInParallel = new FormData();
  fdRunningInParallel.top = new FormAttachment( lastControl, margin );
  fdRunningInParallel.left = new FormAttachment( middle, 0 );
  wRunningInParallel.setLayoutData( fdRunningInParallel );
  lastControl = wRunningInParallel;

  // Some buttons first, so that the dialog scales nicely...
  //
  wOK = new Button( shell, SWT.PUSH );
  wOK.setText( Messages.getString( "System.Button.OK" ) ); //$NON-NLS-1$
  wCancel = new Button( shell, SWT.PUSH );
  wCancel.setText( Messages.getString( "System.Button.Cancel" ) ); //$NON-NLS-1$
  wPreview = new Button( shell, SWT.PUSH );
  wPreview.setText( Messages.getString( "System.Button.Preview" ) ); //$NON-NLS-1$
  // Preview and Get-fields only make sense when the filename is fixed (no incoming rows).
  wPreview.setEnabled( !isReceivingInput );
  wGet = new Button( shell, SWT.PUSH );
  wGet.setText( Messages.getString( "System.Button.GetFields" ) ); //$NON-NLS-1$
  wGet.setEnabled( !isReceivingInput );
  setButtonPositions( new Button[] { wOK, wCancel, wPreview, wGet, }, margin, null );

  // Fields
  ColumnInfo[] colinf = new ColumnInfo[] {
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.NameColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ),
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.TypeColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMeta.getTypes(), true ),
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.FormatColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, Const.getConversionFormats() ),
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.LengthColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ),
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.PrecisionColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ),
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.CurrencyColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ),
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.DecimalColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ),
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.GroupColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ),
    new ColumnInfo( Messages.getString( "S3CsvInputDialog.TrimTypeColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMeta.trimTypeDesc ), };

  // The format column proposes date or number masks depending on the type chosen in column 2.
  colinf[2].setComboValuesSelectionListener( new ComboValuesSelectionListener() {
    @Override
    public String[] getComboValues( TableItem tableItem, int rowNr, int colNr ) {
      String[] comboValues = new String[] { };
      int type = ValueMeta.getType( tableItem.getText( colNr - 1 ) );
      switch ( type ) {
        case ValueMetaInterface.TYPE_DATE:
          comboValues = Const.getDateFormats();
          break;
        case ValueMetaInterface.TYPE_INTEGER:
        case ValueMetaInterface.TYPE_BIGNUMBER:
        case ValueMetaInterface.TYPE_NUMBER:
          comboValues = Const.getNumberFormats();
          break;
        default:
          break;
      }
      return comboValues;
    }
  } );

  wFields = new TableView( transMeta, shell, SWT.FULL_SELECTION | SWT.MULTI, colinf, 1, lsMod, props );
  FormData fdFields = new FormData();
  fdFields.top = new FormAttachment( lastControl, margin * 2 );
  fdFields.bottom = new FormAttachment( wOK, -margin * 2 );
  fdFields.left = new FormAttachment( 0, 0 );
  fdFields.right = new FormAttachment( 100, 0 );
  wFields.setLayoutData( fdFields );

  // Add listeners
  lsCancel = new Listener() {
    @Override
    public void handleEvent( Event e ) {
      cancel();
    }
  };
  lsOK = new Listener() {
    @Override
    public void handleEvent( Event e ) {
      ok();
    }
  };
  lsPreview = new Listener() {
    @Override
    public void handleEvent( Event e ) {
      preview();
    }
  };
  lsGet = new Listener() {
    @Override
    public void handleEvent( Event e ) {
      getCSV();
    }
  };

  wCancel.addListener( SWT.Selection, lsCancel );
  wOK.addListener( SWT.Selection, lsOK );
  wPreview.addListener( SWT.Selection, lsPreview );
  wGet.addListener( SWT.Selection, lsGet );

  // Pressing Enter in any of these widgets acts like OK.
  lsDef = new SelectionAdapter() {
    @Override
    public void widgetDefaultSelected( SelectionEvent e ) {
      ok();
    }
  };
  wStepname.addSelectionListener( lsDef );
  if ( wFilename != null ) {
    wFilename.addSelectionListener( lsDef );
  }
  if ( wFilenameField != null ) {
    wFilenameField.addSelectionListener( lsDef );
  }
  wDelimiter.addSelectionListener( lsDef );
  wEnclosure.addSelectionListener( lsDef );
  wMaxLineSize.addSelectionListener( lsDef );
  wRowNumField.addSelectionListener( lsDef );

  // Allow the insertion of tabs as separator...
  wbDelimiter.addSelectionListener( new SelectionAdapter() {
    @Override
    public void widgetSelected( SelectionEvent se ) {
      Text t = wDelimiter.getTextWidget();
      if ( t != null ) {
        t.insert( "\t" );
      }
    }
  } );

  wbBucket.addSelectionListener( new SelectionAdapter() {
    @Override
    public void widgetSelected( SelectionEvent event ) {
      // List the buckets...
      //
      try {
        S3CsvInputMeta meta = new S3CsvInputMeta();
        getInfo( meta );
        S3ObjectsProvider s3ObjProvider = new S3ObjectsProvider( meta.getS3Client( transMeta ) );
        EnterSelectionDialog dialog = new EnterSelectionDialog( shell, s3ObjProvider.getBucketsNames(),
          Messages.getString( "S3CsvInputDialog.Exception.SelectBucket.Title" ),
          Messages.getString( "S3CsvInputDialog.Exception.SelectBucket.Message" ) );
        dialog.setMulti( false );
        String bucketname = dialog.open();
        if ( bucketname != null ) {
          wBucket.setText( bucketname );
        }
      } catch ( Exception e ) {
        new ErrorDialog( shell, Messages.getString( "S3CsvInputDialog.Exception.UnableToGetBuckets.Title" ),
          Messages.getString( "S3CsvInputDialog.Exception.UnableToGetBuckets.Message" ), e );
      }
    }
  } );

  if ( wbbFilename != null ) {
    // Listen to the browse button next to the file name
    wbbFilename.addSelectionListener( new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent event ) {
        try {
          S3CsvInputMeta meta = new S3CsvInputMeta();
          getInfo( meta );
          S3ObjectsProvider s3ObjProvider = new S3ObjectsProvider( meta.getS3Client( transMeta ) );
          String[] objectnames = s3ObjProvider.getS3ObjectsNames( meta.getBucket() );
          EnterSelectionDialog dialog = new EnterSelectionDialog( shell, objectnames,
            Messages.getString( "S3CsvInputDialog.Exception.SelectObject.Title" ),
            Messages.getString( "S3CsvInputDialog.Exception.SelectObject.Message" ) );
          dialog.setMulti( false );
          // Pre-select the object that is currently typed in, if any.
          if ( !Utils.isEmpty( wFilename.getText() ) ) {
            int index = Const.indexOfString( wFilename.getText(), objectnames );
            if ( index >= 0 ) {
              dialog.setSelectedNrs( new int[] { index, } );
            }
          }
          String objectname = dialog.open();
          if ( objectname != null ) {
            wFilename.setText( objectname );
          }
        } catch ( Exception e ) {
          new ErrorDialog( shell, Messages.getString( "S3CsvInputDialog.Exception.UnableToGetFiles.Title" ),
            Messages.getString( "S3CsvInputDialog.Exception.UnableToGetFiles.Message" ), e );
        }
      }
    } );
  }

  // Detect X or ALT-F4 or something that kills this window...
  shell.addShellListener( new ShellAdapter() {
    @Override
    public void shellClosed( ShellEvent e ) {
      cancel();
    }
  } );

  // Set the shell size, based upon previous time...
  setSize();

  getData();
  inputMeta.setChanged( changed );

  shell.open();
  while ( !shell.isDisposed() ) {
    if ( !display.readAndDispatch() ) {
      display.sleep();
    }
  }
  return stepname;
}

public void getData() {
  getData( inputMeta );
}

/**
 * Copy information from the meta-data input to the dialog fields.
 * (The parameter intentionally shadows the inputMeta field: getCSV() calls this with a scratch meta.)
 */
public void getData( S3CsvInputMeta inputMeta ) {
  wStepname.setText( stepname );
  wAccessKey.setText( Const.NVL( inputMeta.getAwsAccessKey(), "" ) );
  wSecretKey.setText( Const.NVL( inputMeta.getAwsSecretKey(), "" ) );
  wBucket.setText( Const.NVL( inputMeta.getBucket(), "" ) );
  if ( isReceivingInput ) {
    wFilenameField.setText( Const.NVL( inputMeta.getFilenameField(), "" ) );
    wIncludeFilename.setSelection( inputMeta.isIncludingFilename() );
  } else {
    wFilename.setText( Const.NVL( inputMeta.getFilename(), "" ) );
  }
  wDelimiter.setText( Const.NVL( inputMeta.getDelimiter(), "" ) );
  wEnclosure.setText( Const.NVL( inputMeta.getEnclosure(), "" ) );
  wMaxLineSize.setText( Const.NVL( inputMeta.getMaxLineSize(), "" ) );
  wLazyConversion.setSelection( inputMeta.isLazyConversionActive() );
  wHeaderPresent.setSelection( inputMeta.isHeaderPresent() );
  wRunningInParallel.setSelection( inputMeta.isRunningInParallel() );
  wRowNumField.setText( Const.NVL( inputMeta.getRowNumField(), "" ) );

  // Populate the fields grid from the meta; -1 length/precision renders as an empty cell.
  for ( int i = 0; i < inputMeta.getInputFields().length; i++ ) {
    TextFileInputField field = inputMeta.getInputFields()[i];
    TableItem item = new TableItem( wFields.table, SWT.NONE );
    int colnr = 1;
    item.setText( colnr++, Const.NVL( field.getName(), "" ) );
    item.setText( colnr++, ValueMeta.getTypeDesc( field.getType() ) );
    item.setText( colnr++, Const.NVL( field.getFormat(), "" ) );
    item.setText( colnr++, field.getLength() >= 0 ? Integer.toString( field.getLength() ) : "" );
    item.setText( colnr++, field.getPrecision() >= 0 ? Integer.toString( field.getPrecision() ) : "" );
    item.setText( colnr++, Const.NVL( field.getCurrencySymbol(), "" ) );
    item.setText( colnr++, Const.NVL( field.getDecimalSymbol(), "" ) );
    item.setText( colnr++, Const.NVL( field.getGroupSymbol(), "" ) );
    item.setText( colnr++, Const.NVL( field.getTrimTypeDesc(), "" ) );
  }
  wFields.removeEmptyRows();
  wFields.setRowNums();
  wFields.optWidth( true );

  wStepname.selectAll();
}

private void cancel() {
  stepname = null;
  inputMeta.setChanged( changed );
  dispose();
}

/** Copy the dialog field values back into the given step metadata (reverse of getData). */
private void getInfo( S3CsvInputMeta inputMeta ) {
  inputMeta.setAwsAccessKey( wAccessKey.getText() );
  inputMeta.setAwsSecretKey( wSecretKey.getText() );
  inputMeta.setBucket( wBucket.getText() );
  if ( isReceivingInput ) {
    inputMeta.setFilenameField( wFilenameField.getText() );
    inputMeta.setIncludingFilename( wIncludeFilename.getSelection() );
  } else {
    inputMeta.setFilename( wFilename.getText() );
  }
  inputMeta.setDelimiter( wDelimiter.getText() );
  inputMeta.setEnclosure( wEnclosure.getText() );
  inputMeta.setMaxLineSize( wMaxLineSize.getText() );
  inputMeta.setLazyConversionActive( wLazyConversion.getSelection() );
  inputMeta.setHeaderPresent( wHeaderPresent.getSelection() );
  inputMeta.setRowNumField( wRowNumField.getText() );
  inputMeta.setRunningInParallel( wRunningInParallel.getSelection() );

  int nrNonEmptyFields = wFields.nrNonEmpty();
  inputMeta.allocate( nrNonEmptyFields );
  for ( int i = 0; i < nrNonEmptyFields; i++ ) {
    TableItem item = wFields.getNonEmpty( i );
    inputMeta.getInputFields()[i] = new TextFileInputField();
    int colnr = 1;
    inputMeta.getInputFields()[i].setName( item.getText( colnr++ ) );
    inputMeta.getInputFields()[i].setType( ValueMeta.getType( item.getText( colnr++ ) ) );
    inputMeta.getInputFields()[i].setFormat( item.getText( colnr++ ) );
    inputMeta.getInputFields()[i].setLength( Const.toInt( item.getText( colnr++ ), -1 ) );
    inputMeta.getInputFields()[i].setPrecision( Const.toInt( item.getText( colnr++ ), -1 ) );
    inputMeta.getInputFields()[i].setCurrencySymbol( item.getText( colnr++ ) );
    inputMeta.getInputFields()[i].setDecimalSymbol( item.getText( colnr++ ) );
    inputMeta.getInputFields()[i].setGroupSymbol( item.getText( colnr++ ) );
    inputMeta.getInputFields()[i].setTrimType( ValueMeta.getTrimTypeByDesc( item.getText( colnr++ ) ) );
  }
  wFields.removeEmptyRows();
  wFields.setRowNums();
  wFields.optWidth( true );

  inputMeta.setChanged();
}

private void ok() {
  if ( Utils.isEmpty( wStepname.getText() ) ) {
    return;
  }
  getInfo( inputMeta );
  stepname = wStepname.getText();
  dispose();
}

// Get the data layout: sample the S3 object and guess the field names and types.
private void getCSV() {
  InputStream inputStream = null;
  try {
    S3CsvInputMeta meta = new S3CsvInputMeta();
    getInfo( meta );

    String filename = transMeta.environmentSubstitute( meta.getFilename() );
    String bucketname = transMeta.environmentSubstitute( meta.getBucket() );
    int maxLineSize = Const.toInt( transMeta.environmentSubstitute( meta.getMaxLineSize() ), 2000 );

    wFields.table.removeAll();

    S3ObjectsProvider s3ObjProvider = new S3ObjectsProvider( meta.getS3Client( transMeta ) );
    Bucket s3bucket = s3ObjProvider.getBucket( bucketname );
    if ( s3bucket == null ) {
      throw new Exception( Messages.getString( "S3DefaultService.Exception.UnableToFindBucket.Message", bucketname ) );
    }

    // Now we can continue reading the rows of data and we can guess the
    // Sample a few lines to determine the correct type of the fields...
    //
    String shellText = Messages.getString( "S3CsvInputDialog.LinesToSample.DialogTitle" );
    String lineText = Messages.getString( "S3CsvInputDialog.LinesToSample.DialogMessage" );
    EnterNumberDialog end = new EnterNumberDialog( shell, 100, shellText, lineText );
    int samples = end.open();
    // NOTE(review): a negative value presumably means the dialog was cancelled — confirm.
    if ( samples < 0 ) {
      return;
    }

    // Only get the first lines, not the complete file
    // And grab an input stream to the data...
    inputStream = s3ObjProvider.getS3Object( s3bucket, filename, 0L, (long) samples * (long) maxLineSize ).getObjectContent();
    // NOTE(review): no charset given, so this decodes with the platform default charset —
    // consider an explicit charset (e.g. UTF-8) for reproducible sampling; confirm before changing.
    InputStreamReader reader = new InputStreamReader( inputStream );

    // Read a line of data to determine the number of rows...
    //
    String line = TextFileInput.getLine( log, reader, TextFileInputMeta.FILE_FORMAT_MIXED, new StringBuilder( 1000 ) );

    // Split the string, header or data into parts...
    //
    String[] fieldNames = Const.splitString( line, meta.getDelimiter() );

    if ( !meta.isHeaderPresent() ) {
      // Don't use field names from the header...
      // Generate field names Field_000 ... Field_NNN instead.
      //
      DecimalFormat df = new DecimalFormat( "000" ); // $NON-NLS-1$
      for ( int i = 0; i < fieldNames.length; i++ ) {
        fieldNames[i] = "Field_" + df.format( i ); // $NON-NLS-1$
      }
    } else {
      // Strip the enclosure characters from the header names, if configured.
      if ( !Utils.isEmpty( meta.getEnclosure() ) ) {
        for ( int i = 0; i < fieldNames.length; i++ ) {
          if ( fieldNames[i].startsWith( meta.getEnclosure() ) && fieldNames[i].endsWith( meta.getEnclosure() ) && fieldNames[i].length() > 1 ) {
            fieldNames[i] = fieldNames[i].substring( 1, fieldNames[i].length() - 1 );
          }
        }
      }
    }

    // Trim the names to make sure...
    //
    for ( int i = 0; i < fieldNames.length; i++ ) {
      fieldNames[i] = Const.trim( fieldNames[i] );
    }

    // Update the GUI
    //
    for ( int i = 0; i < fieldNames.length; i++ ) {
      TableItem item = new TableItem( wFields.table, SWT.NONE );
      item.setText( 1, fieldNames[i] );
      item.setText( 2, ValueMeta.getTypeDesc( ValueMetaInterface.TYPE_STRING ) );
    }
    wFields.removeEmptyRows();
    wFields.setRowNums();
    wFields.optWidth( true );

    getInfo( meta );

    // Scan the sampled lines to refine types/formats; the dialog reports its findings as text.
    TextFileCSVImportProgressDialog pd = new TextFileCSVImportProgressDialog( shell, meta, transMeta, reader, samples, true );
    String message = pd.open();
    if ( message != null ) {
      wFields.removeAll();

      // OK, what's the result of our search?
      getData( meta );
      wFields.removeEmptyRows();
      wFields.setRowNums();
      wFields.optWidth( true );

      EnterTextDialog etd = new EnterTextDialog( shell,
        Messages.getString( "S3CsvInputDialog.ScanResults.DialogTitle" ),
        Messages.getString( "S3CsvInputDialog.ScanResults.DialogMessage" ), message, true );
      etd.setReadOnly();
      etd.open();
    }
  } catch ( IOException e ) {
    new ErrorDialog( shell, Messages.getString( "S3CsvInputDialog.IOError.DialogTitle" ),
      Messages.getString( "S3CsvInputDialog.IOError.DialogMessage" ), e );
  } catch ( Exception e ) {
    new ErrorDialog( shell, Messages.getString( "System.Dialog.Error.Title" ),
      Messages.getString( "S3CsvInputDialog.ErrorGettingFileDesc.DialogMessage" ), e );
  } finally {
    try {
      if ( inputStream != null ) {
        inputStream.close();
      }
    } catch ( Exception e ) {
      log.logError( stepname, "Error closing s3 data input stream", e );
    }
  }
}

// Preview the data
private void preview() {
  // Create the XML input step
  S3CsvInputMeta oneMeta = new S3CsvInputMeta();
  getInfo( oneMeta );

  TransMeta previewMeta = TransPreviewFactory.generatePreviewTransformation( transMeta, oneMeta, wStepname.getText() );

  EnterNumberDialog numberDialog = new EnterNumberDialog( shell, props.getDefaultPreviewSize(),
    Messages.getString( "S3CsvInputDialog.PreviewSize.DialogTitle" ),
    Messages.getString( "S3CsvInputDialog.PreviewSize.DialogMessage" ) );
  int previewSize = numberDialog.open();
  if ( previewSize > 0 ) {
    TransPreviewProgressDialog progressDialog = new TransPreviewProgressDialog( shell, previewMeta,
      new String[] { wStepname.getText() }, new int[] { previewSize } );
    progressDialog.open();

    Trans trans = progressDialog.getTrans();
    String loggingText = progressDialog.getLoggingText();

    if ( !progressDialog.isCancelled() ) {
      if ( trans.getResult() != null && trans.getResult().getNrErrors() > 0 ) {
        EnterTextDialog etd = new EnterTextDialog( shell,
          Messages.getString( "System.Dialog.PreviewError.Title" ),
          Messages.getString( "System.Dialog.PreviewError.Message" ), loggingText, true );
etd.setReadOnly(); etd.open(); } } PreviewRowsDialog prd = new PreviewRowsDialog( shell, transMeta, SWT.NONE, wStepname.getText(), progressDialog.getPreviewRowsMeta( wStepname.getText() ), progressDialog.getPreviewRows( wStepname.getText() ), loggingText ); prd.open(); } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.node.internal;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.UnmodifiableIterator;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Names;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.FailedToResolveConfigException;

import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.Strings.cleanPath;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;

/**
 * Builds the effective node {@link Settings} and {@link Environment} from the
 * initial settings, JVM system properties ({@code es.*} / {@code elasticsearch.*})
 * and, optionally, the config file found in the configuration directory.
 */
public class InternalSettingsPreparer {

    // Config file extensions tried (in order) when loading from the config directory.
    static final List<String> ALLOWED_SUFFIXES = ImmutableList.of(".yml", ".yaml", ".json", ".properties");

    // Placeholder values that trigger an interactive prompt when a Terminal is available.
    public static final String SECRET_PROMPT_VALUE = "${prompt.secret}";
    public static final String TEXT_PROMPT_VALUE = "${prompt.text}";
    // When true, JVM system properties are not merged into the settings.
    public static final String IGNORE_SYSTEM_PROPERTIES_SETTING = "config.ignore_system_properties";

    /**
     * Prepares the settings by gathering all elasticsearch system properties, optionally loading the
     * configuration settings, and then replacing all property placeholders. This method will not work
     * with settings that have <code>${prompt.text}</code> or <code>${prompt.secret}</code> as their
     * value unless they have been resolved previously.
     * @param pSettings The initial settings to use
     * @param loadConfigSettings flag to indicate whether to load settings from the configuration directory/file
     * @return the {@link Settings} and {@link Environment} as a {@link Tuple}
     */
    public static Tuple<Settings, Environment> prepareSettings(Settings pSettings, boolean loadConfigSettings) {
        // Delegates with a null Terminal, i.e. prompting is unsupported on this path.
        return prepareSettings(pSettings, loadConfigSettings, null);
    }

    /**
     * Prepares the settings by gathering all elasticsearch system properties, optionally loading the
     * configuration settings, and then replacing all property placeholders. If a {@link Terminal} is
     * provided and configuration settings are loaded, settings with a value of
     * <code>${prompt.text}</code> or <code>${prompt.secret}</code> will result in a prompt for
     * the setting to the user.
     * @param pSettings The initial settings to use
     * @param loadConfigSettings flag to indicate whether to load settings from the configuration directory/file
     * @param terminal the Terminal to use for input/output
     * @return the {@link Settings} and {@link Environment} as a {@link Tuple}
     */
    public static Tuple<Settings, Environment> prepareSettings(Settings pSettings, boolean loadConfigSettings, Terminal terminal) {
        // ignore these prefixes when getting properties from es. and elasticsearch.
        // (so "es.default.foo" is not also picked up as "es." property "default.foo")
        String[] ignorePrefixes = new String[]{"es.default.", "elasticsearch.default."};
        boolean useSystemProperties = !pSettings.getAsBoolean(IGNORE_SYSTEM_PROPERTIES_SETTING, false);
        // just create enough settings to build the environment
        ImmutableSettings.Builder settingsBuilder = settingsBuilder().put(pSettings);
        if (useSystemProperties) {
            settingsBuilder.putProperties("elasticsearch.default.", System.getProperties())
                    .putProperties("es.default.", System.getProperties())
                    .putProperties("elasticsearch.", System.getProperties(), ignorePrefixes)
                    .putProperties("es.", System.getProperties(), ignorePrefixes);
        }
        settingsBuilder.replacePropertyPlaceholders();

        Environment environment = new Environment(settingsBuilder.build());

        if (loadConfigSettings) {
            boolean loadFromEnv = true;
            if (useSystemProperties) {
                // if it's the default config, load it, but also load from env
                if (Strings.hasText(System.getProperty("es.default.config"))) {
                    loadFromEnv = true;
                    settingsBuilder.loadFromUrl(environment.resolveConfig(System.getProperty("es.default.config")));
                }
                // if explicit, just load it and don't load from env
                if (Strings.hasText(System.getProperty("es.config"))) {
                    loadFromEnv = false;
                    settingsBuilder.loadFromUrl(environment.resolveConfig(System.getProperty("es.config")));
                }
                if (Strings.hasText(System.getProperty("elasticsearch.config"))) {
                    loadFromEnv = false;
                    settingsBuilder.loadFromUrl(environment.resolveConfig(System.getProperty("elasticsearch.config")));
                }
            }
            if (loadFromEnv) {
                // Try elasticsearch.{yml,yaml,json,properties} in the config directory;
                // a missing file is simply skipped.
                for (String allowedSuffix : ALLOWED_SUFFIXES) {
                    try {
                        settingsBuilder.loadFromUrl(environment.resolveConfig("elasticsearch" + allowedSuffix));
                    } catch (FailedToResolveConfigException e) {
                        // ignore
                    }
                }
            }
        }

        // Re-apply the initial settings and system properties so they win over values
        // loaded from the config file.
        settingsBuilder.put(pSettings);
        if (useSystemProperties) {
            settingsBuilder.putProperties("elasticsearch.", System.getProperties(), ignorePrefixes)
                    .putProperties("es.", System.getProperties(), ignorePrefixes);
        }
        settingsBuilder.replacePropertyPlaceholders();

        // allow to force set properties based on configuration of the settings provided
        for (Map.Entry<String, String> entry : pSettings.getAsMap().entrySet()) {
            String setting = entry.getKey();
            if (setting.startsWith("force.")) {
                settingsBuilder.remove(setting);
                settingsBuilder.put(setting.substring("force.".length()), entry.getValue());
            }
        }
        settingsBuilder.replacePropertyPlaceholders();

        // check if name is set in settings, if not look for system property and set it
        if (settingsBuilder.get("name") == null) {
            String name = System.getProperty("name");
            if (name != null) {
                settingsBuilder.put("name", name);
            }
        }

        // put the cluster name
        if (settingsBuilder.get(ClusterName.SETTING) == null) {
            settingsBuilder.put(ClusterName.SETTING, ClusterName.DEFAULT.value());
        }

        Settings settings = replacePromptPlaceholders(settingsBuilder.build(), terminal);
        // all settings placeholders have been resolved. resolve the value for the name setting by checking for name,
        // then looking for node.name, and finally generate one if needed
        if (settings.get("name") == null) {
            final String name = settings.get("node.name");
            if (name == null || name.isEmpty()) {
                settings = settingsBuilder().put(settings)
                        .put("name", Names.randomNodeName(environment.resolveConfig("names.txt")))
                        .build();
            } else {
                settings = settingsBuilder().put(settings)
                        .put("name", name)
                        .build();
            }
        }

        // The config file may have changed path settings, so rebuild the environment.
        environment = new Environment(settings);

        // put back the env settings
        settingsBuilder = settingsBuilder().put(settings);
        // we put back the path.logs so we can use it in the logging configuration file
        settingsBuilder.put("path.logs", cleanPath(environment.logsFile().getAbsolutePath()));

        settings = settingsBuilder.build();

        return new Tuple<>(settings, environment);
    }

    /**
     * Replaces {@link #SECRET_PROMPT_VALUE} / {@link #TEXT_PROMPT_VALUE} values by prompting the
     * user on the given terminal. A blank answer removes the setting entirely (it is simply not
     * copied into the result); all other settings are copied through unchanged.
     */
    static Settings replacePromptPlaceholders(Settings settings, Terminal terminal) {
        UnmodifiableIterator<Map.Entry<String, String>> iter = settings.getAsMap().entrySet().iterator();
        ImmutableSettings.Builder builder = ImmutableSettings.builder().classLoader(settings.getClassLoaderIfSet());

        while (iter.hasNext()) {
            Map.Entry<String, String> entry = iter.next();
            String value = entry.getValue();
            String key = entry.getKey();
            switch (value) {
                case SECRET_PROMPT_VALUE:
                    String secretValue = promptForValue(key, terminal, true);
                    if (Strings.hasLength(secretValue)) {
                        builder.put(key, secretValue);
                    }
                    break;
                case TEXT_PROMPT_VALUE:
                    String textValue = promptForValue(key, terminal, false);
                    if (Strings.hasLength(textValue)) {
                        builder.put(key, textValue);
                    }
                    break;
                default:
                    builder.put(key, value);
                    break;
            }
        }

        return builder.build();
    }

    /**
     * Prompts for a single setting value; {@code secret} selects hidden input.
     * @throws UnsupportedOperationException when no terminal is available (e.g. daemonized)
     */
    static String promptForValue(String key, Terminal terminal, boolean secret) {
        if (terminal == null) {
            throw new UnsupportedOperationException("found property [" + key + "] with value [" + (secret ? SECRET_PROMPT_VALUE : TEXT_PROMPT_VALUE) +"]. prompting for property values is only supported when running elasticsearch in the foreground");
        }

        if (secret) {
            // readSecret returns a char[]; copy into a String for the settings map.
            return new String(terminal.readSecret("Enter value for [%s]: ", key));
        }
        return terminal.readText("Enter value for [%s]: ", key);
    }
}
package org.apache.cordova.test.junit;
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.test.ActivityInstrumentationTestCase2;
import android.test.suitebuilder.annotation.Suppress;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaResourceApi;
import org.apache.cordova.CordovaResourceApi.OpenForReadResult;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.PluginEntry;
import org.apache.cordova.test.CordovaWebViewTestActivity;
import org.apache.cordova.test.R;
import org.json.JSONArray;
import org.json.JSONException;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Scanner;

/**
 * Instrumentation tests for {@link CordovaResourceApi}: URI remapping, reading,
 * writing and MIME-type resolution across content://, file://, asset, data: and
 * plugin-remapped URIs.
 */
public class CordovaResourceApiTest extends ActivityInstrumentationTestCase2<CordovaWebViewTestActivity> {

    public CordovaResourceApiTest() {
        super(CordovaWebViewTestActivity.class);
    }

    CordovaWebView cordovaWebView;
    CordovaResourceApi resourceApi;

    private CordovaWebViewTestActivity activity;
    // Written by the test plugin's execute() under the test-instance lock; read after wait().
    String execPayload;
    Integer execStatus;

    /**
     * Grabs the activity's webview/resource API and registers a test plugin:
     * its remapUri() rewrites any URI whose query contains "pluginRewrite" to a
     * data: URI, and its execute() hands payload/status back to the test thread
     * via synchronized/notify.
     */
    protected void setUp() throws Exception {
        super.setUp();
        activity = this.getActivity();
        cordovaWebView = activity.cordovaWebView;
        resourceApi = cordovaWebView.getResourceApi();
        // Disabled by default so tests may call the API from the instrumentation thread.
        resourceApi.setThreadCheckingEnabled(false);
        cordovaWebView.pluginManager.addService(new PluginEntry("CordovaResourceApiTestPlugin1", new CordovaPlugin() {
            @Override
            public Uri remapUri(Uri uri) {
                if (uri.getQuery() != null && uri.getQuery().contains("pluginRewrite")) {
                    return cordovaWebView.getResourceApi().remapUri(
                            Uri.parse("data:text/plain;charset=utf-8,pass"));
                }
                return null;
            }
            public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
                synchronized (CordovaResourceApiTest.this) {
                    execPayload = args.getString(0);
                    execStatus = args.getInt(1);
                    CordovaResourceApiTest.this.notify();
                }
                return true;
            }
        }));
    }

    /** Inserts the app icon into MediaStore and returns its content:// URI. */
    private Uri createTestImageContentUri() {
        Bitmap imageBitmap = BitmapFactory.decodeResource(activity.getResources(), R.drawable.icon);
        String stored = MediaStore.Images.Media.insertImage(activity.getContentResolver(),
                imageBitmap, "app-icon", "desc");
        return Uri.parse(stored);
    }

    /**
     * Shared assertion harness: remaps the URI, checks file mapping and MIME type,
     * then attempts one read and one write, asserting each succeeds or throws
     * according to expectRead/expectWrite.
     */
    private void performApiTest(Uri uri, String expectedMimeType, File expectedLocalFile,
            boolean expectRead, boolean expectWrite) throws IOException {
        uri = resourceApi.remapUri(uri);
        assertEquals(expectedLocalFile, resourceApi.mapUriToFile(uri));

        try {
            OpenForReadResult readResult = resourceApi.openForRead(uri);
            String mimeType2 = resourceApi.getMimeType(uri);
            assertEquals("openForRead mime-type", expectedMimeType, readResult.mimeType);
            assertEquals("getMimeType mime-type", expectedMimeType, mimeType2);
            readResult.inputStream.read();
            if (!expectRead) {
                fail("Expected getInputStream to throw.");
            }
        } catch (IOException e) {
            if (expectRead) {
                throw e;
            }
        }
        try {
            OutputStream outStream = resourceApi.openOutputStream(uri);
            outStream.write(123);
            if (!expectWrite) {
                fail("Expected getOutputStream to throw.");
            }
            outStream.close();
        } catch (IOException e) {
            if (expectWrite) {
                throw e;
            }
        }
    }

    @Suppress
    public void testValidContentUri() throws IOException {
        Uri contentUri = createTestImageContentUri();
        File localFile = resourceApi.mapUriToFile(contentUri);
        assertNotNull(localFile);
        performApiTest(contentUri, "image/jpeg", localFile, true, true);
    }

    public void testInvalidContentUri() throws IOException {
        Uri contentUri = Uri.parse("content://media/external/images/media/999999999");
        performApiTest(contentUri, null, null, false, false);
    }

    public void testValidAssetUri() throws IOException {
        Uri assetUri = Uri.parse("file:///android_asset/www/index.html?foo#bar"); // Also check for stripping off ? and # correctly.
        performApiTest(assetUri, "text/html", null, true, false);
    }

    public void testInvalidAssetUri() throws IOException {
        Uri assetUri = Uri.parse("file:///android_asset/www/missing.html");
        performApiTest(assetUri, "text/html", null, false, false);
    }

    public void testFileUriToExistingFile() throws IOException {
        File f = File.createTempFile("te s t", ".txt"); // Also check for dealing with spaces.
        try {
            Uri fileUri = Uri.parse(f.toURI().toString() + "?foo#bar"); // Also check for stripping off ? and # correctly.
            performApiTest(fileUri, "text/plain", f, true, true);
        } finally {
            f.delete();
        }
    }

    public void testFileUriToMissingFile() throws IOException {
        File f = new File(Environment.getExternalStorageDirectory() + "/somefilethatdoesntexist");
        Uri fileUri = Uri.parse(f.toURI().toString());
        try {
            performApiTest(fileUri, null, f, false, true);
        } finally {
            f.delete();
        }
    }

    public void testFileUriToMissingFileWithMissingParent() throws IOException {
        File f = new File(Environment.getExternalStorageDirectory() + "/somedirthatismissing" + System.currentTimeMillis() + "/somefilethatdoesntexist");
        Uri fileUri = Uri.parse(f.toURI().toString());
        performApiTest(fileUri, null, f, false, true);
    }

    public void testUnrecognizedUri() throws IOException {
        Uri uri = Uri.parse("somescheme://foo");
        performApiTest(uri, null, null, false, false);
    }

    /** Relative URIs must be rejected by openForRead. */
    public void testRelativeUri() {
        try {
            resourceApi.openForRead(Uri.parse("/foo"));
            fail("Should have thrown for relative URI 1.");
        } catch (Throwable t) {
        }
        try {
            resourceApi.openForRead(Uri.parse("//foo/bar"));
            fail("Should have thrown for relative URI 2.");
        } catch (Throwable t) {
        }
        try {
            resourceApi.openForRead(Uri.parse("foo.png"));
            fail("Should have thrown for relative URI 3.");
        } catch (Throwable t) {
        }
    }

    /** The setUp() plugin remaps this URI to a data: URI containing "pass". */
    public void testPluginOverride() throws IOException {
        Uri uri = Uri.parse("plugin-uri://foohost/android_asset/www/index.html?pluginRewrite=yes");
        performApiTest(uri, "text/plain", null, true, false);
    }

    /** With thread checking re-enabled, main-thread access must throw. */
    public void testMainThreadUsage() throws IOException {
        Uri assetUri = Uri.parse("file:///android_asset/www/index.html");
        resourceApi.setThreadCheckingEnabled(true);
        try {
            resourceApi.openForRead(assetUri);
            fail("Should have thrown for main thread check.");
        } catch (Throwable t) {
        }
    }

    public void testDataUriPlain() throws IOException {
        Uri uri = Uri.parse("data:text/plain;charset=utf-8,pa%20ss");
        OpenForReadResult readResult = resourceApi.openForRead(uri);
        assertEquals("text/plain", readResult.mimeType);
        // Scanner with \A delimiter slurps the whole stream in one token.
        String data = new Scanner(readResult.inputStream, "UTF-8").useDelimiter("\\A").next();
        assertEquals("pa ss", data);
    }

    public void testDataUriBase64() throws IOException {
        Uri uri = Uri.parse("data:text/js;charset=utf-8;base64,cGFzcw==");
        OpenForReadResult readResult = resourceApi.openForRead(uri);
        assertEquals("text/js", readResult.mimeType);
        String data = new Scanner(readResult.inputStream, "UTF-8").useDelimiter("\\A").next();
        assertEquals("pass", data);
    }

    /* TODO(Junmin): fix this case.
    public void testWebViewRequestIntercept() throws IOException {
        cordovaWebView.sendJavascript(
            "var x = new XMLHttpRequest;\n" +
            "x.open('GET', 'file://foo?pluginRewrite=1', false);\n" +
            "x.send();\n" +
            "cordova.require('cordova/exec')(null,null,'CordovaResourceApiTestPlugin1', 'foo', [x.responseText, x.status])");
        execPayload = null;
        execStatus = null;
        try {
            synchronized (this) {
                this.wait(2000);
            }
        } catch (InterruptedException e) {
        }
        assertEquals("pass", execPayload);
        assertEquals(execStatus.intValue(), 200);
    }
    */
    /* TODO(Junmin): fix this case.
    public void testWebViewWhiteListRejection() throws IOException {
        cordovaWebView.sendJavascript(
            "var x = new XMLHttpRequest;\n" +
            "x.open('GET', 'http://foo/bar', false);\n" +
            "x.send();\n" +
            "cordova.require('cordova/exec')(null,null,'CordovaResourceApiTestPlugin1', 'foo', [x.responseText, x.status])");
        execPayload = null;
        execStatus = null;
        try {
            synchronized (this) {
                this.wait(2000);
            }
        } catch (InterruptedException e) {
        }
        assertEquals("", execPayload);
        assertEquals(execStatus.intValue(), 404);
    }
    */
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.vcs.impl.projectlevelman; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.util.BackgroundTaskUtil; import com.intellij.openapi.project.DumbAwareRunnable; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.util.EmptyRunnable; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.*; import com.intellij.openapi.vcs.impl.DefaultVcsRootPolicy; import com.intellij.openapi.vcs.impl.ProjectLevelVcsManagerImpl; import com.intellij.openapi.vcs.impl.VcsInitObject; import com.intellij.openapi.vcs.ui.VcsBalloonProblemNotifier; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.ArrayUtil; import com.intellij.util.Functions; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; import java.util.function.Function; import static com.intellij.util.containers.ContainerUtil.map; import static com.intellij.util.containers.ContainerUtil.mapNotNull; import static 
java.util.function.Function.identity;

/**
 * Holds the project's VCS directory mappings (path -> VCS name), keeps the set of
 * active VCSes in sync with those mappings, and answers "which mapping/VCS governs
 * this file" queries. All mutable state is guarded by {@code myLock}.
 *
 * NOTE: this chunk is truncated at the end of {@code clearImpl()}.
 */
public class NewMappings {
  public static final Comparator<VcsDirectoryMapping> MAPPINGS_COMPARATOR = Comparator.comparing(VcsDirectoryMapping::getDirectory);

  private final static Logger LOG = Logger.getInstance("#com.intellij.openapi.vcs.impl.projectlevelman.NewMappings");
  // Guards myVcsToPaths, myActiveVcses, mySortedMappings and myActivated.
  private final Object myLock;

  // vcs to mappings
  private final MultiMap<String, VcsDirectoryMapping> myVcsToPaths;
  private AbstractVcs[] myActiveVcses;
  // Mappings sorted for longest-suffix-wins lookup in getMappingFor().
  private VcsDirectoryMapping[] mySortedMappings;
  private FileWatchRequestsManager myFileWatchRequestsManager;
  private final DefaultVcsRootPolicy myDefaultVcsRootPolicy;
  private final ProjectLevelVcsManager myVcsManager;
  private final FileStatusManager myFileStatusManager;
  private final Project myProject;

  // Set once VCS activation has been requested; until then keepActiveVcs() runs bare.
  private boolean myActivated;

  public NewMappings(Project project, ProjectLevelVcsManagerImpl vcsManager, FileStatusManager fileStatusManager) {
    myProject = project;
    myVcsManager = vcsManager;
    myFileStatusManager = fileStatusManager;
    myLock = new Object();
    myVcsToPaths = MultiMap.createOrderedSet();
    myFileWatchRequestsManager = new FileWatchRequestsManager(myProject, this, LocalFileSystem.getInstance());
    myDefaultVcsRootPolicy = DefaultVcsRootPolicy.getInstance(project);
    myActiveVcses = new AbstractVcs[0];

    // Non-default projects start with the single "<Project>" mapping ("" directory, "" vcs).
    if (!myProject.isDefault()) {
      VcsDirectoryMapping mapping = new VcsDirectoryMapping("", "");
      myVcsToPaths.putValue("", mapping);
      mySortedMappings = new VcsDirectoryMapping[]{mapping};
    }
    else {
      mySortedMappings = VcsDirectoryMapping.EMPTY_ARRAY;
    }
    myActivated = false;

    // Activate mapped VCSes once the VCS subsystem has finished initializing.
    vcsManager.addInitializationRequest(VcsInitObject.MAPPINGS, (DumbAwareRunnable)() -> {
      if (!myProject.isDisposed()) {
        activateActiveVcses();
      }
    });
  }

  // for tests
  public void setFileWatchRequestsManager(FileWatchRequestsManager fileWatchRequestsManager) {
    assert ApplicationManager.getApplication().isUnitTestMode();
    myFileWatchRequestsManager = fileWatchRequestsManager;
  }

  /** Returns a defensive copy of the currently active VCSes. */
  public AbstractVcs[] getActiveVcses() {
    synchronized (myLock) {
      final AbstractVcs[] result = new AbstractVcs[myActiveVcses.length];
      System.arraycopy(myActiveVcses, 0, result, 0, myActiveVcses.length);
      return result;
    }
  }

  public boolean hasActiveVcss() {
    synchronized (myLock) {
      return myActiveVcses.length > 0;
    }
  }

  /** Marks the mappings as activated and activates the VCSes of the current mappings (idempotent). */
  public void activateActiveVcses() {
    synchronized (myLock) {
      if (myActivated) return;
      myActivated = true;
    }
    keepActiveVcs(EmptyRunnable.getInstance());
    mappingsChanged();
  }

  /**
   * Adds (or switches) the mapping for {@code path} to the VCS named
   * {@code activeVcsName}; duplicates are ignored. Fires mappingsChanged().
   */
  public void setMapping(final String path, final String activeVcsName) {
    LOG.debug("setMapping path = '" + path + "' vcs = " + activeVcsName);
    final VcsDirectoryMapping newMapping = new VcsDirectoryMapping(path, activeVcsName);
    // do not add duplicates
    synchronized (myLock) {
      Collection<VcsDirectoryMapping> vcsDirectoryMappings = myVcsToPaths.get(activeVcsName);
      if (vcsDirectoryMappings.contains(newMapping)) {
        return;
      }
    }

    final Ref<Boolean> switched = new Ref<>(Boolean.FALSE);
    keepActiveVcs(() -> {
      // sorted -> map. sorted mappings are NOT changed;
      switched.set(trySwitchVcs(path, activeVcsName));
      if (!switched.get().booleanValue()) {
        myVcsToPaths.putValue(newMapping.getVcs(), newMapping);
        sortedMappingsByMap();
      }
    });

    mappingsChanged();
  }

  /**
   * Runs {@code runnable} (which mutates the mappings) under the lock, then
   * activates/deactivates VCSes so that the active set matches the new mappings.
   * Before activation has happened, the runnable is executed without any
   * activation bookkeeping. Note: MyVcsActivator.activate is deliberately called
   * OUTSIDE the lock.
   */
  private void keepActiveVcs(@NotNull Runnable runnable) {
    final MyVcsActivator activator;
    synchronized (myLock) {
      if (!myActivated) {
        runnable.run();
        return;
      }
      final HashSet<String> old = new HashSet<>();
      for (AbstractVcs activeVcs : myActiveVcses) {
        old.add(activeVcs.getName());
      }
      activator = new MyVcsActivator(old);
      runnable.run();
      restoreActiveVcses();
    }
    activator.activate(myVcsToPaths.keySet(), AllVcses.getInstance(myProject));
  }

  /** Rebuilds myActiveVcses from the VCS names currently present in the mapping map. */
  private void restoreActiveVcses() {
    synchronized (myLock) {
      final Set<String> set = myVcsToPaths.keySet();
      final List<AbstractVcs> list = new ArrayList<>(set.size());
      for (String s : set) {
        if (s.trim().length() == 0) continue; // "" is the <Project> pseudo-VCS
        final AbstractVcs vcs = AllVcses.getInstance(myProject).getByName(s);
        if (vcs != null) {
          list.add(vcs);
        }
      }
      myActiveVcses = list.toArray(new AbstractVcs[list.size()]);
    }
  }

  /** Broadcasts a mappings-changed event, refreshes file statuses and file watchers. */
  public void mappingsChanged() {
    BackgroundTaskUtil.syncPublisher(myProject, ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED).directoryMappingChanged();
    myFileStatusManager.fileStatusesChanged();
    myFileWatchRequestsManager.ping();
    dumpMappingsToLog();
  }

  private void dumpMappingsToLog() {
    for (VcsDirectoryMapping mapping : mySortedMappings) {
      String path = mapping.isDefaultMapping() ? VcsDirectoryMapping.PROJECT_CONSTANT : mapping.getDirectory();
      String vcs = mapping.getVcs();
      LOG.info(String.format("VCS Root: [%s] - [%s]", vcs, path));
    }
  }

  /**
   * Replaces all mappings with {@code items}; an empty list degenerates to the
   * single default ("", "") mapping. Fires mappingsChanged().
   */
  public void setDirectoryMappings(final List<VcsDirectoryMapping> items) {
    LOG.debug("setDirectoryMappings, size: " + items.size());

    final List<VcsDirectoryMapping> itemsCopy;
    if (items.isEmpty()) {
      itemsCopy = Collections.singletonList(new VcsDirectoryMapping("", ""));
    }
    else {
      itemsCopy = items;
    }

    keepActiveVcs(() -> {
      myVcsToPaths.clear();
      for (VcsDirectoryMapping mapping : itemsCopy) {
        myVcsToPaths.putValue(mapping.getVcs(), mapping);
      }
      sortedMappingsByMap();
    });

    mappingsChanged();
  }

  /** Mapping lookup for local files; non-local files have no mapping. */
  @Nullable
  public VcsDirectoryMapping getMappingFor(@Nullable VirtualFile file) {
    if (file == null) return null;
    if (!file.isInLocalFileSystem()) {
      return null;
    }

    return getMappingFor(file, myDefaultVcsRootPolicy.getMatchContext(file));
  }

  /**
   * Finds the mapping governing {@code file}; mappings are scanned from the end
   * of the sorted array so the longest (most specific) matching directory wins.
   */
  @Nullable
  public VcsDirectoryMapping getMappingFor(final VirtualFile file, final Object parentModule) {
    // if parentModule is not null it means that file belongs to the module so it isn't excluded
    if (parentModule == null && myVcsManager.isIgnored(file)) {
      return null;
    }

    // performance: calculate file path just once, rather than once per mapping
    String path = file.getPath();
    // Directories get a trailing "/" so that prefix matching can't match partial names.
    final String systemIndependentPath = FileUtil.toSystemIndependentName((file.isDirectory() && (!path.endsWith("/"))) ? (path + "/") : path);
    final VcsDirectoryMapping[] mappings;
    synchronized (myLock) {
      mappings = mySortedMappings;
    }
    for (int i = mappings.length - 1; i >= 0; --i) {
      final VcsDirectoryMapping mapping = mappings[i];
      if (fileMatchesMapping(file, parentModule, systemIndependentPath, mapping)) {
        return mapping;
      }
    }
    return null;
  }

  @Nullable
  public String getVcsFor(@NotNull VirtualFile file) {
    VcsDirectoryMapping mapping = getMappingFor(file);
    if (mapping == null) {
      return null;
    }
    return mapping.getVcs();
  }

  /** Empty directory means the default (<Project>) mapping; otherwise path-prefix match. */
  private boolean fileMatchesMapping(@NotNull VirtualFile file,
                                     final Object matchContext,
                                     final String systemIndependentPath,
                                     final VcsDirectoryMapping mapping) {
    if (mapping.getDirectory().length() == 0) {
      return myDefaultVcsRootPolicy.matchesDefaultMapping(file, matchContext);
    }
    return FileUtil.startsWith(systemIndependentPath, mapping.systemIndependentPath());
  }

  /**
   * Resolves all mappings of the given VCS to existing directories: default
   * mappings expand via the root policy; explicit mappings resolve through the
   * local file system. Non-directories are filtered out.
   */
  @NotNull
  public List<VirtualFile> getMappingsAsFilesUnderVcs(@NotNull AbstractVcs vcs) {
    final List<VirtualFile> result = new ArrayList<>();
    final String vcsName = vcs.getName();

    final List<VcsDirectoryMapping> mappings;
    synchronized (myLock) {
      final Collection<VcsDirectoryMapping> vcsMappings = myVcsToPaths.get(vcsName);
      if (vcsMappings.isEmpty()) return result;
      mappings = new ArrayList<>(vcsMappings);
    }

    for (VcsDirectoryMapping mapping : mappings) {
      if (mapping.isDefaultMapping()) {
        result.addAll(myDefaultVcsRootPolicy.getDefaultVcsRoots(this, vcsName));
      }
      else {
        final VirtualFile file = LocalFileSystem.getInstance().findFileByPath(mapping.getDirectory());
        if (file != null) {
          result.add(file);
        }
      }
    }
    result.removeIf(file -> !file.isDirectory());
    return result;
  }

  public void disposeMe() {
    LOG.debug("dispose me");
    clearImpl();
  }

  public void clear() {
    LOG.debug("clear");
    clearImpl();
    mappingsChanged();
  }

  private void clearImpl() {
    // if vcses were not mapped, there's nothing to clear
    if ((myActiveVcses == null) || (myActiveVcses.length == 0)) return;

    keepActiveVcs(() -> {
      myVcsToPaths.clear();
      myActiveVcses =
new AbstractVcs[0]; mySortedMappings = VcsDirectoryMapping.EMPTY_ARRAY; }); myFileWatchRequestsManager.ping(); } public List<VcsDirectoryMapping> getDirectoryMappings() { synchronized (myLock) { return Arrays.asList(mySortedMappings); } } public List<VcsDirectoryMapping> getDirectoryMappings(String vcsName) { synchronized (myLock) { Collection<VcsDirectoryMapping> mappings = myVcsToPaths.get(vcsName); return mappings.isEmpty() ? new ArrayList<>() : new ArrayList<>(mappings); } } public void cleanupMappings() { synchronized (myLock) { removeRedundantMappings(); } myFileWatchRequestsManager.ping(); } @Nullable public String haveDefaultMapping() { synchronized (myLock) { // empty mapping MUST be first if (mySortedMappings.length == 0) return null; return mySortedMappings[0].isDefaultMapping() ? mySortedMappings[0].getVcs() : null; } } public boolean isEmpty() { synchronized (myLock) { return mySortedMappings.length == 0 || ContainerUtil.and(mySortedMappings, mapping -> mapping.getVcs().isEmpty()); } } public void removeDirectoryMapping(final VcsDirectoryMapping mapping) { LOG.debug("remove mapping: " + mapping.getDirectory()); keepActiveVcs(() -> { if (removeVcsFromMap(mapping, mapping.getVcs())) { sortedMappingsByMap(); } }); mappingsChanged(); } // todo area for optimization private void removeRedundantMappings() { final LocalFileSystem lfs = LocalFileSystem.getInstance(); final AllVcsesI allVcses = AllVcses.getInstance(myProject); for (Iterator<String> iterator = myVcsToPaths.keySet().iterator(); iterator.hasNext(); ) { final String vcsName = iterator.next(); final Collection<VcsDirectoryMapping> mappings = myVcsToPaths.get(vcsName); List<Pair<VirtualFile, VcsDirectoryMapping>> objects = mapNotNull(mappings, dm -> { VirtualFile vf = lfs.findFileByPath(dm.getDirectory()); if (vf == null) { vf = lfs.refreshAndFindFileByPath(dm.getDirectory()); } return vf == null ? 
null : Pair.create(vf, dm); }); final List<Pair<VirtualFile, VcsDirectoryMapping>> filteredFiles; // todo static Function<Pair<VirtualFile, VcsDirectoryMapping>, VirtualFile> fileConvertor = pair -> pair.getFirst(); if (StringUtil.isEmptyOrSpaces(vcsName)) { filteredFiles = AbstractVcs.filterUniqueRootsDefault(objects, fileConvertor); } else { final AbstractVcs<?> vcs = allVcses.getByName(vcsName); if (vcs == null) { VcsBalloonProblemNotifier.showOverChangesView(myProject, "VCS plugin not found for mapping to : '" + vcsName + "'", MessageType.ERROR); continue; } filteredFiles = vcs.filterUniqueRoots(objects, fileConvertor); } List<VcsDirectoryMapping> filteredMappings = map(filteredFiles, Functions.pairSecond()); // to calculate what had been removed mappings.removeAll(filteredMappings); if (filteredMappings.isEmpty()) { iterator.remove(); } else { mappings.clear(); mappings.addAll(filteredMappings); } } sortedMappingsByMap(); } private boolean trySwitchVcs(final String path, final String activeVcsName) { final String fixedPath = FileUtil.toSystemIndependentName(path); for (VcsDirectoryMapping mapping : mySortedMappings) { if (mapping.systemIndependentPath().equals(fixedPath)) { final String oldVcs = mapping.getVcs(); if (!oldVcs.equals(activeVcsName)) { migrateVcs(activeVcsName, mapping, oldVcs); } return true; } } return false; } private void sortedMappingsByMap() { mySortedMappings = ArrayUtil.toObjectArray(myVcsToPaths.values(), VcsDirectoryMapping.class); Arrays.sort(mySortedMappings, MAPPINGS_COMPARATOR); } private void migrateVcs(String activeVcsName, VcsDirectoryMapping mapping, String oldVcs) { mapping.setVcs(activeVcsName); removeVcsFromMap(mapping, oldVcs); myVcsToPaths.putValue(activeVcsName, mapping); } private boolean removeVcsFromMap(VcsDirectoryMapping mapping, String oldVcs) { return myVcsToPaths.remove(oldVcs, mapping); } private static class MyVcsActivator { private final Set<String> myOld; public MyVcsActivator(final Set<String> old) { myOld = 
old; } public void activate(final Set<String> newOne, final AllVcsesI vcsesI) { final Set<String> toAdd = notInBottom(newOne, myOld); final Set<String> toRemove = notInBottom(myOld, newOne); if (toAdd != null) { for (String s : toAdd) { final AbstractVcs vcs = vcsesI.getByName(s); if (vcs != null) { try { vcs.doActivate(); } catch (VcsException e) { // actually is not thrown (AbstractVcs#actualActivate()) } } else { LOG.info("Error: activating non existing vcs: " + s); } } } if (toRemove != null) { for (String s : toRemove) { final AbstractVcs vcs = vcsesI.getByName(s); if (vcs != null) { try { vcs.doDeactivate(); } catch (VcsException e) { // actually is not thrown (AbstractVcs#actualDeactivate()) } } else { LOG.info("Error: removing non existing vcs: " + s); } } } } @Nullable private static Set<String> notInBottom(final Set<String> top, final Set<String> bottom) { Set<String> notInBottom = null; for (String topItem : top) { // omit empty vcs: not a vcs if (topItem.trim().length() == 0) continue; if (!bottom.contains(topItem)) { if (notInBottom == null) { notInBottom = new HashSet<>(); } notInBottom.add(topItem); } } return notInBottom; } } public boolean haveActiveVcs(final String name) { synchronized (myLock) { return myVcsToPaths.containsKey(name); } } public void beingUnregistered(final String name) { synchronized (myLock) { keepActiveVcs(() -> { myVcsToPaths.remove(name); sortedMappingsByMap(); }); } mappingsChanged(); } @NotNull public List<VirtualFile> getDefaultRoots() { synchronized (myLock) { final String defaultVcs = haveDefaultMapping(); if (defaultVcs == null) return Collections.emptyList(); final List<VirtualFile> list = new ArrayList<>(myDefaultVcsRootPolicy.getDefaultVcsRoots(this, defaultVcs)); if (StringUtil.isEmptyOrSpaces(defaultVcs)) { return AbstractVcs.filterUniqueRootsDefault(list, identity()); } else { final AbstractVcs<?> vcs = AllVcses.getInstance(myProject).getByName(defaultVcs); if (vcs == null) { return 
AbstractVcs.filterUniqueRootsDefault(list, identity()); } return vcs.filterUniqueRoots(list, identity()); } } } }
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.buildtool;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import com.google.devtools.build.lib.actions.LocalHostCapacity;
import com.google.devtools.build.lib.analysis.BuildView;
import com.google.devtools.build.lib.analysis.OutputGroupProvider;
import com.google.devtools.build.lib.analysis.TopLevelArtifactContext;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.exec.ExecutionOptions;
import com.google.devtools.build.lib.packages.SkylarkSemanticsOptions;
import com.google.devtools.build.lib.pkgcache.LoadingOptions;
import com.google.devtools.build.lib.pkgcache.PackageCacheOptions;
import com.google.devtools.build.lib.runtime.BlazeCommandEventHandler;
import com.google.devtools.build.lib.util.OptionsUtils;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.common.options.Converters;
import com.google.devtools.common.options.Converters.RangeConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDocumentationCategory;
import com.google.devtools.common.options.OptionEffectTag;
import com.google.devtools.common.options.OptionMetadataTag;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsClassProvider;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.devtools.common.options.OptionsProvider;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.logging.Logger;
import java.util.regex.Pattern;

/**
 * A BuildRequest represents a single invocation of the build tool by a user.
 * A request specifies a list of targets to be built for a single
 * configuration, a pair of output/error streams, and additional options such
 * as --keep_going, --jobs, etc.
 */
public class BuildRequest implements OptionsClassProvider {
  private static final Logger logger = Logger.getLogger(BuildRequest.class.getName());

  /**
   * Options interface--can be used to parse command-line arguments.
   *
   * <p>See also ExecutionOptions; from the user's point of view, there's no
   * qualitative difference between these two sets of options.
   */
  public static class BuildRequestOptions extends OptionsBase {
    /* "Execution": options related to the execution of a build: */

    @Option(
      name = "jobs",
      abbrev = 'j',
      defaultValue = "auto",
      category = "strategy",
      documentationCategory = OptionDocumentationCategory.EXECUTION_STRATEGY,
      effectTags = {OptionEffectTag.HOST_MACHINE_RESOURCE_OPTIMIZATIONS, OptionEffectTag.EXECUTION},
      converter = JobsConverter.class,
      help =
          "The number of concurrent jobs to run. 0 means build sequentially."
              + " \"auto\" means to use a reasonable value derived from the machine's hardware"
              + " profile (e.g. the number of processors). Values above " + MAX_JOBS
              + " are not allowed, and values above " + JOBS_TOO_HIGH_WARNING
              + " may cause memory issues."
    )
    public int jobs;

    @Option(
      name = "progress_report_interval",
      defaultValue = "0",
      category = "verbosity",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      converter = ProgressReportIntervalConverter.class,
      help =
          "The number of seconds to wait between two reports on still running jobs. The "
              + "default value 0 means to use the default 10:30:60 incremental algorithm."
    )
    public int progressReportInterval;

    @Option(
      name = "explain",
      defaultValue = "null",
      category = "verbosity",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      converter = OptionsUtils.PathFragmentConverter.class,
      help =
          "Causes the build system to explain each executed step of the "
              + "build. The explanation is written to the specified log file."
    )
    public PathFragment explanationPath;

    @Option(
      name = "verbose_explanations",
      defaultValue = "false",
      category = "verbosity",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      help =
          "Increases the verbosity of the explanations issued if --explain is enabled. "
              + "Has no effect if --explain is not enabled."
    )
    public boolean verboseExplanations;

    @Option(
      name = "output_filter",
      converter = Converters.RegexPatternConverter.class,
      defaultValue = "null",
      category = "flags",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      help = "Only shows warnings for rules with a name matching the provided regular expression."
    )
    public Pattern outputFilter;

    // The following "dump_*" flags are all retained only so old command lines
    // still parse; each is tagged NO_OP/DEPRECATED and ignored.
    @Deprecated
    @Option(
      name = "dump_makefile",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.NO_OP},
      metadataTags = {OptionMetadataTag.DEPRECATED},
      help = "this flag has no effect."
    )
    public boolean dumpMakefile;

    @Deprecated
    @Option(
      name = "dump_action_graph",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.NO_OP},
      metadataTags = {OptionMetadataTag.DEPRECATED},
      help = "this flag has no effect."
    )
    public boolean dumpActionGraph;

    @Deprecated
    @Option(
      name = "dump_action_graph_for_package",
      allowMultiple = true,
      defaultValue = "",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.NO_OP},
      metadataTags = {OptionMetadataTag.DEPRECATED},
      help = "this flag has no effect."
    )
    public List<String> dumpActionGraphForPackage = new ArrayList<>();

    @Deprecated
    @Option(
      name = "dump_action_graph_with_middlemen",
      defaultValue = "true",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.NO_OP},
      metadataTags = {OptionMetadataTag.DEPRECATED},
      help = "this flag has no effect."
    )
    public boolean dumpActionGraphWithMiddlemen;

    @Deprecated
    @Option(
      name = "dump_providers",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.NO_OP},
      metadataTags = {OptionMetadataTag.DEPRECATED},
      help = "This is a no-op."
    )
    public boolean dumpProviders;

    @Deprecated
    @Option(
      name = "dump_targets",
      defaultValue = "null",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.NO_OP},
      metadataTags = {OptionMetadataTag.DEPRECATED},
      help = "this flag has no effect."
    )
    public String dumpTargets;

    @Deprecated
    @Option(
      name = "dump_host_deps",
      defaultValue = "true",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.NO_OP},
      metadataTags = {OptionMetadataTag.DEPRECATED},
      help = "Deprecated"
    )
    public boolean dumpHostDeps;

    @Deprecated
    @Option(
      name = "dump_to_stdout",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.NO_OP},
      metadataTags = {OptionMetadataTag.DEPRECATED},
      help = "Deprecated"
    )
    public boolean dumpToStdout;

    @Option(
      name = "experimental_post_build_query",
      defaultValue = "null",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.UNKNOWN}
    )
    public String queryExpression;

    @Option(
      name = "analyze",
      defaultValue = "true",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {
        OptionEffectTag.LOADING_AND_ANALYSIS,
        OptionEffectTag.AFFECTS_OUTPUTS
      },
      help =
          "Execute the analysis phase; this is the usual behaviour. Specifying --noanalyze causes "
              + "the build to stop before starting the analysis phase, returning zero iff the "
              + "package loading completed successfully; this mode is useful for testing."
    )
    public boolean performAnalysisPhase;

    @Option(
      name = "build",
      defaultValue = "true",
      category = "what",
      documentationCategory = OptionDocumentationCategory.OUTPUT_SELECTION,
      effectTags = {
        OptionEffectTag.EXECUTION,
        OptionEffectTag.AFFECTS_OUTPUTS
      },
      help =
          "Execute the build; this is the usual behaviour. "
              + "Specifying --nobuild causes the build to stop before executing the build "
              + "actions, returning zero iff the package loading and analysis phases completed "
              + "successfully; this mode is useful for testing those phases."
    )
    public boolean performExecutionPhase;

    @Option(
      name = "output_groups",
      converter = Converters.CommaSeparatedOptionListConverter.class,
      allowMultiple = true,
      documentationCategory = OptionDocumentationCategory.OUTPUT_SELECTION,
      effectTags = {OptionEffectTag.EXECUTION, OptionEffectTag.AFFECTS_OUTPUTS},
      defaultValue = "",
      help =
          "Specifies which output groups of the top-level targets to build. If omitted, a default "
              + "set of output groups are built. When specified the default set is overridden."
              + "However you may use --output_groups=+<output_group> or "
              + "--output_groups=-<output_group> to instead modify the set of output groups."
    )
    public List<String> outputGroups;

    @Option(
      name = "show_result",
      defaultValue = "1",
      category = "verbosity",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      help =
          "Show the results of the build. For each target, state whether or not it was brought "
              + "up-to-date, and if so, a list of output files that were built. The printed files "
              + "are convenient strings for copy+pasting to the shell, to execute them.\n"
              + "This option requires an integer argument, which is the threshold number of "
              + "targets above which result information is not printed. Thus zero causes "
              + "suppression of the message and MAX_INT causes printing of the result to occur "
              + "always. The default is one."
    )
    public int maxResultTargets;

    @Option(
      name = "experimental_show_artifacts",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      help =
          "Output a list of all top level artifacts produced by this build."
              + "Use output format suitable for tool consumption. "
              + "This flag is temporary and intended to facilitate Android Studio integration. "
              + "This output format will likely change in the future or disappear completely."
    )
    public boolean showArtifacts;

    @Option(
      name = "announce",
      defaultValue = "false",
      category = "verbosity",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      help = "Deprecated. No-op.",
      deprecationWarning = "This option is now deprecated and is a no-op"
    )
    public boolean announce;

    @Option(
      name = "symlink_prefix",
      defaultValue = "null",
      category = "misc",
      documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      help =
          "The prefix that is prepended to any of the convenience symlinks that are created "
              + "after a build. If '/' is passed, then no symlinks are created and no warning is "
              + "emitted. If omitted, the default value is the name of the build tool."
    )
    public String symlinkPrefix;

    @Option(
      name = "experimental_multi_cpu",
      converter = Converters.CommaSeparatedOptionListConverter.class,
      allowMultiple = true,
      defaultValue = "",
      category = "semantics",
      documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      metadataTags = {OptionMetadataTag.EXPERIMENTAL},
      help =
          "This flag allows specifying multiple target CPUs. If this is specified, "
              + "the --cpu option is ignored."
    )
    public List<String> multiCpus;

    @Option(
      name = "output_tree_tracking",
      oldName = "experimental_output_tree_tracking",
      defaultValue = "true",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION},
      help =
          "If set, tell the output service (if any) to track when files in the output "
              + "tree have been modified externally (not by the build system). "
              + "This should improve incremental build speed when an appropriate output service "
              + "is enabled."
    )
    public boolean finalizeActions;

    @Option(
      name = "aspects",
      converter = Converters.CommaSeparatedOptionListConverter.class,
      defaultValue = "",
      documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS,
      effectTags = {OptionEffectTag.UNKNOWN},
      allowMultiple = true,
      help =
          "Comma-separated list of aspects to be applied to top-level targets. All aspects "
              + "are applied to all top-level targets independently. Aspects are specified in "
              + "the form <bzl-file-label>%<aspect_name>, "
              + "for example '//tools:my_def.bzl%my_aspect', where 'my_aspect' is a top-level "
              + "value from from a file tools/my_def.bzl"
    )
    public List<String> aspects;

    /** @return the symlink prefix to use; falls back to "&lt;productName&gt;-" when unset. */
    public String getSymlinkPrefix(String productName) {
      return symlinkPrefix == null ? productName + "-" : symlinkPrefix;
    }

    // Transitional flag for safely rolling out new convenience symlink behavior.
    // To be made a no-op and deleted once new symlink behavior is battle-tested.
    @Option(
      name = "use_top_level_targets_for_symlinks",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
      help =
          "If enabled, the symlinks are based on the configurations of the top-level targets "
              + " rather than the top-level target configuration. If this would be ambiguous, "
              + " the symlinks will be deleted to avoid confusion."
    )
    public boolean useTopLevelTargetsForSymlinks;

    /**
     * Returns whether to use the output directories used by the top-level targets for convenience
     * symlinks.
     *
     * <p>If true, then symlinks use the actual output directories of the top-level targets.
     * The symlinks will be created iff all top-level targets share the same output directory.
     * Otherwise, any stale symlinks from previous invocations will be deleted to avoid ambiguity.
     *
     * <p>If false, then symlinks use the output directory implied by command-line flags, regardless
     * of whether top-level targets have transitions which change them (or even have any output
     * directories at all, as in the case of a build with no targets or one which only builds source
     * files).
     */
    public boolean useTopLevelTargetsForSymlinks() {
      return useTopLevelTargetsForSymlinks;
    }

    @Option(
      name = "use_action_cache",
      defaultValue = "true",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {
        OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION,
        OptionEffectTag.HOST_MACHINE_RESOURCE_OPTIMIZATIONS
      },
      help = "Whether to use the action cache"
    )
    public boolean useActionCache;
  }

  /** Converter for jobs: [0, MAX_JOBS] or "auto". */
  public static class JobsConverter extends RangeConverter {
    /** If not null, indicates the value to return when "auto" is selected. Useful for cases
     * where the number of jobs is bound by another factor different than what we compute here. */
    private static Integer fixedAutoJobs;
    // NOTE(review): fixedAutoJobs is mutable static state shared by all converter
    // instances — set only via setFixedAutoJobs (test/override hook below).

    public JobsConverter() {
      super(0, MAX_JOBS);
    }

    @Override
    public Integer convert(String input) throws OptionsParsingException {
      if (input.equals("auto")) {
        int jobs;
        if (fixedAutoJobs == null) {
          // Derive jobs from detected CPU capacity, clamped to MAX_JOBS.
          jobs = (int) Math.ceil(LocalHostCapacity.getLocalHostCapacity().getCpuUsage());
          if (jobs > MAX_JOBS) {
            logger.warning(
                "Detected "
                    + jobs
                    + " processors, which exceed the maximum allowed number of jobs of "
                    + MAX_JOBS
                    + "; something seems wrong");
            jobs = MAX_JOBS;
          }
        } else {
          jobs = fixedAutoJobs;
        }
        logger.info("Flag \"jobs\" was set to \"auto\"; using " + jobs + " jobs");
        return jobs;
      } else {
        return super.convert(input);
      }
    }

    @Override
    public String getTypeDescription() {
      return "\"auto\" or " + super.getTypeDescription();
    }

    /**
     * Sets the value to return by this converter when "auto" is selected.
     *
     * @param jobs the number of jobs to return, or null to reenable automated detection
     */
    public static void setFixedAutoJobs(Integer jobs) {
      Preconditions.checkArgument(jobs == null || jobs <= MAX_JOBS);
      fixedAutoJobs = jobs;
    }
  }

  /**
   * Converter for progress_report_interval: [0, 3600].
   */
  public static class ProgressReportIntervalConverter extends RangeConverter {
    public ProgressReportIntervalConverter() {
      super(0, 3600);
    }
  }

  @VisibleForTesting public static final int MAX_JOBS = 3000;
  private static final int JOBS_TOO_HIGH_WARNING = 1500;

  private final UUID id;
  // Lazily resolves an options class to its parsed instance; see the ctor.
  private final LoadingCache<Class<? extends OptionsBase>, Optional<OptionsBase>> optionsCache;

  /** A human-readable description of all the non-default option settings. */
  private final String optionsDescription;

  /**
   * The name of the Blaze command that the user invoked.
   * Used for --announce.
   */
  private final String commandName;

  private final OutErr outErr;
  private final List<String> targets;

  private long startTimeMillis = 0; // milliseconds since UNIX epoch.

  private boolean needsInstrumentationFilter;
  private boolean runningInEmacs;
  private boolean runTests;

  // Options classes that every request must be able to resolve; checked eagerly
  // in the constructor so misconfiguration fails fast.
  private static final ImmutableList<Class<? extends OptionsBase>> MANDATORY_OPTIONS =
      ImmutableList.of(
          BuildRequestOptions.class,
          PackageCacheOptions.class,
          SkylarkSemanticsOptions.class,
          LoadingOptions.class,
          BuildView.Options.class,
          ExecutionOptions.class);

  private BuildRequest(String commandName,
                       final OptionsProvider options,
                       final OptionsProvider startupOptions,
                       List<String> targets,
                       OutErr outErr,
                       UUID id,
                       long startTimeMillis) {
    this.commandName = commandName;
    this.optionsDescription = OptionsUtils.asShellEscapedString(options);
    this.outErr = outErr;
    this.targets = targets;
    this.id = id;
    this.startTimeMillis = startTimeMillis;
    // Look up in command options first, falling back to startup options.
    this.optionsCache = CacheBuilder.newBuilder()
        .build(new CacheLoader<Class<? extends OptionsBase>, Optional<OptionsBase>>() {
          @Override
          public Optional<OptionsBase> load(Class<? extends OptionsBase> key) throws Exception {
            OptionsBase result = options.getOptions(key);
            if (result == null && startupOptions != null) {
              result = startupOptions.getOptions(key);
            }
            return Optional.fromNullable(result);
          }
        });

    for (Class<? extends OptionsBase> optionsClass : MANDATORY_OPTIONS) {
      Preconditions.checkNotNull(getOptions(optionsClass));
    }
  }

  /**
   * Returns a unique identifier that universally identifies this build.
   */
  public UUID getId() {
    return id;
  }

  /**
   * Returns the name of the Blaze command that the user invoked.
   */
  public String getCommandName() {
    return commandName;
  }

  /**
   * Set to true if this build request was initiated by Emacs.
   * (Certain output formatting may be necessary.)
   */
  public void setRunningInEmacs() {
    runningInEmacs = true;
  }

  boolean isRunningInEmacs() {
    return runningInEmacs;
  }

  /**
   * Enables test execution for this build request.
   */
  public void setRunTests() {
    runTests = true;
  }

  /**
   * Returns true if tests should be run by the build tool.
   */
  public boolean shouldRunTests() {
    return runTests;
  }

  /**
   * Returns the (immutable) list of targets to build in commandline
   * form.
   */
  public List<String> getTargets() {
    return targets;
  }

  /**
   * Returns the output/error streams to which errors and progress messages
   * should be sent during the fulfillment of this request.
   */
  public OutErr getOutErr() {
    return outErr;
  }

  @Override
  @SuppressWarnings("unchecked")
  public <T extends OptionsBase> T getOptions(Class<T> clazz) {
    try {
      return (T) optionsCache.get(clazz).orNull();
    } catch (ExecutionException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Returns the set of command-line options specified for this request.
   */
  public BuildRequestOptions getBuildOptions() {
    return getOptions(BuildRequestOptions.class);
  }

  /**
   * Returns the set of options related to the loading phase.
   */
  public PackageCacheOptions getPackageCacheOptions() {
    return getOptions(PackageCacheOptions.class);
  }

  /**
   * Returns the set of options related to the loading phase.
   */
  public LoadingOptions getLoadingOptions() {
    return getOptions(LoadingOptions.class);
  }

  /**
   * Returns the set of command-line options related to the view specified for
   * this request.
   */
  public BuildView.Options getViewOptions() {
    return getOptions(BuildView.Options.class);
  }

  /**
   * Returns the set of execution options specified for this request.
   */
  public ExecutionOptions getExecutionOptions() {
    return getOptions(ExecutionOptions.class);
  }

  /**
   * Returns the human-readable description of the non-default options
   * for this build request.
   */
  public String getOptionsDescription() {
    return optionsDescription;
  }

  /**
   * Return the time (according to System.currentTimeMillis()) at which the
   * service of this request was started.
   */
  public long getStartTime() {
    return startTimeMillis;
  }

  public void setNeedsInstrumentationFilter(boolean needInstrumentationFilter) {
    this.needsInstrumentationFilter = needInstrumentationFilter;
  }

  public boolean needsInstrumentationFilter() {
    return needsInstrumentationFilter;
  }

  /**
   * Validates the options for this BuildRequest.
   *
   * <p>Issues warnings or throws {@code InvalidConfigurationException} for option settings that
   * conflict.
   *
   * @return list of warnings
   */
  public List<String> validateOptions() throws InvalidConfigurationException {
    List<String> warnings = new ArrayList<>();

    int localTestJobs = getExecutionOptions().localTestJobs;
    if (localTestJobs < 0) {
      throw new InvalidConfigurationException(String.format(
          "Invalid parameter for --local_test_jobs: %d. Only values 0 or greater are "
              + "allowed.", localTestJobs));
    }
    int jobs = getBuildOptions().jobs;
    if (localTestJobs > jobs) {
      warnings.add(
          String.format("High value for --local_test_jobs: %d. This exceeds the value for --jobs: "
              + "%d. Only up to %d local tests will run concurrently.",
              localTestJobs, jobs, jobs));
    }

    // Validate other BuildRequest options.
    if (getBuildOptions().verboseExplanations && getBuildOptions().explanationPath == null) {
      warnings.add("--verbose_explanations has no effect when --explain=<file> is not enabled");
    }

    return warnings;
  }

  /** Creates a new TopLevelArtifactContext from this build request. */
  public TopLevelArtifactContext getTopLevelArtifactContext() {
    return new TopLevelArtifactContext(
        getOptions(ExecutionOptions.class).testStrategy.equals("exclusive"),
        OutputGroupProvider.determineOutputGroups(getBuildOptions().outputGroups));
  }

  /** @return the --experimental_multi_cpu values as a sorted immutable set. */
  public ImmutableSortedSet<String> getMultiCpus() {
    return ImmutableSortedSet.copyOf(getBuildOptions().multiCpus);
  }

  /** @return an immutable copy of the --aspects values. */
  public ImmutableList<String> getAspects() {
    return ImmutableList.copyOf(getBuildOptions().aspects);
  }

  /** Static factory: builds a request and applies the Emacs flag from the command options. */
  public static BuildRequest create(String commandName, OptionsProvider options,
      OptionsProvider startupOptions,
      List<String> targets, OutErr outErr, UUID commandId, long commandStartTime) {

    BuildRequest request = new BuildRequest(commandName, options, startupOptions, targets, outErr,
        commandId, commandStartTime);

    // All this, just to pass a global boolean from the client to the server. :(
    if (options.getOptions(BlazeCommandEventHandler.Options.class).runningInEmacs) {
      request.setRunningInEmacs();
    }

    return request;
  }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.macro;

import com.intellij.ide.DataManager;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.actionSystem.impl.SimpleDataContext;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.text.StringUtil;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;

/**
 * Modal dialog that lists all registered {@link Macro}s (with a live preview of
 * each macro's expansion) and lets the user pick one to insert. OK is labelled
 * "Insert" and is only enabled while a macro is selected.
 */
public final class MacrosDialog extends DialogWrapper {
  // Model backing the macro list; holds MacroWrapper entries.
  private final DefaultListModel myMacrosModel;
  // Single-selection list of macros.
  private final JList myMacrosList;
  // Read-only preview of the selected macro's expansion.
  private final JTextArea myPreviewTextarea;

  public MacrosDialog(Project project) {
    super(project, true);
    // Pre-compute previews against the project context so selection is instant.
    MacroManager.getInstance().cacheMacrosPreview(SimpleDataContext.getProjectContext(project));
    setTitle(IdeBundle.message("title.macros"));
    setOKButtonText(IdeBundle.message("button.insert"));

    myMacrosModel = new DefaultListModel();
    myMacrosList = new JList(myMacrosModel);
    myPreviewTextarea = new JTextArea();

    init();
  }

  public MacrosDialog(Component parent) {
    super(parent, true);
    // Same as the Project constructor, but previews use the component's data context.
    MacroManager.getInstance().cacheMacrosPreview(DataManager.getInstance().getDataContext(parent));
    setTitle(IdeBundle.message("title.macros"));
    setOKButtonText(IdeBundle.message("button.insert"));

    myMacrosModel = new DefaultListModel();
    myMacrosList = new JList(myMacrosModel);
    myPreviewTextarea = new JTextArea();

    init();
  }

  // Fills the list model with sorted macros, wires listeners, and selects the
  // first entry (or disables OK when there are no macros at all).
  protected void init() {
    super.init();

    java.util.List<Macro> macros = new ArrayList<Macro>(MacroManager.getInstance().getMacros());
    Collections.sort(macros, new Comparator<Macro>() {
      public int compare(Macro macro1, Macro macro2) {
        String name1 = macro1.getName();
        String name2 = macro2.getName();
        // Prefix non-'/' names with NUL so they sort before '/'-prefixed ones.
        if (!StringUtil.startsWithChar(name1, '/')) {
          name1 = ZERO + name1;
        }
        if (!StringUtil.startsWithChar(name2, '/')) {
          name2 = ZERO + name2;
        }
        return name1.compareToIgnoreCase(name2);
      }

      private final String ZERO = new String(new char[] {0});
    });
    for (Macro macro : macros) {
      myMacrosModel.addElement(new MacroWrapper(macro));
    }

    addListeners();
    if (myMacrosModel.size() > 0){
      myMacrosList.setSelectedIndex(0);
    }
    else{
      setOKActionEnabled(false);
    }
  }

  protected Action[] createActions() {
    return new Action[]{getOKAction(),getCancelAction(),getHelpAction()};
  }

  protected void doHelpAction() {
    HelpManager.getInstance().invokeHelp("preferences.externalToolsMacro");
  }

  // Key under which the dialog's size/position is persisted.
  protected String getDimensionServiceKey(){
    return "#com.intellij.ide.macro.MacrosDialog";
  }

  // Lays out the macro list above the preview area, each with a label.
  protected JComponent createCenterPanel() {
    JPanel panel = new JPanel(new GridBagLayout());
    GridBagConstraints constr;

    // list label
    constr = new GridBagConstraints();
    constr.gridy = 0;
    constr.anchor = GridBagConstraints.WEST;
    constr.insets = new Insets(5, 5, 0, 5);
    panel.add(new JLabel(IdeBundle.message("label.macros")), constr);

    // macros list
    constr = new GridBagConstraints();
    constr.gridy = 1;
    constr.weightx = 1;
    constr.weighty = 1;
    constr.insets = new Insets(0, 5, 0, 5);
    constr.fill = GridBagConstraints.BOTH;
    constr.anchor = GridBagConstraints.WEST;
    panel.add(new JScrollPane(myMacrosList), constr);
    myMacrosList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    myMacrosList.setPreferredSize(null);

    // preview label
    constr = new GridBagConstraints();
    constr.gridx = 0;
    constr.gridy = 2;
    constr.anchor = GridBagConstraints.WEST;
    constr.insets = new Insets(5, 5, 0, 5);
    panel.add(new JLabel(IdeBundle.message("label.macro.preview")), constr);

    // preview
    constr = new GridBagConstraints();
    constr.gridx = 0;
    constr.gridy = 3;
    constr.weightx = 1;
    constr.weighty = 1;
    constr.fill = GridBagConstraints.BOTH;
    constr.anchor = GridBagConstraints.WEST;
    constr.insets = new Insets(0, 5, 5, 5);
    panel.add(new JScrollPane(myPreviewTextarea), constr);
    myPreviewTextarea.setEditable(false);
    myPreviewTextarea.setLineWrap(true);
    myPreviewTextarea.setPreferredSize(null);

    panel.setPreferredSize(new Dimension(400, 500));

    return panel;
  }

  protected JComponent createNorthPanel() {
    return null;
  }

  /**
   * Macro info shown in list
   */
  private static final class MacroWrapper {
    private final Macro myMacro;

    public MacroWrapper(Macro macro) {
      myMacro = macro;
    }

    public String toString() {
      return myMacro.getName() + " - " + myMacro.getDescription();
    }
  }

  // Wires selection -> preview update, and double-click -> accept the dialog.
  private void addListeners() {
    myMacrosList.getSelectionModel().addListSelectionListener(
      new ListSelectionListener() {
        public void valueChanged(ListSelectionEvent e) {
          Macro macro = getSelectedMacro();
          if (macro == null){
            myPreviewTextarea.setText("");
            setOKActionEnabled(false);
          }
          else{
            myPreviewTextarea.setText(macro.preview());
            setOKActionEnabled(true);
          }
        }
      }
    );

    // doubleclick support
    myMacrosList.addMouseListener(
      new MouseAdapter() {
        public void mouseClicked(MouseEvent e) {
          if ((e.getClickCount() == 2) && (getSelectedMacro() != null)){
            close(OK_EXIT_CODE);
          }
        }
      }
    );
  }

  // Returns the currently selected Macro, or null when nothing is selected.
  public Macro getSelectedMacro() {
    MacroWrapper macroWrapper = (MacroWrapper)myMacrosList.getSelectedValue();
    if (macroWrapper != null){
      return macroWrapper.myMacro;
    }
    return null;
  }

  public JComponent getPreferredFocusedComponent() {
    return myMacrosList;
  }
}
package ualberta.cmput301w16t16.glamorousborrowingwhaleapp;

import android.app.Application;
import android.test.ApplicationTestCase;

import java.lang.reflect.Array;
import java.util.ArrayList;

/**
 * Tests for the ElasticSearch helper: adding and retrieving users, adding and
 * searching items, and deleting users and items from the remote index.
 *
 * <p>Most retrieval assertions are currently disabled because the corresponding
 * get/delete calls were "producing an error"; they are kept as commented-out
 * TODOs so the intended coverage remains visible.
 *
 * Created by Martina on 16-03-13.
 */
public class ElasticSearchTest extends ApplicationTestCase<Application> {

    public ElasticSearchTest() {
        super(Application.class);
    }

    /**
     * Builds a user with the given name/email who is borrowing a baseball bat
     * and renting out skates. Extracted because three tests duplicated this
     * setup (and the duplication hid a wrong-variable bug, fixed below).
     */
    private User createUser(String username, String email) {
        Item borrowed = new Item();
        borrowed.setTitle("baseball bat");
        Item rented = new Item();
        rented.setTitle("skates");

        ItemList borrowing = new ItemList();
        borrowing.add(borrowed);
        ItemList renting = new ItemList();
        renting.add(rented);

        User user = new User(username, email, "123-456-7890");
        user.setItemsBorrowing(borrowing);
        user.setItemsRenting(renting);
        return user;
    }

    // assert that a user can be added to the elasticsearch
    public void testElasticAddUser() {
        User user = createUser("rosey", "rosey@gmail.com");
        new ElasticSearch.elasticAddUser().execute(user);

        // assert that the user is now in elastic search and can be retrieved
        // TODO: retrieval is currently producing an error; re-enable once fixed.
        //User returnedUser = ElasticSearch.elasticGetUserByName().execute(username);
        //assertEquals(user, returnedUser);
    }

    // assert that a user can be retrieved using their username
    public void testElasticGetUser() {
        // creating and adding the first user
        User user = createUser("rosey", "rosey@gmail.com");
        new ElasticSearch.elasticAddUser().execute(user);

        // assert that the user is now in elastic search and can be retrieved
        // TODO: retrieval is currently producing an error; re-enable once fixed.
        //User returnedUser = ElasticSearch.elasticGetUser().execute(username);
        //assertEquals(user, returnedUser);

        // creating and adding the second user.
        // BUG FIX: previously user2's item lists were set on 'user' and 'user'
        // was indexed a second time, so user2 was never actually added.
        User user2 = createUser("arrow", "arrow@gmail.com");
        new ElasticSearch.elasticAddUser().execute(user2);

        // assert that a specific user can be retrieved from elastic search from multiple options
        // TODO: retrieval is currently producing an error; re-enable once fixed.
        //User returnedUser2 = ElasticSearch.elasticGetUser().execute(username2);
        //assertEquals(user2, returnedUser2);

        // assert that nothing is returned if the user is not in elastic search
        // TODO: retrieval is currently producing an error; re-enable once fixed.
        //User returnedUser3 = ElasticSearch.elasticGetUser().execute("NoSuchUsername");
        //assertNull(returnedUser3);
    }

    // assert that the correct items are returned as search results
    public void testElasticGetItems() {
        Item item1 = new Item();
        item1.setTitle("one");
        Item item2 = new Item();
        item2.setTitle("two");
        Item item3 = new Item();
        item3.setTitle("three");

        // assert that nothing is returned when elastic search is empty
        ArrayList<String> params = new ArrayList<String>();
        params.add("one");
        // TODO: search is currently producing an error; re-enable once fixed.
        //ItemList items = new ElasticSearch.elasticGetItems().execute(params);
        ItemList expectedResult = new ItemList();
        //assertEquals(items, expectedResult);

        // adding all three items to the elastic search
        new ElasticSearch.elasticAddItem().execute(item1);
        new ElasticSearch.elasticAddItem().execute(item2);
        new ElasticSearch.elasticAddItem().execute(item3);

        // assert that the correct items are returned when searching
        params.clear();
        params.add("one");
        // TODO: search is currently producing an error; re-enable once fixed.
        //ItemList items1 = new ElasticSearch.elasticGetItems().execute(params);
        expectedResult.clear();
        expectedResult.add(item1);
        //assertEquals(items, expectedResult);

        // assert that nothing is returned if there is nothing matching in elastic search
        // (previously this comment wrapped onto the next line, leaving a stray
        // 'search' token outside the comment — a compile error)
        params.clear();
        params.add("four");
        // TODO: search is currently producing an error; re-enable once fixed.
        //ItemList items2 = new ElasticSearch.elasticGetItems().execute(params);
        expectedResult.clear();
        //assertEquals(items2, expectedResult);
    }

    // assert that items can be added to elastic search
    public void testElasticAddItem() {
        Item item = new Item();
        item.setTitle("one");
        new ElasticSearch.elasticAddItem().execute(item);

        // assert that the item was added to elastic search
        ArrayList<String> params = new ArrayList<String>();
        params.add("one");
        // TODO: search is currently producing an error; re-enable once fixed.
        //ItemList items = new ElasticSearch.elasticGetItems().execute(params);
        //assertTrue(items.hasItem(item));
    }

    // assert that an item or user can successfully be deleted from elastic search
    public void testElasticDelete() {
        // assert that an item can be deleted from elastic search
        Item item = new Item();
        item.setTitle("one");
        new ElasticSearch.elasticAddItem().execute(item);

        // assert that the item was added to elastic search
        ArrayList<String> params = new ArrayList<String>();
        params.add("one");
        // TODO: search/delete are currently producing errors; re-enable once fixed.
        //ItemList items = new ElasticSearch.elasticGetItems().execute(params);
        //assertTrue(items.hasItem(item));

        // delete the item from elastic search
        //new ElasticSearch.elasticDelete().execute(item);
        //ItemList items2 = new ElasticSearch.elasticGetItems().execute(params);
        //assertFalse(items2.hasItem(item));

        // assert that a user can be deleted from elastic search
        // (previously a wrapped "<not executed correctly>" comment left a stray
        // 'correctly>' token outside the comment — a compile error)
        User user = createUser("rosey", "rosey@gmail.com");
        new ElasticSearch.elasticAddUser().execute(user);

        // assert that the user is now in elastic search and can be deleted
        // TODO: retrieval/delete are currently producing errors; re-enable once fixed.
        //User returnedUser = new ElasticSearch.elasticGetUser().execute(username);
        //assertEquals(user, returnedUser);

        // deleting the user from elastic search
        //new ElasticSearch.elasticDelete().execute(username);

        // assert that the user is no longer in elastic search
        //User returnedUser2 = new ElasticSearch.elasticGetUser().execute(username);
        //assertNull(returnedUser);
    }

    // TODO: not yet implemented.
    public void testElasticFind() {
    }
}
/********************************************************************************** * $URL:https://source.sakaiproject.org/svn/osp/trunk/common/tool-lib/src/java/org/theospi/portfolio/guidance/tool/GuidanceTool.java $ * $Id:GuidanceTool.java 9134 2006-05-08 20:28:42Z chmaurer@iupui.edu $ *********************************************************************************** * * Copyright (c) 2005, 2006, 2007, 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.theospi.portfolio.guidance.tool; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.faces.context.ExternalContext; import javax.faces.context.FacesContext; import org.sakaiproject.content.api.FilePickerHelper; import org.sakaiproject.content.api.ResourceEditingHelper; import org.sakaiproject.content.api.ContentCollection; import org.sakaiproject.content.api.ContentHostingService; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.cover.EntityManager; import org.sakaiproject.tool.api.Placement; import org.sakaiproject.tool.api.ToolSession; import org.sakaiproject.tool.cover.SessionManager; import org.sakaiproject.tool.cover.ToolManager; import org.sakaiproject.metaobj.shared.FormHelper; import 
org.sakaiproject.exception.TypeException; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.exception.PermissionException; import org.sakaiproject.user.api.User; import org.sakaiproject.user.cover.UserDirectoryService; import org.sakaiproject.site.cover.SiteService; import org.theospi.portfolio.guidance.mgt.GuidanceHelper; import org.theospi.portfolio.guidance.mgt.GuidanceManager; import org.theospi.portfolio.guidance.model.Guidance; import org.theospi.portfolio.guidance.model.GuidanceItem; import org.theospi.portfolio.guidance.model.GuidanceItemAttachment; import org.theospi.portfolio.shared.tool.HelperToolBase; /** * Created by IntelliJ IDEA. * User: John Ellis * Date: Nov 11, 2005 * Time: 3:33:43 PM * To change this template use File | Settings | File Templates. */ public class GuidanceTool extends HelperToolBase { private DecoratedGuidance current = null; private String formTypeId = null; private String formId = null; private String formDisplayName = null; private boolean showExamples = true; private boolean showInstructions = true; private boolean showRationale = true; private boolean showRubric = true; private boolean showExpectations = true; private GuidanceManager guidanceManager; private ContentHostingService contentHostingService; public static final String ATTACHMENT_TYPE = "org.theospi.portfolio.guidance.attachmentType"; public GuidanceManager getGuidanceManager() { return guidanceManager; } public void setGuidanceManager(GuidanceManager guidanceManager) { this.guidanceManager = guidanceManager; } public ContentHostingService getContentHostingService() { return contentHostingService; } public void setContentHostingService(ContentHostingService contentHostingService) { this.contentHostingService = contentHostingService; } public String getGuidanceInstructions() { return getCurrent().getBase().getDescription(); } public DecoratedGuidance getCurrent() { ToolSession session = SessionManager.getCurrentToolSession(); if 
(session.getAttribute(GuidanceManager.CURRENT_GUIDANCE_ID) != null) { String id = (String)session.getAttribute(GuidanceManager.CURRENT_GUIDANCE_ID); current = new DecoratedGuidance(this, getGuidanceManager().getGuidance(id)); session.removeAttribute(GuidanceManager.CURRENT_GUIDANCE_ID); } else if (session.getAttribute(GuidanceManager.CURRENT_GUIDANCE) != null) { current = new DecoratedGuidance(this, (Guidance)session.getAttribute(GuidanceManager.CURRENT_GUIDANCE)); guidanceManager.assureAccess(current.getBase()); session.removeAttribute(GuidanceManager.CURRENT_GUIDANCE); } else if (current != null) { guidanceManager.assureAccess(current.getBase()); } return current; } public String processActionManageAttachments(String type) { ExternalContext context = FacesContext.getCurrentInstance().getExternalContext(); ToolSession session = SessionManager.getCurrentToolSession(); session.setAttribute(FilePickerHelper.FILE_PICKER_ATTACH_LINKS, new Boolean(true).toString()); session.setAttribute(GuidanceTool.ATTACHMENT_TYPE, type); GuidanceItem item = getCurrent().getBase().getItem(type); List attachments = item.getAttachments(); List attachmentRefs = EntityManager.newReferenceList(); for (Iterator i=attachments.iterator();i.hasNext();) { GuidanceItemAttachment attachment = (GuidanceItemAttachment)i.next(); attachmentRefs.add(attachment.getBaseReference().getBase()); } session.setAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS, attachmentRefs); try { context.redirect("sakai.filepicker.helper/tool"); } catch (IOException e) { throw new RuntimeException("Failed to redirect to helper", e); } return null; } public String processActionSave() { getGuidanceManager().saveGuidance(getCurrent().getBase()); ToolSession session = SessionManager.getCurrentToolSession(); session.setAttribute(GuidanceManager.CURRENT_GUIDANCE, getCurrent().getBase()); cleanup(session); return returnToCaller(); } public String processActionCancel() { ToolSession session = 
SessionManager.getCurrentToolSession(); session.removeAttribute(GuidanceManager.CURRENT_GUIDANCE); session.removeAttribute(GuidanceManager.CURRENT_GUIDANCE_ID); cleanup(session); current = null; return returnToCaller(); } protected void cleanup(ToolSession toolSession) { toolSession.removeAttribute(GuidanceHelper.SHOW_EXAMPLE_FLAG); toolSession.removeAttribute(GuidanceHelper.SHOW_INSTRUCTION_FLAG); toolSession.removeAttribute(GuidanceHelper.SHOW_RATIONALE_FLAG); toolSession.removeAttribute(GuidanceHelper.SHOW_RUBRIC_FLAG); toolSession.removeAttribute(GuidanceHelper.SHOW_EXPECTATIONS_FLAG); toolSession.removeAttribute(GuidanceHelper.CONTEXT); toolSession.removeAttribute(GuidanceHelper.CONTEXT2); } public Reference decorateReference(String reference) { return getGuidanceManager().decorateReference(getCurrent().getBase(), reference); } /** * sample * @return */ public String getLastSavedId() { ToolSession session = SessionManager.getCurrentToolSession(); Guidance guidance = (Guidance) session.getAttribute(GuidanceManager.CURRENT_GUIDANCE); if (guidance != null) { session.removeAttribute(GuidanceManager.CURRENT_GUIDANCE); return guidance.getId().getValue(); } return "none"; } /** * sample * @return */ public List getSampleGuidances() { Placement placement = ToolManager.getCurrentPlacement(); String currentSiteId = placement.getContext(); List returned = new ArrayList(); List orig = getGuidanceManager().listGuidances(currentSiteId); for (Iterator i=orig.iterator();i.hasNext();) { Guidance guidance = (Guidance)i.next(); returned.add(new DecoratedGuidance(this, guidance)); } return returned; } public boolean isInstructionsRendered() { //boolean showInstructions = true; if (getAttribute(GuidanceHelper.SHOW_INSTRUCTION_FLAG) != null) { if (getAttribute(GuidanceHelper.SHOW_INSTRUCTION_FLAG) instanceof Boolean) { showInstructions = ((Boolean) getAttribute(GuidanceHelper.SHOW_INSTRUCTION_FLAG)).booleanValue(); } else { showInstructions = "true".equalsIgnoreCase((String) 
getAttribute(GuidanceHelper.SHOW_INSTRUCTION_FLAG)); } removeAttribute(GuidanceHelper.SHOW_INSTRUCTION_FLAG); } return showInstructions; } public boolean isExamplesRendered() { //boolean showExamples = true; if (getAttribute(GuidanceHelper.SHOW_EXAMPLE_FLAG) != null) { if (getAttribute(GuidanceHelper.SHOW_EXAMPLE_FLAG) instanceof Boolean) { showExamples = ((Boolean) getAttribute(GuidanceHelper.SHOW_EXAMPLE_FLAG)).booleanValue(); } else { showExamples = "true".equalsIgnoreCase((String) getAttribute(GuidanceHelper.SHOW_EXAMPLE_FLAG)); } removeAttribute(GuidanceHelper.SHOW_EXAMPLE_FLAG); } return showExamples; } public boolean isRationaleRendered() { //boolean showRationale = true; if (getAttribute(GuidanceHelper.SHOW_RATIONALE_FLAG) != null) { if (getAttribute(GuidanceHelper.SHOW_RATIONALE_FLAG) instanceof Boolean) { showRationale = ((Boolean) getAttribute(GuidanceHelper.SHOW_RATIONALE_FLAG)).booleanValue(); } else { showRationale = "true".equalsIgnoreCase((String) getAttribute(GuidanceHelper.SHOW_RATIONALE_FLAG)); } removeAttribute(GuidanceHelper.SHOW_RATIONALE_FLAG); } return showRationale; } public boolean isRubricRendered() { //boolean showRationale = true; if (getAttribute(GuidanceHelper.SHOW_RUBRIC_FLAG) != null) { if (getAttribute(GuidanceHelper.SHOW_RUBRIC_FLAG) instanceof Boolean) { showRubric = ((Boolean) getAttribute(GuidanceHelper.SHOW_RUBRIC_FLAG)).booleanValue(); } else { showRubric = "true".equalsIgnoreCase((String) getAttribute(GuidanceHelper.SHOW_RUBRIC_FLAG)); } removeAttribute(GuidanceHelper.SHOW_RUBRIC_FLAG); } return showRubric; } public boolean isExpectationsRendered() { //boolean showRationale = true; if (getAttribute(GuidanceHelper.SHOW_EXPECTATIONS_FLAG) != null) { if (getAttribute(GuidanceHelper.SHOW_EXPECTATIONS_FLAG) instanceof Boolean) { showExpectations = ((Boolean) getAttribute(GuidanceHelper.SHOW_EXPECTATIONS_FLAG)).booleanValue(); } else { showExpectations = "true".equalsIgnoreCase((String) 
getAttribute(GuidanceHelper.SHOW_EXPECTATIONS_FLAG)); } removeAttribute(GuidanceHelper.SHOW_EXPECTATIONS_FLAG); } return showExpectations; } /** * sample * @param guidance * @return */ public String processActionEdit(Guidance guidance) { guidance = getGuidanceManager().getGuidance(guidance.getId()); invokeTool(guidance, new HashMap()); return null; } public String processActionEditInstruction(Guidance guidance) { guidance = getGuidanceManager().getGuidance(guidance.getId()); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "true"); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "false"); invokeTool(guidance, typeFlags); return null; } public String processActionEditExample(Guidance guidance) { guidance = getGuidanceManager().getGuidance(guidance.getId()); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "true"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "false"); invokeTool(guidance, typeFlags); return null; } public String processActionEditRationale(Guidance guidance) { guidance = getGuidanceManager().getGuidance(guidance.getId()); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "true"); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "false"); invokeTool(guidance, typeFlags); return null; } public String processActionEditRubric(Guidance guidance) { guidance = getGuidanceManager().getGuidance(guidance.getId()); Map typeFlags = 
new HashMap(); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "true"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "false"); invokeTool(guidance, typeFlags); return null; } public String processActionEditExpectations(Guidance guidance) { guidance = getGuidanceManager().getGuidance(guidance.getId()); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "true"); invokeTool(guidance, typeFlags); return null; } /** * sample * @param guidance * @return */ public String processActionDelete(Guidance guidance) { getGuidanceManager().deleteGuidance(guidance); current = null; return "list"; } /** * sample * @param guidance * @return */ public String processActionView(Guidance guidance) { guidance = getGuidanceManager().getGuidance(guidance.getId()); invokeToolView(guidance.getId().getValue()); return null; } /** * sample * @return */ public String processActionNew() { Placement placement = ToolManager.getCurrentPlacement(); String currentSite = placement.getContext(); Guidance newGuidance = getGuidanceManager().createNew("Sample Guidance", currentSite, null, "", ""); invokeTool(newGuidance, new HashMap()); return null; } public String processActionNewInstruction() { Placement placement = ToolManager.getCurrentPlacement(); String currentSite = placement.getContext(); Guidance newGuidance = getGuidanceManager().createNew("Sample Guidance", currentSite, null, "", ""); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "true"); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "false"); 
typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "false"); invokeTool(newGuidance, typeFlags); return null; } public String processActionNewExample() { Placement placement = ToolManager.getCurrentPlacement(); String currentSite = placement.getContext(); Guidance newGuidance = getGuidanceManager().createNew("Sample Guidance", currentSite, null, "", ""); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "true"); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "false"); invokeTool(newGuidance, typeFlags); return null; } public String processActionNewRationale() { Placement placement = ToolManager.getCurrentPlacement(); String currentSite = placement.getContext(); Guidance newGuidance = getGuidanceManager().createNew("Sample Guidance", currentSite, null, "", ""); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "true"); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "false"); invokeTool(newGuidance, typeFlags); return null; } public String processActionNewRubric() { Placement placement = ToolManager.getCurrentPlacement(); String currentSite = placement.getContext(); Guidance newGuidance = getGuidanceManager().createNew("Sample Guidance", currentSite, null, "", ""); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "true"); 
typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "false"); invokeTool(newGuidance, typeFlags); return null; } public String processActionNewExpectations() { Placement placement = ToolManager.getCurrentPlacement(); String currentSite = placement.getContext(); Guidance newGuidance = getGuidanceManager().createNew("Sample Guidance", currentSite, null, "", ""); Map typeFlags = new HashMap(); typeFlags.put(GuidanceHelper.SHOW_RATIONALE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXAMPLE_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_INSTRUCTION_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_RUBRIC_FLAG, "false"); typeFlags.put(GuidanceHelper.SHOW_EXPECTATIONS_FLAG, "true"); invokeTool(newGuidance, typeFlags); return null; } /** * sample * @param guidance */ protected void invokeTool(Guidance guidance, Map typeFlags) { ExternalContext context = FacesContext.getCurrentInstance().getExternalContext(); ToolSession session = SessionManager.getCurrentToolSession(); session.setAttribute(GuidanceManager.CURRENT_GUIDANCE, guidance); for (Iterator iter = typeFlags.entrySet().iterator(); iter.hasNext();) { Entry entry = (Entry) iter.next(); session.setAttribute(entry.getKey().toString(), entry.getValue().toString()); } try { context.redirect("osp.guidance.helper/tool"); } catch (IOException e) { throw new RuntimeException("Failed to redirect to helper", e); } } /** * sample * @param id */ protected void invokeToolView(String id) { ExternalContext context = FacesContext.getCurrentInstance().getExternalContext(); ToolSession session = SessionManager.getCurrentToolSession(); session.setAttribute(GuidanceManager.CURRENT_GUIDANCE_ID, id); try { context.redirect("osp.guidance.helper/view"); } catch (IOException e) { throw new RuntimeException("Failed to redirect to helper", e); } } public String processTestResourceHelper() { ExternalContext context = FacesContext.getCurrentInstance().getExternalContext(); ToolSession session = SessionManager.getCurrentToolSession(); 
session.removeAttribute(ResourceEditingHelper.ATTACHMENT_ID); session.setAttribute(ResourceEditingHelper.CREATE_TYPE, ResourceEditingHelper.CREATE_TYPE_FORM); session.setAttribute(ResourceEditingHelper.CREATE_SUB_TYPE, formTypeId); session.setAttribute(FormHelper.NEW_FORM_DISPLAY_NAME_TAG, getFormDisplayName()); try { session.setAttribute(FormHelper.PARENT_ID_TAG, getUserCollection().getId()); context.redirect("sakai.metaobj.form.helper/formHelper"); } catch (IOException e) { throw new RuntimeException("Failed to redirect to helper", e); } catch (IdUnusedException e) { throw new RuntimeException("Failed to redirect to helper", e); } catch (TypeException e) { throw new RuntimeException("Failed to redirect to helper", e); } catch (PermissionException e) { throw new RuntimeException("Failed to redirect to helper", e); } return null; } public String processTestResourceEditHelper() { ExternalContext context = FacesContext.getCurrentInstance().getExternalContext(); ToolSession session = SessionManager.getCurrentToolSession(); session.removeAttribute(ResourceEditingHelper.CREATE_TYPE); session.removeAttribute(ResourceEditingHelper.CREATE_SUB_TYPE); session.removeAttribute(ResourceEditingHelper.CREATE_PARENT); session.setAttribute(ResourceEditingHelper.CREATE_TYPE, ResourceEditingHelper.CREATE_TYPE_FORM); session.setAttribute(ResourceEditingHelper.ATTACHMENT_ID, getFormId()); try { context.redirect("sakai.metaobj.form.helper/formEditHelper"); } catch (IOException e) { throw new RuntimeException("Failed to redirect to helper", e); } return null; } public String processTestResourceViewHelper() { ExternalContext context = FacesContext.getCurrentInstance().getExternalContext(); ToolSession session = SessionManager.getCurrentToolSession(); session.removeAttribute(ResourceEditingHelper.CREATE_TYPE); session.removeAttribute(ResourceEditingHelper.CREATE_SUB_TYPE); session.removeAttribute(ResourceEditingHelper.CREATE_PARENT); session.setAttribute(ResourceEditingHelper.CREATE_TYPE, 
ResourceEditingHelper.CREATE_TYPE_FORM); session.setAttribute(ResourceEditingHelper.ATTACHMENT_ID, getFormId()); try { context.redirect("sakai.metaobj.formView.helper/formView.osp"); } catch (IOException e) { throw new RuntimeException("Failed to redirect to helper", e); } return null; } public String getFormTypeId() { return formTypeId; } public void setFormTypeId(String formTypeId) { this.formTypeId = formTypeId; } public String getFormId() { return formId; } public void setFormId(String formId) { this.formId = formId; } protected ContentCollection getUserCollection() throws TypeException, IdUnusedException, PermissionException { User user = UserDirectoryService.getCurrentUser(); String userId = user.getId(); String wsId = SiteService.getUserSiteId(userId); String wsCollectionId = getContentHostingService().getSiteCollection(wsId); ContentCollection collection = getContentHostingService().getCollection(wsCollectionId); return collection; } public String getFormDisplayName() { return formDisplayName; } public void setFormDisplayName(String formDisplayName) { this.formDisplayName = formDisplayName; } /** * Context (GuidanceHelper.CONTEXT) is used to describe the page/tool * that is being used in this helper. Context is the main title (ex. matrix or wizard name) * and context 2 is used for the subtitle (ex. matrix cell or wizard page). If left * blank, then nothing displays on the page. * @return */ public String getPageContext(){ String context = (String) getAttribute(GuidanceHelper.CONTEXT); return context != null ? context : ""; } /** * Context2 (GuidanceHelper.CONTEXT2) is used to describe the page/tool * that is being used in this helper. Context is the main title (ex. matrix or wizard name) * and Context2 is used for the subtitle (ex. matrix cell or wizard page). If left * blank, then nothing displays on the page. * @return */ public String getPageContext2(){ String context2 = (String) getAttribute(GuidanceHelper.CONTEXT2); return context2 != null ? 
context2 : ""; } }
package eta.serv;

import java.net.URL;
import java.net.URLClassLoader;
import java.net.MalformedURLException;
import java.io.File;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channel;
import java.nio.channels.Channels;
import java.util.List;
import java.util.ListIterator;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Iterator;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.security.ProtectionDomain;
import java.lang.reflect.InvocationTargetException;
// import java.lang.management.ThreadMXBean;
// import java.lang.management.ThreadInfo;
// import java.lang.management.ManagementFactory;

/**
 * Class-loader hierarchy and reflective bridge used by the Eta REPL server.
 *
 * <p>Three loaders are maintained:
 * <ul>
 *   <li>{@code parentReplClassLoader} - holds long-lived classpath entries
 *       (see {@link #addURLs});</li>
 *   <li>{@code replClassLoader} - a child loader into which REPL-compiled
 *       classes are defined ({@link #loadClasses}); it is thrown away and
 *       recreated by {@link #resetClasses} so REPL state can be discarded;</li>
 *   <li>{@code classQueryClassLoader} - used only for reflective class
 *       queries ({@link #queryClass}), loading without initialization.</li>
 * </ul>
 *
 * <p>All interaction with the Eta runtime (Closure application, evalIO,
 * Haskell String conversion, Template Haskell entry points) goes through
 * {@link java.lang.reflect reflection} against classes loaded via
 * {@code replClassLoader}, so that the runtime inside the REPL classpath is
 * used rather than the one eta-serv itself was compiled against.
 */
public class REPLClassLoader extends URLClassLoader {

    static {
        // Allow concurrent class loading on distinct class names.
        registerAsParallelCapable();
        /* NOTE: Uncomment below if this server process is mysteriously shutting down.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                System.err.println("SHUTTING DOWN!");
                ThreadMXBean threadMxBean = ManagementFactory.getThreadMXBean();
                for (ThreadInfo threadInfo : threadMxBean.dumpAllThreads(true, true)) {
                    System.err.print(threadInfo.toString());
                }
            }
        });
        */
    }

    // Parent loader for stable classpath entries shared across REPL resets.
    private static final REPLClassLoader parentReplClassLoader = new REPLClassLoader();
    // Child loader holding REPL-defined classes; replaced wholesale on resetClasses().
    private static ChildREPLClassLoader replClassLoader = new ChildREPLClassLoader();
    // Separate loader used only to answer class-metadata queries (queryClass).
    private static final REPLClassLoader classQueryClassLoader = new REPLClassLoader();

    /**
     * Child loader with conventional parent-first delegation: ask the parent
     * (which itself is child-first, see {@link REPLClassLoader#loadClass})
     * before defining/finding locally.
     */
    private static class ChildREPLClassLoader extends REPLClassLoader {
        static {
            registerAsParallelCapable();
        }

        public ChildREPLClassLoader() {
            super(parentReplClassLoader);
        }

        @Override
        public Class<?> loadClass(String name) throws ClassNotFoundException {
            synchronized (getClassLoadingLock(name)) {
                Class<?> clazz = findLoadedClass(name);
                if (clazz == null) {
                    try {
                        clazz = getParent().loadClass(name);
                    } catch (ClassNotFoundException e) {
                        clazz = findClass(name);
                    }
                }
                return clazz;
            }
        }
    }

    /** Root loader: no parent beyond the default, starts with an empty URL set. */
    private REPLClassLoader() {
        super(new URL[0]);
    }

    /** Loader chained under an explicit parent, starting with an empty URL set. */
    private REPLClassLoader(ClassLoader parent) {
        super(new URL[0], parent);
    }

    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
        synchronized (getClassLoadingLock(name)) {
            Class<?> clazz = findLoadedClass(name);
            if (clazz == null) {
                /* This class loader inverts the normal delegation order by attempting to
                   resolve it first. This is to prevent loading the Eta Runtime library from
                   the system classloader which conflicts with the version eta-serv itself
                   is using. */
                try {
                    clazz = findClass(name);
                } catch (ClassNotFoundException e) {
                    clazz = getParent().loadClass(name);
                }
            }
            return clazz;
        }
    }

    /** Adds classpath entries (file paths) to the shared parent loader. */
    public static void addURLs(String[] paths) throws MalformedURLException {
        for (String path: paths) {
            parentReplClassLoader.addURL(toClassPathURL(path));
        }
    }

    /** Adds classpath entries to the resettable child loader only. */
    public static void addChildURLs(String[] paths) throws MalformedURLException {
        for (String path: paths) {
            replClassLoader.addURL(toClassPathURL(path));
        }
    }

    /** Adds classpath entries to the loader used by {@link #queryClass}. */
    public static void setClassInfoPath(String[] paths) throws MalformedURLException {
        for (String path: paths) {
            classQueryClassLoader.addURL(toClassPathURL(path));
        }
    }

    /** Converts a filesystem path to a classpath URL (file: scheme). */
    private static URL toClassPathURL(String path) throws MalformedURLException {
        return new File(path).toURI().toURL();
    }

    /**
     * Defines and resolves freshly compiled classes in the child loader.
     * Class names and bytecode buffers are matched positionally; iteration
     * stops at the shorter of the two inputs.
     */
    public static void loadClasses(String[] classNames, List<ByteBuffer> classes) {
        ArrayList<Class<?>> newClasses = new ArrayList<Class<?>>(classNames.length);
        Iterator<ByteBuffer> classIt = classes.iterator();
        for (int i = 0; i < classNames.length && classIt.hasNext(); i++) {
            newClasses.add(replClassLoader.defineClass(fixClassName(classNames[i]),
                                                       classIt.next(),
                                                       (ProtectionDomain) null));
        }
        // Resolve (link) only after all classes are defined, so they can refer
        // to each other.
        Iterator<Class<?>> it = newClasses.iterator();
        while (it.hasNext()) {
            replClassLoader.resolveClass(it.next());
        }
    }

    /**
     * Invokes the named static no-arg method on the named class and returns
     * its result. (Despite the name, this does not call a constructor - it
     * reflectively invokes a static factory-style method.)
     */
    public static Object newInstance(String className, String methodName)
        throws ClassNotFoundException, NoSuchMethodException,
               InvocationTargetException, IllegalAccessException {
        return replClassLoader.loadClass(fixClassName(className))
                              .getMethod(methodName)
                              .invoke(null);
    }

    /** Normalizes an internal (slash-separated) class name to dotted form. */
    private static String fixClassName(String name) {
        return name.replace("/", ".");
    }

    /**
     * Discards all REPL-defined classes by replacing the child loader.
     * NOTE(review): the cached reflective handles below (closureClass etc.)
     * are NOT cleared here, so they keep referencing classes from the old
     * loader - presumably intentional since the runtime lives in the parent
     * loader; confirm.
     */
    public static void resetClasses() {
        replClassLoader = new ChildREPLClassLoader();
    }

    // --- Lazily resolved handles into the Eta runtime (see lazyInit) ---
    private static Class<?> runtimeClass;
    private static Class<?> closureClass;
    private static Class<?> closuresClass;
    // ZC / ZMZN are Eta's Z-encoded data constructors for (:) and [] -
    // presumably; grounded here only by their use as cons cell (x1 = head,
    // x2 = tail) and list terminator below.
    private static Class<?> ZCClass;
    private static Field ZCx1Field;
    private static Field ZCx2Field;
    private static Class<?> ZMZNClass;
    private static Method applyMethod;
    private static Method evalIOMethod;
    private static Method evaluateMethod;
    private static Field Czhx1Field;
    private static Class<?> CzhClass;   // Czh: boxed Char constructor (int codepoint)
    private static Constructor CzhConstructor;
    private static Constructor ZCConstructor;
    private static Class<?> W64zhClass; // W64zh: boxed Word64 constructor (long)
    private static Constructor W64zhConstructor;
    private static Object ZMZNSingleton; // the empty-list value, fetched via Types.DZMZN()

    /**
     * Resolves the reflective handles into the Eta runtime exactly once.
     * NOTE(review): the null-check/assign sequence is not synchronized; this
     * appears to assume single-threaded initialization - confirm.
     */
    public static void lazyInit() {
        if (closureClass == null) {
            try {
                runtimeClass = replClassLoader.loadClass("eta.runtime.Runtime");
                closureClass = replClassLoader.loadClass("eta.runtime.stg.Closure");
                closuresClass = replClassLoader.loadClass("eta.runtime.stg.Closures");
                ZCClass = replClassLoader.loadClass("ghc_prim.ghc.types.datacons.ZC");
                ZMZNClass = replClassLoader.loadClass("ghc_prim.ghc.types.datacons.ZMZN");
                ZCx1Field = ZCClass.getField("x1");
                ZCx2Field = ZCClass.getField("x2");
                applyMethod = closuresClass.getMethod("apply", closureClass, closureClass);
                evalIOMethod = runtimeClass.getMethod("evalIO", closureClass);
                evaluateMethod = runtimeClass.getMethod("evaluate", closureClass);
                CzhClass = replClassLoader.loadClass("ghc_prim.ghc.types.datacons.Czh");
                Czhx1Field = CzhClass.getField("x1");
                CzhConstructor = CzhClass.getConstructor(Integer.TYPE);
                ZCConstructor = ZCClass.getConstructor(closureClass, closureClass);
                W64zhClass = replClassLoader.loadClass("base.ghc.word.datacons.W64zh");
                W64zhConstructor = W64zhClass.getConstructor(long.class);
                ZMZNSingleton = replClassLoader.loadClass("ghc_prim.ghc.Types")
                    .getMethod("DZMZN").invoke(null);
            } catch (Exception e) {
                throw new RuntimeException("Failed during Eta REPL initialization", e);
            }
        }
    }

    // --- Lazily resolved Template Haskell entry points (see lazyInitTH) ---
    private static Method apply3Method;
    private static Method startTHMethod;
    private static Method runTHMethod;
    private static Method runModFinalizerRefsMethod;
    private static Constructor jbyteArrayConstructor;

    /**
     * Resolves the Template Haskell server entry points; implies
     * {@link #lazyInit()}. Same single-threaded-init caveat applies.
     */
    public static void lazyInitTH() {
        lazyInit();
        if (apply3Method == null) {
            try {
                Class<?> serverClass =
                    replClassLoader.loadClass("eta_meta.language.eta.meta.Server");
                apply3Method = replClassLoader.loadClass("eta.runtime.stg.Closures")
                    .getMethod("apply", closureClass, closureClass, closureClass, closureClass);
                startTHMethod = serverClass.getMethod("startTH");
                runTHMethod = serverClass.getMethod("runTH");
                // NOTE(review): "Finalizzer" (double z) looks like the Z-encoded
                // JVM name ('z' is escaped as "zz" in GHC/Eta name mangling), not
                // a typo - confirm against the eta_meta Server class before
                // "fixing" it.
                runModFinalizerRefsMethod = serverClass.getMethod("runModFinalizzerRefs");
                jbyteArrayConstructor = replClassLoader
                    .loadClass("ghc_prim.ghc.cstring.datacons.JByteArray")
                    .getConstructor(byte[].class);
            } catch (Exception e) {
                throw new RuntimeException("Failed during Eta REPL TH initialization", e);
            }
        }
    }

    /** Applies closure {@code e1} to argument {@code e2} (Closures.apply). */
    public static Object apply(Object e1, Object e2) {
        lazyInit();
        try {
            return applyMethod.invoke(null, e1, e2);
        } catch (Exception e) {
            throw new RuntimeException
                ("Failed during constructing Eta REPL expression", e);
        }
    }

    /**
     * Runs an IO action expected to yield an Eta list, and returns its
     * elements (still as runtime closures) in order.
     */
    public static List<Object> evalStmt(Object e) {
        lazyInit();
        List<Object> list = new LinkedList<Object>();
        try {
            Object result = evalIOInternal(e);
            // Walk the cons cells until the [] terminator.
            while (!ZMZNClass.isInstance(result)) {
                list.add(ZCx1Field.get(result));
                result = ZCx2Field.get(result);
            }
            return list;
        } catch (Exception exc) {
            throw new RuntimeException
                ("Failed during evalStmt of Eta REPL expression", exc);
        }
    }

    /** Runs an IO action for its side effects, discarding the result. */
    public static void evalIO(Object e) {
        lazyInit();
        try {
            evalIOInternal(e);
        } catch (Exception exc) {
            throw new RuntimeException
                ("Failed during evalIO of Eta REPL expression", exc);
        }
    }

    /** Runs an IO action expected to yield a Haskell String, returned as Java String. */
    public static String evalString(Object e) {
        lazyInit();
        try {
            return convertToString(evalIOInternal(e));
        } catch (Exception exc) {
            throw new RuntimeException
                ("Failed during evalIO of Eta REPL expression", exc);
        }
    }

    /**
     * Applies closure {@code e} to {@code str} (converted to a Haskell
     * String), runs the resulting IO action, and converts the result back.
     */
    public static String evalStringToString(Object e, String str) {
        lazyInit();
        try {
            return convertToString(evalIOInternal(apply(e, convertFromString(str))));
        } catch (Exception exc) {
            throw new RuntimeException
                ("Failed during evalIO of Eta REPL expression", exc);
        }
    }

    /**
     * Builds an Eta list of Czh (Char) cells from a Java String, iterating by
     * Unicode code point. Returns the ZMZN singleton for the empty string.
     */
    private static Object convertFromString(String str)
        throws InstantiationException, IllegalAccessException, InvocationTargetException {
        int off = 0;
        int len = str.length();
        if (len <= 0) return ZMZNSingleton;
        int codepoint = 0;
        Object prevCurrent = null;
        // Cells are allocated with null fields and filled in as we go.
        Object current = ZCConstructor.newInstance(null, null);
        Object head = current;
        for (off = 0; off < len; off += Character.charCount(codepoint)) {
            codepoint = str.codePointAt(off);
            ZCx1Field.set(current, CzhConstructor.newInstance(codepoint));
            Object next = ZCConstructor.newInstance(null, null);
            ZCx2Field.set(current, next);
            prevCurrent = current;
            current = next;
        }
        // Replace the dangling tail (one speculative cell is discarded) with [].
        ZCx2Field.set(prevCurrent, ZMZNSingleton);
        return head;
    }

    /**
     * Forces an Eta String (list of Czh closures) and collects it into a
     * Java String; both the spine and each element are evaluated first.
     */
    private static String convertToString(Object result)
        throws IllegalAccessException, InvocationTargetException {
        StringBuilder sb = new StringBuilder();
        while (!ZMZNClass.isInstance(result = evaluateMethod.invoke(null, result))) {
            sb.appendCodePoint((int) Czhx1Field
                               .get(evaluateMethod
                                    .invoke(null, ZCx1Field.get(result))));
            result = ZCx2Field.get(result);
        }
        return sb.toString();
    }

    /** Starts a Template Haskell session; returns the runtime's result closure. */
    public static Object startTH() {
        lazyInitTH();
        try {
            return evalIOInternal(startTHMethod.invoke(null));
        } catch (Exception exc) {
            throw new RuntimeException("Failed during startTH", exc);
        }
    }

    /**
     * Runs a serialized Template Haskell splice: wraps {@code serialized} as a
     * JByteArray and applies runTH to (qstate, q, serialized).
     */
    public static void runTH(Object qstate, Object q, byte[] serialized) {
        lazyInitTH();
        try {
            Object serialized_ = jbyteArrayConstructor.newInstance(serialized);
            evalIOInternal(apply3Method.invoke(null, runTHMethod.invoke(null),
                                               qstate, q, serialized_));
        } catch (Exception exc) {
            throw new RuntimeException("Failed during runTH", exc);
        }
    }

    /**
     * Runs module finalizers: builds an Eta list of W64zh-wrapped action
     * references (in the original order - the list is consed back-to-front
     * from a reverse iterator) and invokes the runtime entry point.
     */
    public static void runModFinalizerRefs(byte[] serialized, Object qstate,
                                           List<Long> qactions) {
        lazyInitTH();
        try {
            Object serialized_ = jbyteArrayConstructor.newInstance(serialized);
            ListIterator<Long> it = qactions.listIterator(qactions.size());
            Object qs = ZMZNSingleton;
            while (it.hasPrevious()) {
                qs = ZCConstructor.newInstance(W64zhConstructor.newInstance(it.previous()), qs);
            }
            evalIOInternal(apply3Method.invoke(null,
                                               runModFinalizerRefsMethod.invoke(null),
                                               serialized_, qstate, qs));
        } catch (Exception exc) {
            throw new RuntimeException("Failed during runModFinalizerRefs", exc);
        }
    }

    /** Loads (without initializing) a class via the class-info query loader. */
    public static Class<?> queryClass(String c) throws ClassNotFoundException {
        return Class.forName(c, false, classQueryClassLoader);
    }

    // Sandboxed stdout/stderr capture state. NOTE(review): these are static
    // and unsynchronized, so concurrent evaluations would race on the saved
    // streams - appears to assume one evaluation at a time; confirm.
    private static ByteArrayOutputStream baos;
    private static PrintStream sandboxedStream;
    private static InputStream oldStdIn;
    private static PrintStream oldStdOut;
    private static PrintStream oldStdErr;

    /**
     * Runs an IO closure with System.out/err redirected into the sandbox
     * buffer; the original streams are always restored afterwards.
     */
    public static Object evalIOInternal(Object e) {
        // TODO: Add support for stdin too!
        oldStdIn = System.in;
        oldStdOut = System.out;
        oldStdErr = System.err;
        initSandbox();
        try {
            return evalIOMethod.invoke(null, e);
        } catch (Exception exc) {
            throw new RuntimeException
                ("Failed during evalIOInternal of Eta REPL expression", exc);
        } finally {
            System.setOut(oldStdOut);
            System.setErr(oldStdErr);
        }
    }

    // TODO: Currently, stdin isn't sandboxed, so this will return the default stdin.
    public static Channel getOldStdIn() {
        return Channels.newChannel(oldStdIn);
    }

    /** Channel over the real stdout saved by the last evalIOInternal call. */
    public static Channel getOldStdOut() {
        return Channels.newChannel(oldStdOut);
    }

    /**
     * Drains and returns the sandboxed output captured so far.
     * NOTE(review): throws NPE if called before any evaluation has run
     * (baos is only created by initSandbox) - confirm callers' ordering.
     */
    public static byte[] getOutputBytes() {
        byte[] result = baos.toByteArray();
        baos.reset();
        return result;
    }

    /** Creates the capture buffer on first use and redirects System.out/err into it. */
    private static void initSandbox() {
        if (sandboxedStream == null) {
            baos = new ByteArrayOutputStream();
            sandboxedStream = new PrintStream(baos);
        }
        System.setOut(sandboxedStream);
        System.setErr(sandboxedStream);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gateway;

import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;

/**
 * Sends dangling indices discovered on a local node to the current master,
 * which imports them into the cluster state ("allocates" them) via a cluster
 * state update task. The request handler registered under {@link #ACTION_NAME}
 * runs on the master side.
 */
public class LocalAllocateDangledIndices extends AbstractComponent {

    public static final String ACTION_NAME = "internal:gateway/local/allocate_dangled";

    private final TransportService transportService;

    private final ClusterService clusterService;

    private final AllocationService allocationService;

    private final MetaDataIndexUpgradeService metaDataIndexUpgradeService;

    @Inject
    public LocalAllocateDangledIndices(Settings settings, TransportService transportService,
                                       ClusterService clusterService,
                                       AllocationService allocationService,
                                       MetaDataIndexUpgradeService metaDataIndexUpgradeService) {
        super(settings);
        this.transportService = transportService;
        this.clusterService = clusterService;
        this.allocationService = allocationService;
        this.metaDataIndexUpgradeService = metaDataIndexUpgradeService;
        // Master-side handler; runs on the calling transport thread (SAME)
        // because the heavy work happens inside the cluster state update task.
        transportService.registerRequestHandler(ACTION_NAME, AllocateDangledRequest.class,
                ThreadPool.Names.SAME, new AllocateDangledRequestHandler());
    }

    /**
     * Asks the current master to import the given dangling indices.
     * Fails fast with {@link MasterNotDiscoveredException} when no master is
     * known; otherwise the listener is completed from the transport response.
     *
     * @param indices  metadata of the dangling indices found locally
     * @param listener completed with the master's ack or the failure
     */
    public void allocateDangled(Collection<IndexMetaData> indices, final Listener listener) {
        ClusterState clusterState = clusterService.state();
        DiscoveryNode masterNode = clusterState.nodes().masterNode();
        if (masterNode == null) {
            listener.onFailure(new MasterNotDiscoveredException("no master to send allocate dangled request"));
            return;
        }
        AllocateDangledRequest request = new AllocateDangledRequest(clusterService.localNode(),
                indices.toArray(new IndexMetaData[indices.size()]));
        transportService.sendRequest(masterNode, ACTION_NAME, request,
                new TransportResponseHandler<AllocateDangledResponse>() {
            @Override
            public AllocateDangledResponse newInstance() {
                return new AllocateDangledResponse();
            }

            @Override
            public void handleResponse(AllocateDangledResponse response) {
                listener.onResponse(response);
            }

            @Override
            public void handleException(TransportException exp) {
                listener.onFailure(exp);
            }

            @Override
            public String executor() {
                return ThreadPool.Names.SAME;
            }
        });
    }

    /** Callback for the outcome of an allocate-dangled request. */
    public static interface Listener {
        void onResponse(AllocateDangledResponse response);

        void onFailure(Throwable e);
    }

    /**
     * Master-side handler: submits a cluster state update task that imports
     * any of the requested indices not already present in the cluster state.
     */
    class AllocateDangledRequestHandler implements TransportRequestHandler<AllocateDangledRequest> {
        @Override
        public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel) throws Exception {
            String[] indexNames = new String[request.indices.length];
            for (int i = 0; i < request.indices.length; i++) {
                indexNames[i] = request.indices[i].index();
            }
            clusterService.submitStateUpdateTask("allocation dangled indices " + Arrays.toString(indexNames), new ProcessedClusterStateUpdateTask() {
                @Override
                public ClusterState execute(ClusterState currentState) {
                    // When state persistence is disabled we must not import
                    // anything - leave the state untouched.
                    if (currentState.blocks().disableStatePersistence()) {
                        return currentState;
                    }
                    MetaData.Builder metaData = MetaData.builder(currentState.metaData());
                    ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
                    RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable());

                    boolean importNeeded = false;
                    StringBuilder sb = new StringBuilder();
                    for (IndexMetaData indexMetaData : request.indices) {
                        // Skip indices that already exist or whose name clashes
                        // with an existing alias.
                        if (currentState.metaData().hasIndex(indexMetaData.index())) {
                            continue;
                        }
                        if (currentState.metaData().aliases().containsKey(indexMetaData.index())) {
                            logger.warn("ignoring dangled index [{}] on node [{}] due to an existing alias with the same name",
                                    indexMetaData.index(), request.fromNode);
                            continue;
                        }
                        importNeeded = true;
                        IndexMetaData upgradedIndexMetaData;
                        try {
                            // The dangled index might be from an older version, we need to make sure it's compatible
                            // with the current version and upgrade it if needed.
                            upgradedIndexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData);
                        } catch (Exception ex) {
                            // upgrade failed - adding index as closed
                            logger.warn("found dangled index [{}] on node [{}]. This index cannot be upgraded to the latest version, adding as closed", ex, indexMetaData.index(), request.fromNode);
                            // Bump the version so the closed state wins over stale copies.
                            upgradedIndexMetaData = IndexMetaData.builder(indexMetaData)
                                    .state(IndexMetaData.State.CLOSE)
                                    .version(indexMetaData.version() + 1)
                                    .build();
                        }
                        metaData.put(upgradedIndexMetaData, false);
                        blocks.addBlocks(upgradedIndexMetaData);
                        // Only open indices get routing entries; closed ones are
                        // imported as metadata only.
                        if (upgradedIndexMetaData.getState() == IndexMetaData.State.OPEN) {
                            routingTableBuilder.addAsFromDangling(upgradedIndexMetaData);
                        }
                        sb.append("[").append(upgradedIndexMetaData.index()).append("/").append(upgradedIndexMetaData.state()).append("]");
                    }
                    if (!importNeeded) {
                        return currentState;
                    }
                    logger.info("auto importing dangled indices {} from [{}]", sb.toString(), request.fromNode);

                    ClusterState updatedState = ClusterState.builder(currentState).metaData(metaData).blocks(blocks).routingTable(routingTableBuilder).build();

                    // now, reroute
                    RoutingAllocation.Result routingResult = allocationService.reroute(
                            ClusterState.builder(updatedState).routingTable(routingTableBuilder).build());

                    return ClusterState.builder(updatedState).routingResult(routingResult).build();
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    logger.error("unexpected failure during [{}]", t, source);
                    try {
                        // Propagate the failure back to the requesting node.
                        channel.sendResponse(t);
                    } catch (Exception e) {
                        logger.warn("failed send response for allocating dangled", e);
                    }
                }

                @Override
                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                    try {
                        // Ack only after the new cluster state has been applied.
                        channel.sendResponse(new AllocateDangledResponse(true));
                    } catch (IOException e) {
                        logger.warn("failed send response for allocating dangled", e);
                    }
                }
            });
        }
    }

    /**
     * Transport request carrying the reporting node and the dangling index
     * metadata to import.
     */
    static class AllocateDangledRequest extends TransportRequest {

        DiscoveryNode fromNode;

        IndexMetaData[] indices;

        AllocateDangledRequest() {
        }

        AllocateDangledRequest(DiscoveryNode fromNode, IndexMetaData[] indices) {
            this.fromNode = fromNode;
            this.indices = indices;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            fromNode = DiscoveryNode.readNode(in);
            indices = new IndexMetaData[in.readVInt()];
            for (int i = 0; i < indices.length; i++) {
                indices[i] = IndexMetaData.Builder.readFrom(in);
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            fromNode.writeTo(out);
            out.writeVInt(indices.length);
            for (IndexMetaData indexMetaData : indices) {
                indexMetaData.writeTo(out);
            }
        }
    }

    /** Transport response: a single ack flag. */
    public static class AllocateDangledResponse extends TransportResponse {

        private boolean ack;

        AllocateDangledResponse() {
        }

        AllocateDangledResponse(boolean ack) {
            this.ack = ack;
        }

        public boolean ack() {
            return ack;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            ack = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(ack);
        }
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.storage.hdfs;

import com.google.common.base.Predicate;
import com.google.common.base.Throwables;
import com.google.common.io.ByteSource;
import com.google.inject.Inject;
import io.druid.java.util.common.CompressionUtils;
import io.druid.java.util.common.FileUtils;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.RetryUtils;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.UOE;
import io.druid.java.util.common.io.NativeIO;
import io.druid.java.util.common.logger.Logger;
import io.druid.segment.loading.DataSegmentPuller;
import io.druid.segment.loading.SegmentLoadingException;
import io.druid.segment.loading.URIDataPuller;
import io.druid.timeline.DataSegment;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import javax.tools.FileObject;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.net.URI;
import java.util.concurrent.Callable;

/**
 * Pulls Druid segment files out of HDFS into a local directory. Supports
 * three layouts at the segment path: a directory of files (copied file by
 * file), a {@code .zip} archive (unzipped), or a {@code .gz} file
 * (gunzipped). Also implements {@link URIDataPuller} for raw-stream access
 * and last-modified "version" queries.
 */
public class HdfsDataSegmentPuller implements DataSegmentPuller, URIDataPuller
{
  // Number of attempts for the retryable operations in this class.
  public static final int DEFAULT_RETRY_COUNT = 3;

  /**
   * FileObject.getLastModified and FileObject.delete don't throw IOException. This allows us to wrap those calls
   */
  public static class HdfsIOException extends RuntimeException
  {
    private final IOException cause;

    public HdfsIOException(IOException ex)
    {
      super(ex);
      this.cause = ex;
    }

    /** The wrapped checked exception, so callers can re-throw it typed. */
    protected IOException getIOException()
    {
      return cause;
    }
  }

  /** Builds a read-only {@link FileObject} view of an HDFS URI (no overwrite on output). */
  public static FileObject buildFileObject(final URI uri, final Configuration config)
  {
    return buildFileObject(uri, config, false);
  }

  /**
   * Builds a {@link FileObject} over an HDFS URI. Only stream and metadata
   * operations are supported; the Reader/Writer/CharSequence methods throw
   * {@link UOE}. IOExceptions from the signature-less methods
   * (getLastModified/delete) surface as {@link HdfsIOException}.
   *
   * @param uri       HDFS location
   * @param config    Hadoop configuration used to resolve the FileSystem
   * @param overwrite whether openOutputStream may replace an existing file
   */
  public static FileObject buildFileObject(final URI uri, final Configuration config, final Boolean overwrite)
  {
    return new FileObject()
    {
      final Path path = new Path(uri);

      @Override
      public URI toUri()
      {
        return uri;
      }

      @Override
      public String getName()
      {
        return path.getName();
      }

      @Override
      public InputStream openInputStream() throws IOException
      {
        final FileSystem fs = path.getFileSystem(config);
        return fs.open(path);
      }

      @Override
      public OutputStream openOutputStream() throws IOException
      {
        final FileSystem fs = path.getFileSystem(config);
        return fs.create(path, overwrite);
      }

      @Override
      public Reader openReader(boolean ignoreEncodingErrors) throws IOException
      {
        throw new UOE("HDFS Reader not supported");
      }

      @Override
      public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException
      {
        throw new UOE("HDFS CharSequence not supported");
      }

      @Override
      public Writer openWriter() throws IOException
      {
        throw new UOE("HDFS Writer not supported");
      }

      @Override
      public long getLastModified()
      {
        try {
          final FileSystem fs = path.getFileSystem(config);
          return fs.getFileStatus(path).getModificationTime();
        }
        catch (IOException ex) {
          // FileObject.getLastModified cannot throw IOException; wrap it.
          throw new HdfsIOException(ex);
        }
      }

      @Override
      public boolean delete()
      {
        try {
          final FileSystem fs = path.getFileSystem(config);
          // non-recursive delete
          return fs.delete(path, false);
        }
        catch (IOException ex) {
          // FileObject.delete cannot throw IOException; wrap it.
          throw new HdfsIOException(ex);
        }
      }
    };
  }

  private static final Logger log = new Logger(HdfsDataSegmentPuller.class);

  protected final Configuration config;

  @Inject
  public HdfsDataSegmentPuller(final Configuration config)
  {
    this.config = config;
  }

  @Override
  public void getSegmentFiles(DataSegment segment, File dir) throws SegmentLoadingException
  {
    // The HDFS location comes from the segment's load spec ("path" key).
    getSegmentFiles(getPath(segment), dir);
  }

  /**
   * Copies segment data at {@code path} into {@code outDir}, dispatching on
   * the path type: directory (per-file copy with retries), .zip (unzip), or
   * .gz (gunzip). Anything else is a {@link SegmentLoadingException}.
   *
   * @return a copy result accumulating the files and bytes written
   */
  public FileUtils.FileCopyResult getSegmentFiles(final Path path, final File outDir) throws SegmentLoadingException
  {
    try {
      final FileSystem fs = path.getFileSystem(config);
      if (fs.isDirectory(path)) {

        // --------    directory     ---------

        try {
          return RetryUtils.retry(
              new Callable<FileUtils.FileCopyResult>()
              {
                @Override
                public FileUtils.FileCopyResult call() throws Exception
                {
                  if (!fs.exists(path)) {
                    throw new SegmentLoadingException("No files found at [%s]", path.toString());
                  }

                  // non-recursive listing; nested directories are skipped below
                  final RemoteIterator<LocatedFileStatus> children = fs.listFiles(path, false);
                  final FileUtils.FileCopyResult result = new FileUtils.FileCopyResult();
                  while (children.hasNext()) {
                    final LocatedFileStatus child = children.next();
                    final Path childPath = child.getPath();
                    final String fname = childPath.getName();
                    if (fs.isDirectory(childPath)) {
                      log.warn("[%s] is a child directory, skipping", childPath.toString());
                    } else {
                      final File outFile = new File(outDir, fname);

                      try (final FSDataInputStream in = fs.open(childPath)) {
                        NativeIO.chunkedCopy(in, outFile);
                      }
                      result.addFile(outFile);
                    }
                  }
                  log.info(
                      "Copied %d bytes from [%s] to [%s]",
                      result.size(),
                      path.toString(),
                      outDir.getAbsolutePath()
                  );
                  return result;
                }
              },
              shouldRetryPredicate(),
              DEFAULT_RETRY_COUNT
          );
        }
        catch (Exception e) {
          throw Throwables.propagate(e);
        }
      } else if (CompressionUtils.isZip(path.getName())) {

        // --------    zip     ---------

        final FileUtils.FileCopyResult result = CompressionUtils.unzip(
            new ByteSource()
            {
              @Override
              public InputStream openStream() throws IOException
              {
                return getInputStream(path);
              }
            }, outDir, shouldRetryPredicate(), false
        );

        log.info(
            "Unzipped %d bytes from [%s] to [%s]",
            result.size(),
            path.toString(),
            outDir.getAbsolutePath()
        );

        return result;
      } else if (CompressionUtils.isGz(path.getName())) {

        // --------    gzip     ---------

        final String fname = path.getName();
        final File outFile = new File(outDir, CompressionUtils.getGzBaseName(fname));
        final FileUtils.FileCopyResult result = CompressionUtils.gunzip(
            new ByteSource()
            {
              @Override
              public InputStream openStream() throws IOException
              {
                return getInputStream(path);
              }
            },
            outFile
        );

        log.info(
            "Gunzipped %d bytes from [%s] to [%s]",
            result.size(),
            path.toString(),
            outFile.getAbsolutePath()
        );
        return result;
      } else {
        throw new SegmentLoadingException("Do not know how to handle file type at [%s]", path.toString());
      }
    }
    catch (IOException e) {
      throw new SegmentLoadingException(e, "Error loading [%s]", path.toString());
    }
  }

  /**
   * URI overload: validates the URI scheme against the HDFS storage module's
   * scheme, then delegates to the Path-based overload.
   */
  public FileUtils.FileCopyResult getSegmentFiles(URI uri, File outDir) throws SegmentLoadingException
  {
    if (!uri.getScheme().equalsIgnoreCase(HdfsStorageDruidModule.SCHEME)) {
      throw new SegmentLoadingException("Don't know how to load SCHEME for URI [%s]", uri.toString());
    }
    return getSegmentFiles(new Path(uri), outDir);
  }

  /** Opens a raw input stream over the given HDFS path. */
  public InputStream getInputStream(Path path) throws IOException
  {
    return buildFileObject(path.toUri(), config).openInputStream();
  }

  @Override
  public InputStream getInputStream(URI uri) throws IOException
  {
    if (!uri.getScheme().equalsIgnoreCase(HdfsStorageDruidModule.SCHEME)) {
      throw new IAE("Don't know how to load SCHEME [%s] for URI [%s]", uri.getScheme(), uri.toString());
    }
    return buildFileObject(uri, config).openInputStream();
  }

  /**
   * Return the "version" (aka last modified timestamp) of the URI
   *
   * @param uri The URI of interest
   *
   * @return The last modified timestamp of the uri in String format
   *
   * @throws IOException
   */
  @Override
  public String getVersion(URI uri) throws IOException
  {
    try {
      return StringUtils.format("%d", buildFileObject(uri, config).getLastModified());
    }
    catch (HdfsIOException ex) {
      // Unwrap back to the checked IOException declared by this method.
      throw ex.getIOException();
    }
  }

  /**
   * Retry predicate: retry when the failure is an IOException or an
   * {@link HdfsIOException}, searching the whole cause chain (recursion
   * terminates when getCause() returns null).
   */
  @Override
  public Predicate<Throwable> shouldRetryPredicate()
  {
    return new Predicate<Throwable>()
    {
      @Override
      public boolean apply(Throwable input)
      {
        if (input == null) {
          return false;
        }
        if (input instanceof HdfsIOException) {
          return true;
        }
        if (input instanceof IOException) {
          return true;
        }
        return apply(input.getCause());
      }
    };
  }

  /** Extracts the HDFS path from the segment's load spec ("path" entry). */
  private Path getPath(DataSegment segment)
  {
    return new Path(String.valueOf(segment.getLoadSpec().get("path")));
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package storm.kafka;

import backtype.storm.Config;
import backtype.storm.metric.api.CombinedMetric;
import backtype.storm.metric.api.CountMetric;
import backtype.storm.metric.api.MeanReducer;
import backtype.storm.metric.api.ReducedMetric;
import backtype.storm.spout.SpoutOutputCollector;
import com.google.common.collect.ImmutableMap;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.javaapi.message.ByteBufferMessageSet;
import kafka.message.MessageAndOffset;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import storm.kafka.KafkaSpout.EmitState;
import storm.kafka.KafkaSpout.MessageAndRealOffset;
import storm.kafka.trident.MaxMetric;

import java.util.*;

/**
 * Tracks the consumption state of a single Kafka partition for a KafkaSpout:
 * which offset was last committed to ZooKeeper, which offsets have been
 * emitted into the topology but not yet acked, and which failed offsets are
 * pending retry.
 *
 * NOTE(review): not documented as thread-safe; assumes it is driven by a
 * single spout executor thread — confirm against the calling spout.
 */
public class PartitionManager {
    public static final Logger LOG = LoggerFactory.getLogger(PartitionManager.class);

    // Fetch-latency / call-count metrics reported via getMetricsDataMap().
    private final CombinedMetric _fetchAPILatencyMax;
    private final ReducedMetric _fetchAPILatencyMean;
    private final CountMetric _fetchAPICallCount;
    private final CountMetric _fetchAPIMessageCount;

    // Highest offset handed to the topology so far (next fetch starts here
    // when there are no retries pending).
    Long _emittedToOffset;
    // _pending key = Kafka offset, value = time at which the message was first submitted to the topology
    private SortedMap<Long,Long> _pending = new TreeMap<Long,Long>();
    // Decides when (and whether) a failed offset should be re-fetched.
    private final FailedMsgRetryManager _failedMsgRetryManager;

    // retryRecords key = Kafka offset, value = retry info for the given message
    // Offset most recently written to ZooKeeper by commit().
    Long _committedTo;
    // Messages fetched from Kafka but not yet emitted by next().
    LinkedList<MessageAndRealOffset> _waitingToEmit = new LinkedList<MessageAndRealOffset>();
    Partition _partition;
    SpoutConfig _spoutConfig;
    String _topologyInstanceId;
    SimpleConsumer _consumer;
    DynamicPartitionConnections _connections;
    ZkState _state;
    Map _stormConf;
    long numberFailed, numberAcked;

    /**
     * Builds the manager and decides the starting offset: prefer the offset
     * recorded in ZooKeeper unless it is missing, belongs to another topology
     * run with forceFromStart set, or lags more than maxOffsetBehind behind
     * the broker's current offset.
     */
    public PartitionManager(DynamicPartitionConnections connections, String topologyInstanceId, ZkState state, Map stormConf, SpoutConfig spoutConfig, Partition id) {
        _partition = id;
        _connections = connections;
        _spoutConfig = spoutConfig;
        _topologyInstanceId = topologyInstanceId;
        _consumer = connections.register(id.host, id.partition);
        _state = state;
        _stormConf = stormConf;
        numberAcked = numberFailed = 0;

        _failedMsgRetryManager = new ExponentialBackoffMsgRetryManager(_spoutConfig.retryInitialDelayMs, _spoutConfig.retryDelayMultiplier, _spoutConfig.retryDelayMaxMs);

        String jsonTopologyId = null;
        Long jsonOffset = null;
        String path = committedPath();
        try {
            // Last committed state, written by commit() in a previous run.
            Map<Object, Object> json = _state.readJSON(path);
            LOG.info("Read partition information from: " + path + "  --> " + json );
            if (json != null) {
                jsonTopologyId = (String) ((Map<Object, Object>) json.get("topology")).get("id");
                jsonOffset = (Long) json.get("offset");
            }
        } catch (Throwable e) {
            // Deliberately broad: a corrupt/missing ZK node must not prevent
            // startup — we fall back to broker-derived offsets below.
            LOG.warn("Error reading and/or parsing at ZkNode: " + path, e);
        }

        Long currentOffset = KafkaUtils.getOffset(_consumer, spoutConfig.topic, id.partition, spoutConfig);

        if (jsonTopologyId == null || jsonOffset == null) { // failed to parse JSON?
            _committedTo = currentOffset;
            LOG.info("No partition information found, using configuration to determine offset");
        } else if (!topologyInstanceId.equals(jsonTopologyId) && spoutConfig.forceFromStart) {
            _committedTo = KafkaUtils.getOffset(_consumer, spoutConfig.topic, id.partition, spoutConfig.startOffsetTime);
            LOG.info("Topology change detected and reset from start forced, using configuration to determine offset");
        } else {
            _committedTo = jsonOffset;
            LOG.info("Read last commit offset from zookeeper: " + _committedTo + "; old topology_id: " + jsonTopologyId + " - new topology_id: " + topologyInstanceId );
        }

        // Too far behind (or nonsensical) — skip ahead to the broker's
        // current offset rather than replay a huge backlog.
        if (currentOffset - _committedTo > spoutConfig.maxOffsetBehind || _committedTo <= 0) {
            LOG.info("Last commit offset from zookeeper: " + _committedTo);
            _committedTo = currentOffset;
            LOG.info("Commit offset " + _committedTo + " is more than " + spoutConfig.maxOffsetBehind + " behind, resetting to startOffsetTime=" + spoutConfig.startOffsetTime);
        }

        LOG.info("Starting Kafka " + _consumer.host() + ":" + id.partition + " from offset " + _committedTo);
        _emittedToOffset = _committedTo;

        _fetchAPILatencyMax = new CombinedMetric(new MaxMetric());
        _fetchAPILatencyMean = new ReducedMetric(new MeanReducer());
        _fetchAPICallCount = new CountMetric();
        _fetchAPIMessageCount = new CountMetric();
    }

    /** Snapshots and resets the fetch metrics, keyed by partition. */
    public Map getMetricsDataMap() {
        Map ret = new HashMap();
        ret.put(_partition + "/fetchAPILatencyMax", _fetchAPILatencyMax.getValueAndReset());
        ret.put(_partition + "/fetchAPILatencyMean", _fetchAPILatencyMean.getValueAndReset());
        ret.put(_partition + "/fetchAPICallCount", _fetchAPICallCount.getValueAndReset());
        ret.put(_partition + "/fetchAPIMessageCount", _fetchAPIMessageCount.getValueAndReset());
        return ret;
    }

    //returns false if it's reached the end of current batch
    /**
     * Emits the next available message to the collector. Messages that
     * deserialize to no tuples are auto-acked and skipped. Returns NO_EMITTED
     * when nothing could be emitted even after refilling from Kafka.
     */
    public EmitState next(SpoutOutputCollector collector) {
        if (_waitingToEmit.isEmpty()) {
            fill();
        }
        while (true) {
            MessageAndRealOffset toEmit = _waitingToEmit.pollFirst();
            if (toEmit == null) {
                return EmitState.NO_EMITTED;
            }
            Iterable<List<Object>> tups = KafkaUtils.generateTuples(_spoutConfig, toEmit.msg);
            if (tups != null) {
                for (List<Object> tup : tups) {
                    collector.emit(tup, new KafkaMessageId(_partition, toEmit.offset));
                }
                break;
            } else {
                // Non-tuple-producing message: treat as processed.
                ack(toEmit.offset);
            }
        }
        if (!_waitingToEmit.isEmpty()) {
            return EmitState.EMITTED_MORE_LEFT;
        } else {
            return EmitState.EMITTED_END;
        }
    }

    /**
     * Fetches a batch from Kafka into _waitingToEmit. Failed offsets due for
     * retry take priority over new offsets; out-of-range fetches reset
     * _emittedToOffset to the broker's earliest offset.
     */
    private void fill() {
        long start = System.nanoTime();
        Long offset;

        // Are there failed tuples? If so, fetch those first.
        offset = this._failedMsgRetryManager.nextFailedMessageToRetry();
        final boolean processingNewTuples = (offset == null);
        if (processingNewTuples) {
            offset = _emittedToOffset;
        }

        ByteBufferMessageSet msgs = null;
        try {
            msgs = KafkaUtils.fetchMessages(_spoutConfig, _consumer, _partition, offset);
        } catch (TopicOffsetOutOfRangeException e) {
            _emittedToOffset = KafkaUtils.getOffset(_consumer, _spoutConfig.topic, _partition.partition, kafka.api.OffsetRequest.EarliestTime());
            LOG.warn("Using new offset: {}", _emittedToOffset);
            // fetch failed, so don't update the metrics
            return;
        }
        long end = System.nanoTime();
        long millis = (end - start) / 1000000;
        _fetchAPILatencyMax.update(millis);
        _fetchAPILatencyMean.update(millis);
        _fetchAPICallCount.incr();
        if (msgs != null) {
            int numMessages = 0;

            for (MessageAndOffset msg : msgs) {
                final Long cur_offset = msg.offset();
                if (cur_offset < offset) {
                    // Skip any old offsets.
                    continue;
                }
                if (processingNewTuples || this._failedMsgRetryManager.shouldRetryMsg(cur_offset)) {
                    numMessages += 1;
                    // Record first-seen time only once, even across retries.
                    if (!_pending.containsKey(cur_offset)) {
                        _pending.put(cur_offset, System.currentTimeMillis());
                    }
                    _waitingToEmit.add(new MessageAndRealOffset(msg.message(), cur_offset));
                    _emittedToOffset = Math.max(msg.nextOffset(), _emittedToOffset);
                    if (_failedMsgRetryManager.shouldRetryMsg(cur_offset)) {
                        this._failedMsgRetryManager.retryStarted(cur_offset);
                    }
                }
            }
            _fetchAPIMessageCount.incrBy(numMessages);
        }
    }

    /**
     * Marks an offset as fully processed; also drops any pending entries more
     * than maxOffsetBehind older than it (they are abandoned, not retried).
     */
    public void ack(Long offset) {
        if (!_pending.isEmpty() && _pending.firstKey() < offset - _spoutConfig.maxOffsetBehind) {
            // Too many things pending!
            _pending.headMap(offset - _spoutConfig.maxOffsetBehind).clear();
        }
        _pending.remove(offset);
        this._failedMsgRetryManager.acked(offset);
        numberAcked++;
    }

    /**
     * Schedules a failed offset for retry, unless it is already more than
     * maxOffsetBehind the emit frontier (then it is skipped). Throws
     * RuntimeException if everything has failed and nothing was ever acked.
     */
    public void fail(Long offset) {
        if (offset < _emittedToOffset - _spoutConfig.maxOffsetBehind) {
            LOG.info(
                    "Skipping failed tuple at offset=" + offset +
                    " because it's more than maxOffsetBehind=" + _spoutConfig.maxOffsetBehind +
                    " behind _emittedToOffset=" + _emittedToOffset
            );
        } else {
            LOG.debug("failing at offset=" + offset + " with _pending.size()=" + _pending.size() + " pending and _emittedToOffset=" + _emittedToOffset);
            numberFailed++;
            if (numberAcked == 0 && numberFailed > _spoutConfig.maxOffsetBehind) {
                throw new RuntimeException("Too many tuple failures");
            }
            this._failedMsgRetryManager.failed(offset);
        }
    }

    /**
     * Persists the last fully-completed offset (plus topology/partition
     * metadata) to ZooKeeper, if it changed since the previous commit.
     */
    public void commit() {
        long lastCompletedOffset = lastCompletedOffset();
        if (_committedTo != lastCompletedOffset) {
            LOG.debug("Writing last completed offset (" + lastCompletedOffset + ") to ZK for " + _partition + " for topology: " + _topologyInstanceId);
            Map<Object, Object> data = (Map<Object, Object>) ImmutableMap.builder()
                    .put("topology", ImmutableMap.of("id", _topologyInstanceId,
                            "name", _stormConf.get(Config.TOPOLOGY_NAME)))
                    .put("offset", lastCompletedOffset)
                    .put("partition", _partition.partition)
                    .put("broker", ImmutableMap.of("host", _partition.host.host,
                            "port", _partition.host.port))
                    .put("topic", _spoutConfig.topic).build();
            _state.writeJSON(committedPath(), data);
            _committedTo = lastCompletedOffset;
            LOG.debug("Wrote last completed offset (" + lastCompletedOffset + ") to ZK for " + _partition + " for topology: " + _topologyInstanceId);
        } else {
            LOG.debug("No new offset for " + _partition + " for topology: " + _topologyInstanceId);
        }
    }

    /** ZooKeeper node path where this partition's commit state is stored. */
    private String committedPath() {
        return _spoutConfig.zkRoot + "/" + _spoutConfig.id + "/" + _partition.getId();
    }

    /**
     * Highest offset below which everything has been acked: the oldest
     * pending offset, or the emit frontier when nothing is pending.
     */
    public long lastCompletedOffset() {
        if (_pending.isEmpty()) {
            return _emittedToOffset;
        } else {
            return _pending.firstKey();
        }
    }

    public Partition getPartition() {
        return _partition;
    }

    /** Releases this manager's hold on the broker connection. */
    public void close() {
        _connections.unregister(_partition.host, _partition.partition);
    }

    /** Tuple message id: identifies a Kafka message by (partition, offset). */
    static class KafkaMessageId {
        public Partition partition;
        public long offset;

        public KafkaMessageId(Partition partition, long offset) {
            this.partition = partition;
            this.offset = offset;
        }
    }
}
/*
 * Copyright 2017 Young Digital Planet S.A.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package eu.ydp.empiria.player.client.controller;

import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import eu.ydp.empiria.player.client.controller.body.IPlayerContainersAccessor;
import eu.ydp.empiria.player.client.controller.communication.DisplayContentOptions;
import eu.ydp.empiria.player.client.controller.communication.ItemData;
import eu.ydp.empiria.player.client.controller.communication.sockets.ItemInterferenceSocket;
import eu.ydp.empiria.player.client.controller.events.activity.FlowActivityEvent;
import eu.ydp.empiria.player.client.controller.events.activity.FlowActivityEventType;
import eu.ydp.empiria.player.client.controller.events.interaction.StateChangedInteractionEvent;
import eu.ydp.empiria.player.client.controller.flow.processing.events.ActivityProcessingEvent;
import eu.ydp.empiria.player.client.controller.log.OperationLogEvent;
import eu.ydp.empiria.player.client.controller.log.OperationLogManager;
import eu.ydp.empiria.player.client.controller.session.sockets.ItemSessionSocket;
import eu.ydp.empiria.player.client.gin.factory.PageScopeFactory;
import eu.ydp.empiria.player.client.gin.scopes.page.PageScoped;
import eu.ydp.empiria.player.client.module.core.base.ParenthoodSocket;
import eu.ydp.empiria.player.client.util.events.internal.bus.EventsBus;
import eu.ydp.empiria.player.client.util.events.internal.page.PageEvent;
import eu.ydp.empiria.player.client.util.events.internal.page.PageEventHandler;
import eu.ydp.empiria.player.client.util.events.internal.page.PageEventTypes;
import eu.ydp.empiria.player.client.util.events.internal.player.PlayerEvent;
import eu.ydp.empiria.player.client.util.events.internal.player.PlayerEventTypes;
import eu.ydp.empiria.player.client.util.events.internal.scope.CurrentPageScope;
import eu.ydp.empiria.player.client.util.events.internal.state.StateChangeEvent;
import eu.ydp.empiria.player.client.util.events.internal.state.StateChangeEventHandler;
import eu.ydp.empiria.player.client.util.events.internal.state.StateChangeEventTypes;
import eu.ydp.empiria.player.client.view.item.ItemViewCarrier;
import eu.ydp.empiria.player.client.view.item.ItemViewSocket;

import static eu.ydp.empiria.player.client.util.events.internal.state.StateChangeEventTypes.OUTCOME_STATE_CHANGED;

/**
 * Controls the lifecycle of a single assessment item: builds it from
 * {@link ItemData}, wires it to the page-scoped event bus, pushes its view
 * into the {@link ItemViewSocket}, and relays state-change and page events
 * between the item and the rest of the player.
 */
public class ItemController implements PageEventHandler, StateChangeEventHandler {

    private final ItemStyleNameConstants styleNames;
    private final EventsBus eventsBus;
    private final ItemData data;
    private final IPlayerContainersAccessor accessor;
    private final PageScopeFactory pageScopeFactory;
    private final AssessmentControllerFactory controllerFactory;
    private final ItemViewSocket itemViewSocket;
    private final ItemSessionSocket itemSessionSocket;
    // The controlled item; null when init failed (guarded by null checks below).
    Item item;
    private int itemIndex;

    @Inject
    public ItemController(@Assisted ItemViewSocket itemViewSocket, @Assisted ItemSessionSocket itemSessionSocket, @PageScoped ItemData data,
                          IPlayerContainersAccessor accessor, ItemStyleNameConstants styleNames, EventsBus eventsBus, PageScopeFactory pageScopeFactory,
                          AssessmentControllerFactory controllerFactory) {
        this.itemViewSocket = itemViewSocket;
        this.itemSessionSocket = itemSessionSocket;
        this.data = data;
        this.accessor = accessor;
        this.styleNames = styleNames;
        this.eventsBus = eventsBus;
        this.pageScopeFactory = pageScopeFactory;
        this.controllerFactory = controllerFactory;
    }

    /**
     * Creates and starts the item. On any failure the item is discarded and
     * an error view is shown instead (the item's error message when present,
     * otherwise the exception details).
     */
    public void init(DisplayContentOptions options) {
        try {
            // Register for all Page events (formerly FLOW)
            CurrentPageScope currentPageScope = pageScopeFactory.getCurrentPageScope();
            eventsBus.addHandler(PageEvent.getTypes(PageEventTypes.values()), this, currentPageScope);
            eventsBus.addHandler(StateChangeEvent.getType(StateChangeEventTypes.STATE_CHANGED), this, currentPageScope);

            if (data.getData() == null) {
                throw new Exception("Item data is null");// NOPMD
            }
            itemIndex = data.itemIndex;
            item = controllerFactory.getItem(options, itemSessionSocket.getOutcomeVariablesMap(itemIndex), itemSessionSocket.getState(itemIndex));
            accessor.registerItemBodyContainer(itemIndex, item.getContentView());
            itemViewSocket.setItemView(getItemViewCarrier(item, data, options.useSkin()));

            item.setUp();
            item.start();
            eventsBus.fireEvent(new PlayerEvent(PlayerEventTypes.PAGE_CREATED_AND_STARTED), currentPageScope);
        } catch (Exception e) {
            item = null;
            // NOTE(review): e.getStackTrace() is an array, so this renders as
            // "[Ljava.lang.StackTraceElement;@…" — probably not the intent;
            // consider formatting the stack trace properly.
            itemViewSocket.setItemView(new ItemViewCarrier(data.errorMessage.length() > 0 ? data.errorMessage : e.getClass().getName() + "<br/>" + e.getMessage()
                    + "<br/>" + e.getStackTrace()));
            OperationLogManager.logEvent(OperationLogEvent.DISPLAY_ITEM_FAILED);
            e.printStackTrace();
        }
    }

    public void onShow() {
        item.onShow();
    }

    /** Closes the item and persists its state back into the session. */
    public void close() {
        if (item != null) {
            item.close();
            itemSessionSocket.setState(itemIndex, item.getState());
            // FIXME add deregistration? or a mechanism at the eventsBus level
            // interactionSocket.removeStateChangedInteractionEventsListener(this);
        }
    }

    /**
     * Processes an interaction state change through the item, then republishes
     * it as an OUTCOME_STATE_CHANGED event on the current page scope.
     */
    @Override
    public void onStateChange(StateChangeEvent event) {
        if (event.getType() == StateChangeEventTypes.STATE_CHANGED && event.getValue() instanceof StateChangedInteractionEvent) {
            StateChangedInteractionEvent scie = event.getValue();
            item.process(scie.isUserInteract(), scie.isReset(), scie.getSender());

            CurrentPageScope eventScope = pageScopeFactory.getCurrentPageScope();
            eventsBus.fireEvent(new StateChangeEvent(OUTCOME_STATE_CHANGED, scie), eventScope);
        }
    }

    public void checkItem() {
        if (item != null) {
            item.checkItem();
        }
    }

    public ItemInterferenceSocket getItemSocket() {
        return item;
    }

    /**
     * Wraps the item's views in a carrier: skinned mode carries only the
     * content view; otherwise a generated title widget and the score view are
     * included as well.
     */
    protected ItemViewCarrier getItemViewCarrier(Item item, ItemData itemData, boolean useSkin) {
        ItemViewCarrier carrier;
        if (useSkin) {
            carrier = new ItemViewCarrier(item.getContentView());
        } else {
            // Title index is 1-based for display.
            String index = String.valueOf(itemData.itemIndex + 1);
            Widget titleWidget = createTitleWidget(index, item.getTitle());
            carrier = new ItemViewCarrier(titleWidget, item.getContentView(), item.getScoreView());
        }
        return carrier;
    }

    /** Builds the "N. Title" header panel with the project's style names. */
    protected Widget createTitleWidget(String index, String text) {
        Panel titlePanel = new FlowPanel();
        titlePanel.setStyleName(styleNames.QP_ITEM_TITLE());

        Label indexLabel = new Label(index + ". ");
        indexLabel.setStyleName(styleNames.QP_ITEM_TITLE_INDEX());

        Label textLabel = new Label(text);
        textLabel.setStyleName(styleNames.QP_ITEM_TITLE_TEXT());

        titlePanel.add(indexLabel);
        titlePanel.add(textLabel);
        return titlePanel;
    }

    public void setAssessmentParenthoodSocket(ParenthoodSocket parenthoodSocket) {
        if (item != null) {
            item.setAssessmentParenthoodSocket(parenthoodSocket);
        }
    }

    /**
     * Translates incoming page events into legacy FlowActivityEvents and
     * forwards them to the item.
     */
    @Override
    public void onPageEvent(PageEvent event) {
        // forced by backward compatibility
        FlowActivityEvent newEvent;
        if (event.getValue() instanceof ActivityProcessingEvent) {
            newEvent = new FlowActivityEvent(FlowActivityEventType.valueOf(event.getType().name()), ((ActivityProcessingEvent) event.getValue()).getGroupIdentifier());
        } else {
            newEvent = new FlowActivityEvent(FlowActivityEventType.valueOf(event.getType().name()), null);
        }
        item.handleFlowActivityEvent(newEvent);
    }

    /**
     * Checks whether the item body contains at least one interactive module
     *
     * @return boolean
     */
    public boolean hasInteractiveModules() {
        return (item != null && item.hasInteractiveModules());
    }

    public void resetItem() {
        item.resetItem();
    }
}
/**
 * Copyright 2011, Big Switch Networks, Inc.
 * Originally created by David Erickson, Stanford University
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 **/
package net.floodlightcontroller.counter;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.counter.CounterValue.CounterType;
import net.floodlightcontroller.packet.Ethernet;
import net.floodlightcontroller.packet.IPv4;

import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketIn;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Implements a central store for system counters. These counters include
 * overall packet-in, packet-out, and flow-mod counters. Additional packet-in
 * counters are maintained for bcast/unicast/multicast traffic, as well as counters
 * for traffic types based on ethertype and ip-proto (maintained on a per switch
 * and controller level). These counters are maintained without the involvement of
 * any other module in the system. For per-module counters and other detailed
 * debug services, consider IDebugCounterService.
 *
 * Update path: hot threads bump per-thread integer buffers
 * (pktin_local_buffer / pktout_local_buffer); updateFlush() periodically
 * folds those deltas into the shared ICounter lists.
 *
 * @authors Kyle, Kanzhe, Mandeep and Saurav
 */
public class CounterStore implements IFloodlightModule, ICounterStoreService {
    protected static Logger log = LoggerFactory.getLogger(CounterStore.class);

    /** Network layer a category belongs to (L2 ethertype, L3 proto, L4). */
    public enum NetworkLayer {
        L2, L3, L4
    }

    /** Pairs a counter with its display title for the name index. */
    protected class CounterEntry {
        protected ICounter counter;
        String title;
    }

    /** Simple mutable int box used as the per-thread delta accumulator. */
    protected class MutableInt {
        int value = 0;

        public void increment() {
            value += 1;
        }

        public int get() {
            return value;
        }

        public void set(int val) {
            value = val;
        }
    }

    /**
     * Key for the counter maps: (OF message type, switch dpid, ethertype,
     * ip-proto). equals/hashCode are defined over all four fields so the
     * tuple can serve as a hash-map key.
     */
    protected class CounterKeyTuple {
        byte msgType;
        long dpid;
        short l3type;
        byte l4type;

        public CounterKeyTuple(byte msgType, long dpid, short l3type, byte l4type){
            this.msgType = msgType;
            this.dpid = dpid;
            this.l3type = l3type;
            this.l4type = l4type;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (obj == null) return false;
            if (!(obj instanceof CounterKeyTuple)) return false;
            CounterKeyTuple other = (CounterKeyTuple) obj;
            if (this.msgType == other.msgType &&
                this.dpid == other.dpid &&
                this.l3type == other.l3type &&
                this.l4type == other.l4type)
                    return true;
            return false;
        }

        @Override
        public int hashCode() {
            final int prime = 283;
            int result = 1;
            result = prime * result + msgType;
            result = prime * result + (int) (dpid ^ (dpid >>> 32));
            result = prime * result + l3type;
            result = prime * result + l4type;
            return result;
        }
    }

    /**
     * Counter storage across all threads. These are periodically updated from the
     * local per thread counters by the updateFlush method.
     */
    protected ConcurrentHashMap<CounterKeyTuple, List<ICounter>>
        pktinCounters = new ConcurrentHashMap<CounterKeyTuple, List<ICounter>>();
    protected ConcurrentHashMap<CounterKeyTuple, List<ICounter>>
        pktoutCounters = new ConcurrentHashMap<CounterKeyTuple, List<ICounter>>();

    /**
     * Thread local counter stores
     */
    protected final ThreadLocal<Map<CounterKeyTuple,MutableInt>> pktin_local_buffer =
        new ThreadLocal<Map<CounterKeyTuple,MutableInt>>() {
            @Override
            protected Map<CounterKeyTuple,MutableInt> initialValue() {
                return new HashMap<CounterKeyTuple,MutableInt>();
            }
        };
    protected final ThreadLocal<Map<CounterKeyTuple,MutableInt>> pktout_local_buffer =
        new ThreadLocal<Map<CounterKeyTuple,MutableInt>>() {
            @Override
            protected Map<CounterKeyTuple,MutableInt> initialValue() {
                return new HashMap<CounterKeyTuple,MutableInt>();
            }
        };

    /**
     * A cache of counterName --> Counter used to retrieve counters quickly via
     * string-counter-keys
     */
    protected ConcurrentHashMap<String, CounterEntry> nameToCEIndex =
            new ConcurrentHashMap<String, CounterEntry>();

    /**
     * Counter Categories grouped by network layers
     * NetworkLayer -> CounterToCategories
     */
    protected static Map<NetworkLayer, Map<String, List<String>>> layeredCategories =
            new ConcurrentHashMap<NetworkLayer, Map<String, List<String>>> ();

    //*******************************
    //   ICounterStoreService
    //*******************************

    /**
     * Bumps this thread's packet-in delta for the (msgType, dpid, l3, l4)
     * tuple; creates the shared counters lazily on first sight of the tuple.
     * Empty packet-ins are ignored.
     */
    @Override
    public void updatePacketInCountersLocal(IOFSwitch sw, OFMessage m, Ethernet eth) {
        if (((OFPacketIn)m).getPacketData().length <= 0) {
            return;
        }
        CounterKeyTuple countersKey = this.getCountersKey(sw, m, eth);
        Map<CounterKeyTuple, MutableInt> pktin_buffer = this.pktin_local_buffer.get();
        MutableInt currval = pktin_buffer.get(countersKey);

        if (currval == null) {
            this.createPacketInCounters(sw, m, eth); // create counters as side effect (if required)
            currval = new MutableInt();
            pktin_buffer.put(countersKey, currval);
        }
        currval.increment();
        return;
    }

    /**
     * Bumps this thread's packet-out/flow-mod delta for the message tuple;
     * creates the shared counters lazily on first sight of the tuple.
     */
    @Override
    public void updatePktOutFMCounterStoreLocal(IOFSwitch sw, OFMessage m) {
        CounterKeyTuple countersKey = this.getCountersKey(sw, m, null);
        Map<CounterKeyTuple, MutableInt> pktout_buffer = this.pktout_local_buffer.get();
        MutableInt currval = pktout_buffer.get(countersKey);

        if (currval == null) {
            this.getPktOutFMCounters(sw, m); // create counters as side effect (if required)
            currval = new MutableInt();
            pktout_buffer.put(countersKey, currval);
        }
        currval.increment();
        return;
    }

    /**
     * Flushes the calling thread's buffered deltas into the shared counters
     * and clears the buffers. Must be invoked on each update thread — it only
     * sees that thread's ThreadLocal buffers.
     */
    @Override
    public void updateFlush() {
        Date date = new Date();
        Map<CounterKeyTuple, MutableInt> pktin_buffer = this.pktin_local_buffer.get();
        for (CounterKeyTuple key : pktin_buffer.keySet()) {
            MutableInt currval = pktin_buffer.get(key);
            int delta = currval.get();
            if (delta > 0) {
                List<ICounter> counters = this.pktinCounters.get(key);
                if (counters != null) {
                    for (ICounter c : counters) {
                        c.increment(date, delta);
                    }
                }
            }
        }
        // We could do better "GC" of counters that have not been update "recently"
        pktin_buffer.clear();

        Map<CounterKeyTuple, MutableInt> pktout_buffer = this.pktout_local_buffer.get();
        for (CounterKeyTuple key : pktout_buffer.keySet()) {
            MutableInt currval = pktout_buffer.get(key);
            int delta = currval.get();
            if (delta > 0) {
                List<ICounter> counters = this.pktoutCounters.get(key);
                if (counters != null) {
                    for (ICounter c : counters) {
                        c.increment(date, delta);
                    }
                }
            }
        }
        // We could do better "GC" of counters that have not been update "recently"
        pktout_buffer.clear();
    }

    /**
     * Registers (or returns the existing) named counter. putIfAbsent keeps
     * the first registration if two threads race on the same key.
     */
    @Override
    public ICounter createCounter(String key, CounterValue.CounterType type) {
        CounterEntry ce;
        ICounter c;

        c = SimpleCounter.createCounter(new Date(), type);
        ce = new CounterEntry();
        ce.counter = c;
        ce.title = key;
        nameToCEIndex.putIfAbsent(key, ce);

        return nameToCEIndex.get(key).counter;
    }

    /** Looks up a counter by name; null when it was never created. */
    @Override
    public ICounter getCounter(String key) {
        CounterEntry counter = nameToCEIndex.get(key);
        if (counter != null) {
            return counter.counter;
        } else {
            return null;
        }
    }

    /* (non-Javadoc)
     * @see net.floodlightcontroller.counter.ICounterStoreService#getAll()
     */
    @Override
    public Map<String, ICounter> getAll() {
        Map<String, ICounter> ret = new ConcurrentHashMap<String, ICounter>();
        for(Map.Entry<String, CounterEntry> counterEntry : this.nameToCEIndex.entrySet()) {
            String key = counterEntry.getKey();
            ICounter counter = counterEntry.getValue().counter;
            ret.put(key, counter);
        }
        return ret;
    }

    /** Categories recorded for a counter at the given layer; null when none. */
    @Override
    public List<String> getAllCategories(String counterName, NetworkLayer layer) {
        if (layeredCategories.containsKey(layer)) {
            Map<String, List<String>> counterToCategories = layeredCategories.get(layer);
            if (counterToCategories.containsKey(counterName)) {
                return counterToCategories.get(counterName);
            }
        }
        return null;
    }

    /**
     * Create a title based on switch ID, portID, vlanID, and counterName
     * If portID is -1, the title represents the given switch only
     * If portID is a non-negative number, the title represents the port on the given switch
     */
    public static String createCounterName(String switchID, int portID, String counterName) {
        if (portID < 0) {
            return switchID + TitleDelimitor + counterName;
        } else {
            return switchID + TitleDelimitor + portID + TitleDelimitor + counterName;
        }
    }

    //*******************************
    //   Internal Methods
    //*******************************

    /**
     * Derives the counter key tuple from a message: ethertype and, for IPv4
     * (0x0800), the IP protocol are taken from the parsed Ethernet frame;
     * both are 0 when eth is null (packet-out/flow-mod path).
     */
    protected CounterKeyTuple getCountersKey(IOFSwitch sw, OFMessage m, Ethernet eth) {
        byte mtype = m.getType().getTypeValue();
        short l3type = 0;
        byte l4type = 0;
        if (eth != null) {
            l3type = eth.getEtherType();
            if (l3type == (short)0x0800) {
                IPv4 ipV4 = (IPv4)eth.getPayload();
                l4type = ipV4.getProtocol();
            }
        }
        return new CounterKeyTuple(mtype, sw.getId(), l3type, l4type);
    }

    /**
     * Builds (or returns the cached) list of packet-in counters for this
     * message: overall controller/switch counters plus L2 (bcast/mcast/
     * unicast), L3 (ethertype alias) and, for IPv4, L4 (ip-proto alias)
     * category counters.
     */
    protected List<ICounter> createPacketInCounters(IOFSwitch sw, OFMessage m, Ethernet eth) {
        /* If possible, find and return counters for this tuple */
        CounterKeyTuple countersKey = this.getCountersKey(sw, m, eth);
        List<ICounter> counters =
            this.pktinCounters.get(countersKey);
        if (counters != null) {
                return counters;
        }

        /*
         *  Create the required counters
         */
        counters = new ArrayList<ICounter>();

        int l3type = eth.getEtherType() & 0xffff;
        String switchIdHex = sw.getStringId();
        String etherType = String.format("%04x", eth.getEtherType());
        String packetName = m.getType().toClass().getName();
        packetName = packetName.substring(packetName.lastIndexOf('.')+1);

        // L2 Type
        String l2Type = null;
        if (eth.isBroadcast()) {
            l2Type = BROADCAST;
        } else if (eth.isMulticast()) {
            l2Type = MULTICAST;
        } else {
            l2Type = UNICAST;
        }

        /*
         * Use alias for L3 type
         * Valid EtherType must be greater than or equal to 0x0600
         * It is V1 Ethernet Frame if EtherType < 0x0600
         */
        if (l3type < 0x0600) {
            etherType = "0599";
        }
        if (TypeAliases.l3TypeAliasMap != null &&
            TypeAliases.l3TypeAliasMap.containsKey(etherType)) {
            etherType = TypeAliases.l3TypeAliasMap.get(etherType);
        } else {
            etherType = "L3_" + etherType;
        }

        // overall controller packet counter names
        String controllerCounterName =
            CounterStore.createCounterName(
                CONTROLLER_NAME,
                -1,
                packetName);
        counters.add(createCounter(controllerCounterName, CounterType.LONG));

        String switchCounterName =
            CounterStore.createCounterName(
                switchIdHex,
                -1,
                packetName);
        counters.add(createCounter(switchCounterName, CounterType.LONG));

        // L2 counter names
        String controllerL2CategoryCounterName =
            CounterStore.createCounterName(
                CONTROLLER_NAME,
                -1,
                packetName,
                l2Type,
                NetworkLayer.L2);
        counters.add(createCounter(controllerL2CategoryCounterName, CounterType.LONG));

        String switchL2CategoryCounterName =
            CounterStore.createCounterName(
                switchIdHex,
                -1,
                packetName,
                l2Type,
                NetworkLayer.L2);
        counters.add(createCounter(switchL2CategoryCounterName, CounterType.LONG));

        // L3 counter names
        String controllerL3CategoryCounterName =
            CounterStore.createCounterName(
                CONTROLLER_NAME,
                -1,
                packetName,
                etherType,
                NetworkLayer.L3);
        counters.add(createCounter(controllerL3CategoryCounterName, CounterType.LONG));

        String switchL3CategoryCounterName =
            CounterStore.createCounterName(
                switchIdHex,
                -1,
                packetName,
                etherType,
                NetworkLayer.L3);
        counters.add(createCounter(switchL3CategoryCounterName, CounterType.LONG));

        // L4 counters
        if (l3type == (short)0x0800) {
            // resolve protocol alias
            IPv4 ipV4 = (IPv4)eth.getPayload();
            String l4name = String.format("%02x", ipV4.getProtocol());
            if (TypeAliases.l4TypeAliasMap != null &&
                TypeAliases.l4TypeAliasMap.containsKey(l4name)) {
                l4name = TypeAliases.l4TypeAliasMap.get(l4name);
            } else {
                l4name = "L4_" + l4name;
            }

            // create counters
            String controllerL4CategoryCounterName =
                CounterStore.createCounterName(
                    CONTROLLER_NAME,
                    -1,
                    packetName,
                    l4name,
                    NetworkLayer.L4);
            counters.add(createCounter(controllerL4CategoryCounterName, CounterType.LONG));

            String switchL4CategoryCounterName =
                CounterStore.createCounterName(
                    switchIdHex,
                    -1,
                    packetName,
                    l4name,
                    NetworkLayer.L4);
            counters.add(createCounter(switchL4CategoryCounterName, CounterType.LONG));
        }

        /* Add to map and return; putIfAbsent + get keeps the winner of a race. */
        this.pktinCounters.putIfAbsent(countersKey, counters);
        return this.pktinCounters.get(countersKey);
    }

    /**
     * Builds (or returns the cached) controller- and switch-level counters
     * for a packet-out / flow-mod message.
     */
    protected List<ICounter> getPktOutFMCounters(IOFSwitch sw, OFMessage m) {
        /* If possible, find and return counters for this tuple */
        CounterKeyTuple countersKey = this.getCountersKey(sw, m, null);
        List<ICounter> counters =
            this.pktoutCounters.get(countersKey);
        if (counters != null) {
                return counters;
        }

        /*
         *  Create the required counters
         */
        counters = new ArrayList<ICounter>();

        /* String values for names */
        String switchIdHex = sw.getStringId();
        String packetName = m.getType().toClass().getName();
        packetName = packetName.substring(packetName.lastIndexOf('.')+1);

        String controllerFMCounterName =
            CounterStore.createCounterName(
                CONTROLLER_NAME,
                -1,
                packetName);
        counters.add(createCounter(controllerFMCounterName, CounterValue.CounterType.LONG));

        String switchFMCounterName =
            CounterStore.createCounterName(
                switchIdHex,
                -1,
                packetName);
        counters.add(createCounter(switchFMCounterName, CounterValue.CounterType.LONG));

        /* Add to map and return */
        this.pktoutCounters.putIfAbsent(countersKey, counters);
        return this.pktoutCounters.get(countersKey);
    }

    /**
     * Create a title based on switch ID, portID, vlanID, counterName, and subCategory
     * If portID is -1, the title represents the given switch only
     * If portID is a non-negative number, the title represents the port on the given switch
     * For example: PacketIns can be further categorized based on L2 etherType or L3 protocol
     *
     * Also records subCategory under the group name in layeredCategories so
     * getAllCategories() can enumerate it later.
     */
    protected static String createCounterName(String switchID, int portID, String counterName,
                                           String subCategory, NetworkLayer layer) {
        String fullCounterName = "";
        String groupCounterName = "";

        if (portID < 0) {
            groupCounterName = switchID + TitleDelimitor + counterName;
            fullCounterName = groupCounterName + TitleDelimitor + subCategory;
        } else {
            groupCounterName = switchID + TitleDelimitor + portID + TitleDelimitor + counterName;
            fullCounterName = groupCounterName + TitleDelimitor + subCategory;
        }

        Map<String, List<String>> counterToCategories;
        if (layeredCategories.containsKey(layer)) {
            counterToCategories = layeredCategories.get(layer);
        } else {
            counterToCategories = new ConcurrentHashMap<String, List<String>> ();
            layeredCategories.put(layer, counterToCategories);
        }

        List<String> categories;
        if (counterToCategories.containsKey(groupCounterName)) {
            categories = counterToCategories.get(groupCounterName);
        } else {
            categories = new ArrayList<String>();
            counterToCategories.put(groupCounterName, categories);
        }

        if (!categories.contains(subCategory)) {
            categories.add(subCategory);
        }
        return fullCounterName;
    }

    //*******************************
    //   IFloodlightProvider
    //*******************************

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleServices() {
        Collection<Class<? extends IFloodlightService>> services =
                new ArrayList<Class<? extends IFloodlightService>>(1);
        services.add(ICounterStoreService.class);
        return services;
    }

    @Override
    public Map<Class<? extends IFloodlightService>, IFloodlightService>
            getServiceImpls() {
        Map<Class<? extends IFloodlightService>, IFloodlightService> m =
                new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
        m.put(ICounterStoreService.class, this);
        return m;
    }

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
        // no-op, no dependencies
        return null;
    }

    @Override
    public void init(FloodlightModuleContext context) throws FloodlightModuleException {
        // no-op for now
    }

    @Override
    public void startUp(FloodlightModuleContext context) {
        // no-op for now
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.streaming; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.WritableByteChannel; import java.util.Collection; import java.util.Collections; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelOutboundHandlerAdapter; import io.netty.channel.ChannelPromise; import io.netty.channel.embedded.EmbeddedChannel; import org.apache.cassandra.SchemaLoader; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.Keyspace; import org.apache.cassandra.db.RowUpdateBuilder; import 
org.apache.cassandra.db.compaction.CompactionManager; import org.apache.cassandra.db.compaction.OperationType; import org.apache.cassandra.db.lifecycle.LifecycleTransaction; import org.apache.cassandra.dht.ByteOrderedPartitioner; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import org.apache.cassandra.io.sstable.Descriptor; import org.apache.cassandra.io.sstable.IndexSummaryManager; import org.apache.cassandra.io.sstable.IndexSummaryRedistribution; import org.apache.cassandra.io.sstable.SSTableUtils; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.io.util.DataInputBuffer; import org.apache.cassandra.locator.InetAddressAndPort; import org.apache.cassandra.locator.RangesAtEndpoint; import org.apache.cassandra.net.AsyncStreamingOutputPlus; import org.apache.cassandra.net.BufferPoolAllocator; import org.apache.cassandra.net.MessagingService; import org.apache.cassandra.net.SharedDefaultFileRegion; import org.apache.cassandra.schema.KeyspaceParams; import org.apache.cassandra.schema.MigrationManager; import org.apache.cassandra.schema.TableMetadata; import org.apache.cassandra.service.ActiveRepairService; import org.apache.cassandra.streaming.DefaultConnectionFactory; import org.apache.cassandra.streaming.OutgoingStream; import org.apache.cassandra.streaming.PreviewKind; import org.apache.cassandra.streaming.SessionInfo; import org.apache.cassandra.streaming.StreamCoordinator; import org.apache.cassandra.streaming.StreamOperation; import org.apache.cassandra.streaming.StreamResultFuture; import org.apache.cassandra.streaming.StreamSession; import org.apache.cassandra.streaming.StreamSummary; import org.apache.cassandra.streaming.messages.StreamMessageHeader; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.Throwables; import org.jboss.byteman.contrib.bmunit.BMRule; import 
org.jboss.byteman.contrib.bmunit.BMUnitRunner;

import static org.apache.cassandra.service.ActiveRepairService.NO_PENDING_REPAIR;
import static org.junit.Assert.assertTrue;

/**
 * Verifies that entire-sstable streaming still delivers a content-identical sstable while
 * sstable components are mutated concurrently (stats-metadata rewrite, index-summary
 * redistribution). Each test serializes the single compacted sstable of {@code Standard1}
 * through an in-memory netty channel, deserializes the captured bytes with
 * {@code CassandraEntireSSTableStreamReader}, and compares the result to the original.
 */
@RunWith(BMUnitRunner.class)
public class EntireSSTableStreamConcurrentComponentMutationTest
{
    public static final String KEYSPACE = "CassandraEntireSSTableStreamLockTest";
    public static final String CF_STANDARD = "Standard1";

    // task used when a test needs no extra action before / concurrent with streaming
    private static final Callable<?> NO_OP = () -> null;

    private static SSTableReader sstable;
    private static Descriptor descriptor;
    private static ColumnFamilyStore store;
    private static RangesAtEndpoint rangesAtEndpoint;
    private static ExecutorService service;
    // counted down by the Byteman rule (via countDown()) once index-summary serialization has started
    private static CountDownLatch latch = new CountDownLatch(1);

    /** Creates the schema, inserts 10 rows, flushes and compacts them into a single sstable. */
    @BeforeClass
    public static void defineSchemaAndPrepareSSTable()
    {
        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE,
                                    KeyspaceParams.simple(1),
                                    SchemaLoader.standardCFMD(KEYSPACE, CF_STANDARD));
        Keyspace keyspace = Keyspace.open(KEYSPACE);
        store = keyspace.getColumnFamilyStore("Standard1");

        // insert data and compact to a single sstable
        CompactionManager.instance.disableAutoCompaction();
        for (int j = 0; j < 10; j++)
        {
            new RowUpdateBuilder(store.metadata(), j, String.valueOf(j))
                .clustering("0")
                .add("val", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                .build()
                .applyUnsafe();
        }
        store.forceBlockingFlush();
        CompactionManager.instance.performMaximal(store, false);

        // token range that selects the test sstable for outgoing streams
        Token start = ByteOrderedPartitioner.instance.getTokenFactory().fromString(Long.toHexString(0));
        Token end = ByteOrderedPartitioner.instance.getTokenFactory().fromString(Long.toHexString(100));
        rangesAtEndpoint = RangesAtEndpoint.toDummyList(Collections.singleton(new Range<>(start, end)));

        // one thread for the streaming task, one for the concurrent mutation task
        service = Executors.newFixedThreadPool(2);
    }

    @AfterClass
    public static void cleanup()
    {
        service.shutdown();
    }

    /** Re-reads the (single) live sstable before each test. */
    @Before
    public void init()
    {
        sstable = store.getLiveSSTables().iterator().next();
        descriptor = sstable.descriptor;
    }

    @After
    public void reset() throws IOException
    {
        latch = new CountDownLatch(1);
        // reset repair info to avoid tests interfering with each other
        descriptor.getMetadataSerializer().mutateRepairMetadata(descriptor, 0, ActiveRepairService.NO_PENDING_REPAIR, false);
    }

    /** Baseline: streaming with no concurrent mutation must round-trip the sstable. */
    @Test
    public void testStream() throws Exception
    {
        testStreamWithConcurrentComponentMutation(NO_OP, NO_OP);
    }

    /**
     * Entire-sstable-streaming receiver will throw checksum validation failure because concurrent stats metadata
     * update causes the actual transferred file size to be different from the one in {@link ComponentManifest}
     */
    @Test
    public void testStreamWithStatsMutation() throws Exception
    {
        testStreamWithConcurrentComponentMutation(() -> {
            Descriptor desc = sstable.descriptor;
            desc.getMetadataSerializer().mutate(desc, "testing", stats -> stats.mutateRepairedMetadata(0, UUID.randomUUID(), false));
            return null;
        }, NO_OP);
    }

    /**
     * The Byteman rule pauses SSTableReader.saveSummary right after serialization starts (and
     * signals {@link #latch}); streaming begins only once the summary file is partially written,
     * which would expose a partially-written component if there were no lock.
     */
    @Test
    @BMRule(name = "Delay saving index summary, manifest may link partially written file if there is no lock",
            targetClass = "SSTableReader",
            targetMethod = "saveSummary(Descriptor, DecoratedKey, DecoratedKey, IndexSummary)",
            targetLocation = "AFTER INVOKE serialize",
            condition = "$descriptor.cfname.contains(\"Standard1\")",
            action = "org.apache.cassandra.db.streaming.EntireSSTableStreamConcurrentComponentMutationTest.countDown();Thread.sleep(5000);")
    public void testStreamWithIndexSummaryRedistributionDelaySavingSummary() throws Exception
    {
        testStreamWithConcurrentComponentMutation(() -> {
            // wait until new index summary is partially written
            latch.await(1, TimeUnit.MINUTES);
            return null;
        }, this::indexSummaryRedistribution);
    }

    // used by byteman
    private static void countDown()
    {
        latch.countDown();
    }

    /**
     * Streams the sstable into an in-memory buffer while {@code runConcurrentWithStreaming}
     * executes on a second thread, then deserializes the captured bytes and asserts the
     * streamed sstable's content equals the original.
     */
    private void testStreamWithConcurrentComponentMutation(Callable<?> runBeforeStreaming, Callable<?> runConcurrentWithStreaming) throws Exception
    {
        ByteBuf serializedFile = Unpooled.buffer(8192);
        InetAddressAndPort peer = FBUtilities.getBroadcastAddressAndPort();
        StreamSession session = setupStreamingSessionForTest();
        Collection<OutgoingStream> outgoingStreams = store.getStreamManager().createOutgoingStreams(session, rangesAtEndpoint, NO_PENDING_REPAIR, PreviewKind.NONE);
        CassandraOutgoingFile outgoingFile = (CassandraOutgoingFile) Iterables.getOnlyElement(outgoingStreams);

        Future<?> streaming = executeAsync(() -> {
            runBeforeStreaming.call();

            try (AsyncStreamingOutputPlus out = new AsyncStreamingOutputPlus(createMockNettyChannel(serializedFile)))
            {
                outgoingFile.write(session, out, MessagingService.current_version);
                // streaming must not leave temporary component files behind
                assertTrue(sstable.descriptor.getTemporaryFiles().isEmpty());
            }
            return null;
        });

        Future<?> concurrentMutations = executeAsync(runConcurrentWithStreaming);

        streaming.get(3, TimeUnit.MINUTES);
        concurrentMutations.get(3, TimeUnit.MINUTES);

        // replay the captured wire bytes through the entire-sstable stream reader
        session.prepareReceiving(new StreamSummary(sstable.metadata().id, 1, 5104));
        StreamMessageHeader messageHeader = new StreamMessageHeader(sstable.metadata().id, peer, session.planId(), false, 0, 0, 0, null);

        try (DataInputBuffer in = new DataInputBuffer(serializedFile.nioBuffer(), false))
        {
            CassandraStreamHeader header = CassandraStreamHeader.serializer.deserialize(in, MessagingService.current_version);
            CassandraEntireSSTableStreamReader reader = new CassandraEntireSSTableStreamReader(messageHeader, header, session);
            SSTableReader streamedSSTable = Iterables.getOnlyElement(reader.read(in).finished());

            SSTableUtils.assertContentEquals(sstable, streamedSSTable);
        }
    }

    /** Forces an index-summary rewrite of the test table by tightening min/max index intervals. */
    private boolean indexSummaryRedistribution() throws IOException
    {
        long nonRedistributingOffHeapSize = 0;
        long memoryPoolBytes = 1024 * 1024;

        // rewrite index summary file with new min/max index interval
        TableMetadata origin = store.metadata();
        MigrationManager.announceTableUpdate(origin.unbuild().minIndexInterval(1).maxIndexInterval(2).build(), true);

        try (LifecycleTransaction txn = store.getTracker().tryModify(sstable, OperationType.INDEX_SUMMARY))
        {
            IndexSummaryManager.redistributeSummaries(new IndexSummaryRedistribution(ImmutableMap.of(store.metadata().id, txn), nonRedistributingOffHeapSize, memoryPoolBytes));
        }

        // reset min/max index interval
        MigrationManager.announceTableUpdate(origin, true);
        return true;
    }

    /** Submits {@code task} to the shared executor, rethrowing any exception unchecked. */
    private Future<?> executeAsync(Callable<?> task)
    {
        return service.submit(() -> {
            try
            {
                task.call();
            }
            catch (Exception e)
            {
                throw Throwables.unchecked(e);
            }
        });
    }

    /**
     * Builds an {@link EmbeddedChannel} whose outbound path copies every written buffer or file
     * region into {@code serializedFile}, capturing exactly the bytes that would go on the wire.
     */
    private EmbeddedChannel createMockNettyChannel(ByteBuf serializedFile)
    {
        WritableByteChannel wbc = new WritableByteChannel()
        {
            private boolean isOpen = true;

            public int write(ByteBuffer src)
            {
                int size = src.limit();
                serializedFile.writeBytes(src);
                return size;
            }

            public boolean isOpen()
            {
                return isOpen;
            }

            public void close()
            {
                isOpen = false;
            }
        };

        return new EmbeddedChannel(new ChannelOutboundHandlerAdapter()
        {
            @Override
            public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception
            {
                if (msg instanceof BufferPoolAllocator.Wrapped)
                {
                    ByteBuffer buf = ((BufferPoolAllocator.Wrapped) msg).adopt();
                    wbc.write(buf);
                }
                else
                {
                    ((SharedDefaultFileRegion) msg).transferTo(wbc, 0);
                }
                super.write(ctx, msg, promise);
            }
        });
    }

    /** Creates an initialized bootstrap {@link StreamSession} pointed at this node. */
    private StreamSession setupStreamingSessionForTest()
    {
        StreamCoordinator streamCoordinator = new StreamCoordinator(StreamOperation.BOOTSTRAP, 1, new DefaultConnectionFactory(), false, false, null, PreviewKind.NONE);
        StreamResultFuture future = StreamResultFuture.createInitiator(UUID.randomUUID(), StreamOperation.BOOTSTRAP, Collections.emptyList(), streamCoordinator);

        InetAddressAndPort peer = FBUtilities.getBroadcastAddressAndPort();
        streamCoordinator.addSessionInfo(new SessionInfo(peer, 0, peer, Collections.emptyList(), Collections.emptyList(), StreamSession.State.INITIALIZED));

        StreamSession session = streamCoordinator.getOrCreateNextSession(peer);
        session.init(future);
        return session;
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.dataFlow;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInspection.dataFlow.instructions.*;
import com.intellij.codeInspection.dataFlow.value.*;
import com.intellij.psi.*;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.FactoryMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Set;

/**
 * Standard instruction interpreter for the data-flow analyzer: evaluates assignments, method
 * calls, casts, field dereferences and binary operations over {@code DfaValue}s, firing the
 * protected {@code on*} hooks when a problem (NPE, CCE, nullable return, null argument) is
 * detected. It also records which binop/instanceof instructions were reachable and useful so
 * inspections can query {@link #isInstanceofRedundant} and {@link #canBeNull} afterwards.
 *
 * @author peter
 */
public class StandardInstructionVisitor extends InstructionVisitor {
  // binop instructions actually visited during analysis
  private final Set<BinopInstruction> myReachable = new THashSet<BinopInstruction>();
  // binops registered by visitBinop when their relation (or instanceof left operand) may involve null
  private final Set<BinopInstruction> myCanBeNullInInstanceof = new THashSet<BinopInstruction>();
  // instanceof instructions whose outcome was not fully predetermined by known types
  private final Set<InstanceofInstruction> myUsefulInstanceofs = new THashSet<InstanceofInstruction>();

  // Lazily computed per call site: for each formal parameter of the resolved callee, whether it
  // is annotated @NotNull. Empty array when the callee cannot be resolved.
  private final FactoryMap<MethodCallInstruction, boolean[]> myParametersNotNull = new FactoryMap<MethodCallInstruction, boolean[]>() {
    @Override
    protected boolean[] create(MethodCallInstruction key) {
      final PsiCallExpression callExpression = key.getCallExpression();
      final PsiMethod callee = callExpression == null ? null : callExpression.resolveMethod();
      if (callee != null) {
        final PsiParameter[] params = callee.getParameterList().getParameters();
        boolean[] result = new boolean[params.length];
        for (int i = 0; i < params.length; i++) {
          result[i] = AnnotationUtil.isAnnotated(params[i], AnnotationUtil.NOT_NULL, false);
        }
        return result;
      }
      else {
        return ArrayUtil.EMPTY_BOOLEAN_ARRAY;
      }
    }
  };

  // Lazily computed per call site: TRUE = callee is @Nullable, FALSE = @NotNull (or a
  // constructor call, which never yields null), null = nullability unknown.
  private final FactoryMap<MethodCallInstruction, Boolean> myCalleeNullability = new FactoryMap<MethodCallInstruction, Boolean>() {
    @Override
    protected Boolean create(MethodCallInstruction key) {
      final PsiCallExpression callExpression = key.getCallExpression();
      if (callExpression instanceof PsiNewExpression) {
        return Boolean.FALSE;
      }

      if (callExpression != null) {
        final PsiMethod callee = callExpression.resolveMethod();
        if (callee != null) {
          if (AnnotationUtil.isNullable(callee)) {
            return Boolean.TRUE;
          }
          if (AnnotationUtil.isNotNull(callee)) {
            return Boolean.FALSE;
          }
        }
      }
      return null;
    }
  };

  /**
   * Pops source then destination; if the destination variable is @NotNull and the source cannot
   * be proven non-null, reports via {@link #onAssigningToNotNullableVariable}. The destination
   * (not the source) is pushed back as the value of the assignment expression.
   */
  @Override
  public DfaInstructionState[] visitAssign(AssignInstruction instruction, DataFlowRunner runner, DfaMemoryState memState) {
    DfaValue dfaSource = memState.pop();
    DfaValue dfaDest = memState.pop();

    if (dfaDest instanceof DfaVariableValue) {
      DfaVariableValue var = (DfaVariableValue) dfaDest;
      final PsiVariable psiVariable = var.getPsiVariable();
      if (AnnotationUtil.isAnnotated(psiVariable, AnnotationUtil.NOT_NULL, false)) {
        if (!memState.applyNotNull(dfaSource)) {
          onAssigningToNotNullableVariable(instruction, runner);
        }
      }
      memState.setVarValue(var, dfaSource);
    }
    memState.push(dfaDest);

    return nextInstruction(instruction, runner, memState);
  }

  protected void onAssigningToNotNullableVariable(AssignInstruction instruction, DataFlowRunner runner) {}

  /** Pops the return value and reports via {@link #onNullableReturn} if it may be null. */
  @Override
  public DfaInstructionState[] visitCheckReturnValue(CheckReturnValueInstruction instruction,
                                                     DataFlowRunner runner,
                                                     DfaMemoryState memState) {
    final DfaValue retValue = memState.pop();
    if (!memState.checkNotNullable(retValue)) {
      onNullableReturn(instruction, runner);
    }
    return nextInstruction(instruction, runner, memState);
  }

  protected void onNullableReturn(CheckReturnValueInstruction instruction, DataFlowRunner runner) {}

  /**
   * Pops the qualifier of a field access; on a possible-null dereference of a physical
   * expression the path is reported as NPE and terminated (no successor states).
   */
  @Override
  public DfaInstructionState[] visitFieldReference(FieldReferenceInstruction instruction, DataFlowRunner runner, DfaMemoryState memState) {
    final DfaValue qualifier = memState.pop();
    if (instruction.getExpression().isPhysical() && !memState.applyNotNull(qualifier)) {
      onInstructionProducesNPE(instruction, runner);
      return DfaInstructionState.EMPTY_ARRAY;
    }

    return nextInstruction(instruction, runner, memState);
  }

  protected void onInstructionProducesNPE(FieldReferenceInstruction instruction, DataFlowRunner runner) {}

  /**
   * Models a cast as an "instanceof-or-null" relation; when the state proves the relation
   * impossible, the cast would throw ClassCastException and {@link #onInstructionProducesCCE}
   * is fired.
   */
  @Override
  public DfaInstructionState[] visitTypeCast(TypeCastInstruction instruction, DataFlowRunner runner, DfaMemoryState memState) {
    final DfaValueFactory factory = runner.getFactory();
    DfaValue dfaExpr = factory.create(instruction.getCasted());
    if (dfaExpr != null) {
      DfaTypeValue dfaType = factory.getTypeFactory().create(instruction.getCastTo());
      DfaRelationValue dfaInstanceof = factory.getRelationFactory().create(dfaExpr, dfaType, "instanceof", false);
      if (dfaInstanceof != null && !memState.applyInstanceofOrNull(dfaInstanceof)) {
        onInstructionProducesCCE(instruction, runner);
      }
    }
    return nextInstruction(instruction, runner, memState);
  }

  protected void onInstructionProducesCCE(TypeCastInstruction instruction, DataFlowRunner runner) {}

  /**
   * Pops arguments (in reverse of declaration order, hence {@code revIdx}) and the qualifier,
   * reporting nulls passed into @NotNull parameters and null-qualifier dereferences; the call
   * result is pushed in the finally-block via {@link #pushResult} so it happens on every path.
   */
  @Override
  public DfaInstructionState[] visitMethodCall(MethodCallInstruction instruction, DataFlowRunner runner, DfaMemoryState memState) {
    final PsiExpression[] args = instruction.getArgs();
    final boolean[] parametersNotNull = myParametersNotNull.get(instruction);
    final DfaNotNullValue.Factory factory = runner.getFactory().getNotNullFactory();
    for (int i = 0; i < args.length; i++) {
      final DfaValue arg = memState.pop();
      final int revIdx = args.length - i - 1;
      // NOTE(review): "args.length <= parametersNotNull.length" presumably skips calls with more
      // arguments than formals (varargs expansion) — TODO confirm the intended condition
      if (args.length <= parametersNotNull.length && revIdx < parametersNotNull.length && parametersNotNull[revIdx] && !memState.applyNotNull(arg)) {
        onPassingNullParameter(runner, args[revIdx]);
        if (arg instanceof DfaVariableValue) {
          // after reporting, assume the variable is non-null to avoid cascading warnings
          memState.setVarValue((DfaVariableValue)arg, factory.create(((DfaVariableValue)arg).getPsiVariable().getType()));
        }
      }
    }

    @NotNull final DfaValue qualifier = memState.pop();
    try {
      if (!memState.applyNotNull(qualifier)) {
        if (instruction.getMethodType() == MethodCallInstruction.MethodType.UNBOXING) {
          onUnboxingNullable(instruction, runner);
        }
        else {
          onInstructionProducesNPE(instruction, runner);
        }
        if (qualifier instanceof DfaVariableValue) {
          // same recovery as above: treat the qualifier as non-null from here on
          memState.setVarValue((DfaVariableValue)qualifier, factory.create(((DfaVariableValue)qualifier).getPsiVariable().getType()));
        }
      }

      return nextInstruction(instruction, runner, memState);
    }
    finally {
      pushResult(instruction, memState, qualifier, runner.getFactory());
      if (instruction.shouldFlushFields()) {
        memState.flushFields(runner);
      }
    }
  }

  /**
   * Computes and pushes the abstract value of a call result: annotated nullability for
   * reference-typed results, boxed/unboxed wrappers for (un)boxing conversions, and constant
   * folding for casts of known constants. Unknown results push {@code DfaUnknownValue}.
   */
  private void pushResult(MethodCallInstruction instruction, DfaMemoryState state, final DfaValue oldValue, DfaValueFactory factory) {
    final PsiType type = instruction.getResultType();
    final MethodCallInstruction.MethodType methodType = instruction.getMethodType();

    DfaValue dfaValue = null;
    if (type != null && (type instanceof PsiClassType || type.getArrayDimensions() > 0)) {
      @Nullable final Boolean nullability = myCalleeNullability.get(instruction);
      dfaValue = nullability == Boolean.FALSE
                 ? factory.getNotNullFactory().create(type)
                 : factory.getTypeFactory().create(type, nullability == Boolean.TRUE);
    }
    else if (methodType == MethodCallInstruction.MethodType.UNBOXING) {
      dfaValue = factory.getBoxedFactory().createUnboxed(oldValue);
    }
    else if (methodType == MethodCallInstruction.MethodType.BOXING) {
      dfaValue = factory.getBoxedFactory().createBoxed(oldValue);
    }
    else if (methodType == MethodCallInstruction.MethodType.CAST) {
      if (oldValue instanceof DfaConstValue) {
        final DfaConstValue constValue = (DfaConstValue)oldValue;
        Object o = constValue.getValue();
        // NOTE(review): constants are normalized via a cast to LONG (floats only when they have
        // no fractional part), regardless of the actual cast target type — presumably to make
        // numerically-equal constants compare equal; confirm against DfaConstValue.Factory
        if (o instanceof Double || o instanceof Float) {
          double dbVal = o instanceof Double ? ((Double)o).doubleValue() : ((Float)o).doubleValue();
          // 5.0f == 5
          if (Math.floor(dbVal) == dbVal) o = TypeConversionUtil.computeCastTo(o, PsiType.LONG);
        }
        else {
          o = TypeConversionUtil.computeCastTo(o, PsiType.LONG);
        }
        dfaValue = factory.getConstFactory().createFromValue(o, type);
      }
      else {
        dfaValue = oldValue;
      }
    }
    state.push(dfaValue == null ? DfaUnknownValue.getInstance() : dfaValue);
  }

  protected void onInstructionProducesNPE(MethodCallInstruction instruction, DataFlowRunner runner) {}

  protected void onUnboxingNullable(MethodCallInstruction instruction, DataFlowRunner runner) {}

  protected void onPassingNullParameter(DataFlowRunner runner, PsiExpression arg) {}

  /**
   * Evaluates a binary operation. Relational operators fork the state into a true-branch copy
   * and a false branch; {@code ==}/{@code !=} on two constants folds immediately. The NaN checks
   * negate the relation because NaN compares unequal to everything, including itself.
   */
  @Override
  public DfaInstructionState[] visitBinop(BinopInstruction instruction, DataFlowRunner runner, DfaMemoryState memState) {
    myReachable.add(instruction);

    final Instruction next = runner.getInstruction(instruction.getIndex() + 1);
    DfaValue dfaRight = memState.pop();
    DfaValue dfaLeft = memState.pop();

    final String opSign = instruction.getOperationSign();
    if (opSign != null) {
      final DfaValueFactory factory = runner.getFactory();
      // constant == / != constant: fold to a known boolean (flipped when NaN is possible)
      if (("==".equals(opSign) || "!=".equals(opSign)) &&
          dfaLeft instanceof DfaConstValue && dfaRight instanceof DfaConstValue) {
        boolean negated = "!=".equals(opSign) ^ (memState.canBeNaN(dfaLeft) || memState.canBeNaN(dfaRight));
        if (dfaLeft == dfaRight ^ negated) {
          memState.push(factory.getConstFactory().getTrue());
          instruction.setTrueReachable();
        }
        else {
          memState.push(factory.getConstFactory().getFalse());
          instruction.setFalseReachable();
        }
        return nextInstruction(instruction, runner, memState);
      }

      boolean negated = memState.canBeNaN(dfaLeft) || memState.canBeNaN(dfaRight);
      DfaRelationValue dfaRelation = factory.getRelationFactory().create(dfaLeft, dfaRight, opSign, negated);
      if (dfaRelation != null) {
        myCanBeNullInInstanceof.add(instruction);

        ArrayList<DfaInstructionState> states = new ArrayList<DfaInstructionState>();

        // branch where the relation holds (a fresh copy of the state)
        final DfaMemoryState trueCopy = memState.createCopy();
        if (trueCopy.applyCondition(dfaRelation)) {
          trueCopy.push(factory.getConstFactory().getTrue());
          instruction.setTrueReachable();
          states.add(new DfaInstructionState(next, trueCopy));
        }

        // branch where the relation does not hold (reuses the incoming state)
        //noinspection UnnecessaryLocalVariable
        DfaMemoryState falseCopy = memState;
        if (falseCopy.applyCondition(dfaRelation.createNegated())) {
          falseCopy.push(factory.getConstFactory().getFalse());
          instruction.setFalseReachable();
          states.add(new DfaInstructionState(next, falseCopy));
          if (instruction instanceof InstanceofInstruction && !falseCopy.isNull(dfaLeft)) {
            myUsefulInstanceofs.add((InstanceofInstruction)instruction);
          }
        }

        return states.toArray(new DfaInstructionState[states.size()]);
      }
      else if ("+".equals(opSign)) {
        // string concatenation: result is a non-null String
        memState.push(instruction.getNonNullStringValue(factory));
        instruction.setTrueReachable(); // Not a branching instruction actually.
        instruction.setFalseReachable();
      }
      else {
        if (instruction instanceof InstanceofInstruction) {
          if ((dfaLeft instanceof DfaTypeValue || dfaLeft instanceof DfaNotNullValue) && dfaRight instanceof DfaTypeValue) {
            final PsiType leftType;
            if (dfaLeft instanceof DfaNotNullValue) {
              leftType = ((DfaNotNullValue)dfaLeft).getType();
            }
            else {
              leftType = ((DfaTypeValue)dfaLeft).getType();
              myCanBeNullInInstanceof.add(instruction);
            }

            // only an instanceof whose target is NOT a supertype of the operand can fail,
            // so only then is the check considered useful
            if (!((DfaTypeValue)dfaRight).getType().isAssignableFrom(leftType)) {
              myUsefulInstanceofs.add((InstanceofInstruction)instruction);
            }
          }
          else {
            myUsefulInstanceofs.add((InstanceofInstruction)instruction);
          }
        }
        memState.push(DfaUnknownValue.getInstance());
      }
    }
    else {
      memState.push(DfaUnknownValue.getInstance());
    }

    return nextInstruction(instruction, runner, memState);
  }

  /** True when a reachable, non-constant instanceof was never observed to be useful. */
  public boolean isInstanceofRedundant(InstanceofInstruction instruction) {
    return !myUsefulInstanceofs.contains(instruction) && !instruction.isConditionConst() && myReachable.contains(instruction);
  }

  /** True when the given binop was registered as possibly seeing a null operand. */
  public boolean canBeNull(BinopInstruction instruction) {
    return myCanBeNullInInstanceof.contains(instruction);
  }
}
/** * $Id: PollsEntityProvider.java 127888 2013-07-29 11:54:18Z steve.swinsburg@gmail.com $ * $URL: https://source.sakaiproject.org/svn/polls/tags/sakai-10.1/tool/src/java/org/sakaiproject/poll/tool/entityproviders/PollsEntityProvider.java $ * PollEntityProvider.java - polls - Aug 21, 2008 7:34:47 PM - azeckoski ************************************************************************** * Copyright (c) 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.poll.tool.entityproviders; import java.io.OutputStream; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import javax.servlet.http.HttpServletResponse; import lombok.Setter; import lombok.extern.apachecommons.CommonsLog; import org.apache.commons.beanutils.BeanUtils; import org.apache.commons.lang.StringUtils; import org.sakaiproject.entitybroker.EntityReference; import org.sakaiproject.entitybroker.EntityView; import org.sakaiproject.entitybroker.entityprovider.EntityProvider; import org.sakaiproject.entitybroker.entityprovider.annotations.EntityCustomAction; import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutable; import org.sakaiproject.entitybroker.entityprovider.capabilities.AutoRegisterEntityProvider; 
import org.sakaiproject.entitybroker.entityprovider.capabilities.CollectionResolvable; import org.sakaiproject.entitybroker.entityprovider.capabilities.Describeable; import org.sakaiproject.entitybroker.entityprovider.capabilities.Outputable; import org.sakaiproject.entitybroker.entityprovider.capabilities.RequestStorable; import org.sakaiproject.entitybroker.entityprovider.extension.Formats; import org.sakaiproject.entitybroker.entityprovider.extension.RequestStorage; import org.sakaiproject.entitybroker.entityprovider.search.Restriction; import org.sakaiproject.entitybroker.entityprovider.search.Search; import org.sakaiproject.entitybroker.exception.EntityException; import org.sakaiproject.entitybroker.util.AbstractEntityProvider; import org.sakaiproject.event.api.UsageSession; import org.sakaiproject.event.api.UsageSessionService; import org.sakaiproject.poll.logic.PollListManager; import org.sakaiproject.poll.logic.PollVoteManager; import org.sakaiproject.poll.model.Option; import org.sakaiproject.poll.model.Poll; import org.sakaiproject.poll.model.Vote; import org.sakaiproject.user.api.UserDirectoryService; /** * Handles the polls tool. 
* * @author Aaron Zeckoski (azeckoski @ gmail.com) * @author Denny (denny.denny @ gmail.com) */ @CommonsLog public class PollsEntityProvider extends AbstractEntityProvider implements EntityProvider, AutoRegisterEntityProvider, RequestStorable, ActionsExecutable, Outputable, Describeable { @Setter private PollListManager pollListManager; @Setter private PollVoteManager pollVoteManager; @Setter private UsageSessionService usageSessionService; @Setter private UserDirectoryService userDirectoryService; @Setter private RequestStorage requestStorage = null; public static final String ENTITY_PREFIX = "polls"; @Override public String getEntityPrefix() { return ENTITY_PREFIX; } public String[] getHandledOutputFormats() { return new String[] { Formats.XML, Formats.JSON }; } public String[] getHandledInputFormats() { return new String[] { Formats.XML, Formats.JSON, Formats.HTML }; } /** * site/siteId */ @EntityCustomAction(action = "site", viewKey = EntityView.VIEW_LIST) public List<Poll> getPollsForSite(EntityView view) { // get siteId String siteId = view.getPathSegment(2); // check siteId supplied if (StringUtils.isBlank(siteId)) { throw new IllegalArgumentException( "siteId must be set in order to get the polls for a site, via the URL /polls/site/siteId"); } String[] siteIds = new String[] { siteId }; if (log.isDebugEnabled()) { log.debug("poll for site " + siteId); } String userId = developerHelperService.getCurrentUserId(); if (userId == null) { throw new EntityException( "No user is currently logged in so no polls data can be retrieved", siteId, HttpServletResponse.SC_UNAUTHORIZED); } boolean adminControl = false; String perm = PollListManager.PERMISSION_VOTE; if (adminControl) { perm = PollListManager.PERMISSION_ADD; } List<Poll> polls = pollListManager .findAllPollsForUserAndSitesAndPermission(userId, siteIds, perm); if (adminControl) { // add in options for (Poll p : polls) { List<Option> options = pollListManager.getOptionsForPoll(p .getPollId()); 
p.setOptions(options); } } else { // add in the indicators that this user has replied Long[] pollIds = new Long[polls.size()]; for (int i = 0; i < polls.size(); i++) { pollIds[i] = polls.get(i).getPollId(); } Map<Long, List<Vote>> voteMap = pollVoteManager.getVotesForUser( userId, pollIds); for (Poll poll : polls) { Long pollId = poll.getPollId(); List<Vote> l = voteMap.get(pollId); if (l != null) { poll.setCurrentUserVoted(true); poll.setCurrentUserVotes(l); } else { poll.setCurrentUserVoted(false); } } } return polls; } @EntityCustomAction(action = "my", viewKey = EntityView.VIEW_LIST) public List<?> getEntities(EntityReference ref, Search search) { System.out.println("get entities"); // get the setting which indicates if we are getting polls we can admin // or polls we can take boolean adminControl = false; Restriction adminRes = search.getRestrictionByProperty("admin"); if (adminRes != null) { adminControl = developerHelperService.convert( adminRes.getSingleValue(), boolean.class); } // get the location (if set) Restriction locRes = search .getRestrictionByProperty(CollectionResolvable.SEARCH_LOCATION_REFERENCE); // requestStorage.getStoredValueAsType(String.class, // "siteId"); String[] siteIds = null; if (locRes != null) { String siteId = developerHelperService.getLocationIdFromRef(locRes .getStringValue()); siteIds = new String[] { siteId }; } // get the user (if set) Restriction userRes = search .getRestrictionByProperty(CollectionResolvable.SEARCH_USER_REFERENCE); String userId = null; if (userRes != null) { String currentUser = developerHelperService .getCurrentUserReference(); String userReference = userRes.getStringValue(); if (userReference == null) { throw new IllegalArgumentException( "Invalid request: Cannot limit polls by user when the value is null"); } if (userReference.equals(currentUser) || developerHelperService.isUserAdmin(currentUser)) { userId = developerHelperService.getUserIdFromRef(userReference); // 
requestStorage.getStoredValueAsType(String.class, // "userId"); } else { throw new SecurityException( "Only the admin can get polls for other users, you requested polls for: " + userReference); } } else { userId = developerHelperService.getCurrentUserId(); if (userId == null) { throw new EntityException( "No user is currently logged in so no polls data can be retrieved", ref.getId(), HttpServletResponse.SC_UNAUTHORIZED); } } String perm = PollListManager.PERMISSION_VOTE; if (adminControl) { perm = PollListManager.PERMISSION_ADD; } List<Poll> polls = pollListManager .findAllPollsForUserAndSitesAndPermission(userId, siteIds, perm); if (adminControl) { // add in options for (Poll p : polls) { List<Option> options = pollListManager.getOptionsForPoll(p .getPollId()); p.setOptions(options); } } else { // add in the indicators that this user has replied Long[] pollIds = new Long[polls.size()]; for (int i = 0; i < polls.size(); i++) { pollIds[i] = polls.get(i).getPollId(); } Map<Long, List<Vote>> voteMap = pollVoteManager.getVotesForUser( userId, pollIds); for (Poll poll : polls) { Long pollId = poll.getPollId(); List<Vote> l = voteMap.get(pollId); if (l != null) { poll.setCurrentUserVoted(true); poll.setCurrentUserVotes(l); } else { poll.setCurrentUserVoted(false); } } } return polls; } /** * @param id * @return */ private Poll getPollById(String id) { Long pollId; try { pollId = Long.valueOf(id); } catch (NumberFormatException e) { throw new IllegalArgumentException("Invalid poll id (" + id + "), the id must be a number"); } Poll poll = pollListManager.getPollById(pollId, false); return poll; } @EntityCustomAction(action = "poll-view", viewKey = EntityView.VIEW_SHOW) public Poll getPollEntity(EntityView view, EntityReference ref) { String id = ref.getId(); log.debug(id); if (StringUtils.isBlank(id)) { log.warn("Poll id is not exist. 
Returning an empty poll object."); return new Poll(); } Poll poll = getPollById(id); if (poll == null) { throw new IllegalArgumentException( "No poll found for the given reference: " + id); } Long pollId = poll.getPollId(); String currentUserId = developerHelperService.getCurrentUserId(); boolean allowedManage = false; if (!developerHelperService.isEntityRequestInternal(id + "")) { if (!pollListManager.isPollPublic(poll)) { // this is not a public poll? (ie .anon role has poll.vote) String userReference = developerHelperService .getCurrentUserReference(); if (userReference == null) { throw new EntityException( "User must be logged in in order to access poll data", id, HttpServletResponse.SC_UNAUTHORIZED); } allowedManage = developerHelperService .isUserAllowedInEntityReference(userReference, PollListManager.PERMISSION_ADD, "/site/" + poll.getSiteId()); boolean allowedVote = developerHelperService .isUserAllowedInEntityReference(userReference, PollListManager.PERMISSION_VOTE, "/site/" + poll.getSiteId()); if (!allowedManage && !allowedVote) { throw new SecurityException("User (" + userReference + ") not allowed to access poll data: " + id); } } } log.debug(requestStorage == null); Boolean includeVotes = requestStorage.getStoredValueAsType( Boolean.class, "includeVotes"); if (includeVotes == null) { includeVotes = false; } if (includeVotes) { List<Vote> votes = pollVoteManager.getAllVotesForPoll(poll); poll.setVotes(votes); } Boolean includeOptions = requestStorage.getStoredValueAsType( Boolean.class, "includeOptions"); if (includeOptions == null) { includeOptions = false; } if (includeOptions) { List<Option> options = pollListManager.getOptionsForPoll(poll); poll.setOptions(options); } // add in the indicator that this user has replied if (currentUserId != null) { Map<Long, List<Vote>> voteMap = pollVoteManager.getVotesForUser( currentUserId, new Long[] { pollId }); List<Vote> l = voteMap.get(pollId); if (l != null) { poll.setCurrentUserVoted(true); 
poll.setCurrentUserVotes(l); } else { poll.setCurrentUserVoted(false); } } return poll; } /** * Note that details is the only optional field */ @EntityCustomAction(action = "poll-create", viewKey = EntityView.VIEW_NEW) public String createPollEntity(EntityReference ref, Map<String, Object> params) { Poll poll = new Poll(); // copy from params to Poll copyParamsToObject(params, poll); poll.setCreationDate(new Date()); if (poll.getId() == null) { poll.setId(UUID.randomUUID().toString()); } if (poll.getOwner() == null) { poll.setOwner(developerHelperService.getCurrentUserId()); } String siteId = developerHelperService.getCurrentLocationId(); if (poll.getSiteId() == null) { poll.setSiteId(siteId); } else { siteId = poll.getSiteId(); } String userReference = developerHelperService.getCurrentUserReference(); String location = "/site/" + siteId; boolean allowed = developerHelperService .isUserAllowedInEntityReference(userReference, PollListManager.PERMISSION_ADD, location); if (!allowed) { throw new SecurityException("Current user (" + userReference + ") cannot create polls in location (" + location + ")"); } pollListManager.savePoll(poll); return poll.getPollId() + ""; } @EntityCustomAction(action = "poll-update", viewKey = EntityView.VIEW_EDIT) public void updatePollEntity(EntityReference ref, Map<String, Object> params) { String id = ref.getId(); if (id == null) { throw new IllegalArgumentException( "The reference must include an id for updates (id is currently null)"); } String userReference = developerHelperService.getCurrentUserReference(); if (userReference == null) { throw new SecurityException("anonymous user cannot update poll: " + ref); } Poll current = getPollById(id); if (current == null) { throw new IllegalArgumentException( "No poll found to update for the given reference: " + ref); } Poll poll = new Poll(); copyParamsToObject(params, poll); String siteId = developerHelperService.getCurrentLocationId(); if (poll.getSiteId() == null) { 
poll.setSiteId(siteId); } else { siteId = poll.getSiteId(); } String location = "/site/" + siteId; // should this check a different permission? boolean allowed = developerHelperService .isUserAllowedInEntityReference(userReference, PollListManager.PERMISSION_ADD, location); if (!allowed) { throw new SecurityException("Current user (" + userReference + ") cannot update polls in location (" + location + ")"); } developerHelperService.copyBean(poll, current, 0, new String[] { "id", "pollId", "owner", "siteId", "creationDate", "reference", "url", "properties" }, true); pollListManager.savePoll(current); } @EntityCustomAction(action = "poll-delete", viewKey = EntityView.VIEW_SHOW) public Object deletePollEntity(EntityReference ref) { String id = ref.getId(); if (id == null) { throw new IllegalArgumentException( "The reference must include an id for deletes (id is currently null)"); } Poll poll = getPollById(id); if (poll == null) { throw new IllegalArgumentException( "No poll found for the given reference: " + ref); } try { pollListManager.deletePoll(poll); return String.format("Poll id %d removed", id); } catch (SecurityException e) { throw new SecurityException("The current user (" + developerHelperService.getCurrentUserReference() + ") is not allowed to delete this poll: " + ref); } } /** * /{pollId}/poll-options */ @EntityCustomAction(action = "poll-option-list", viewKey = EntityView.VIEW_SHOW) public List<?> getPollOptionList(EntityView view, EntityReference ref) { // get the pollId String id = ref.getId(); log.debug(id); // check siteId supplied if (StringUtils.isBlank(id)) { throw new IllegalArgumentException( "siteId must be set in order to get the polls for a site, via the URL /polls/site/siteId"); } Long pollId = null; try { pollId = Long.parseLong(id); } catch (UnsupportedOperationException e) { throw new IllegalArgumentException( "Invalid: pollId must be a long number: " + e.getMessage(), e); } // get the poll Poll poll = pollListManager.getPollById(pollId); 
if (poll == null) { throw new IllegalArgumentException("pollId (" + pollId + ") is invalid and does not match any known polls"); } else { boolean allowedPublic = pollListManager.isPollPublic(poll); if (!allowedPublic) { String userReference = developerHelperService .getCurrentUserReference(); if (userReference == null) { throw new EntityException( "User must be logged in in order to access poll data", id, HttpServletResponse.SC_UNAUTHORIZED); } else { boolean allowedManage = false; boolean allowedVote = false; allowedManage = developerHelperService .isUserAllowedInEntityReference(userReference, PollListManager.PERMISSION_ADD, "/site/" + poll.getSiteId()); allowedVote = developerHelperService .isUserAllowedInEntityReference(userReference, PollListManager.PERMISSION_VOTE, "/site/" + poll.getSiteId()); if (!(allowedManage || allowedVote)) { throw new SecurityException("User (" + userReference + ") not allowed to access poll data: " + id); } } } } // get the options List<Option> options = pollListManager.getOptionsForPoll(pollId); return options; } @EntityCustomAction(action = "option-view", viewKey = EntityView.VIEW_SHOW) public Object getOptionEntity(EntityReference ref) { String id = ref.getId(); if (id == null) { return new Option(); } String currentUser = developerHelperService.getCurrentUserReference(); if (currentUser == null) { throw new EntityException( "Anonymous users cannot view specific options", ref.getId(), HttpServletResponse.SC_UNAUTHORIZED); } Option option = getOptionById(id); if (developerHelperService.isEntityRequestInternal(ref.toString())) { // ok to retrieve internally } else { // need to security check if (developerHelperService.isUserAdmin(currentUser)) { // ok to view this vote } else { // not allowed to view throw new SecurityException("User (" + currentUser + ") cannot view option (" + ref + ")"); } } return option; } @EntityCustomAction(action = "option-create", viewKey = EntityView.VIEW_NEW) public String 
createOptionEntity(EntityReference ref, Map<String, Object> params) { String userReference = developerHelperService.getCurrentUserReference(); if (userReference == null) { throw new EntityException( "User must be logged in to create new options", ref.getId(), HttpServletResponse.SC_UNAUTHORIZED); } Option option = new Option(); // copy from params to Option copyParamsToObject(params, option); // check minimum settings if (option.getPollId() == null) { throw new IllegalArgumentException( "Poll ID must be set to create an option"); } // check minimum settings if (option.getOptionText() == null) { throw new IllegalArgumentException( "Poll Option text must be set to create an option"); } checkOptionPermission(userReference, option); // set default values option.setUUId(UUID.randomUUID().toString()); boolean saved = pollListManager.saveOption(option); if (!saved) { throw new IllegalStateException("Unable to save option (" + option + ") for user (" + userReference + "): " + ref); } return option.getId() + ""; } /** * Helper to copy from map of parameters to object. 
* * @param params * source * @param object * destination */ private void copyParamsToObject(Map<String, Object> params, Object object) { Class<?> c = object.getClass(); Method[] methods = c.getDeclaredMethods(); for (Method m : methods) { String name = m.getName(); Class<?>[] types = m.getParameterTypes(); if (name.startsWith("set") && (types.length == 1)) { String key = Character.toLowerCase(name.charAt(3)) + name.substring(4); Object value = params.get(key); if (value != null) { if (types[0].equals(Date.class)) { Date dateValue = new Date( Long.valueOf(value.toString())); try { m.invoke(object, new Object[] { dateValue }); } catch (IllegalAccessException e) { log.debug(e); } catch (IllegalArgumentException e) { log.debug(e); } catch (InvocationTargetException e) { log.debug(e); } } else { // use generic converter from BeanUtils try { BeanUtils.copyProperty(object, key, value); } catch (IllegalAccessException e) { log.debug(e); } catch (InvocationTargetException e) { log.debug(e); } } } } } } @EntityCustomAction(action = "option-update", viewKey = EntityView.VIEW_EDIT) public void updateOptionEntity(EntityReference ref, Map<String, Object> params) { String id = ref.getId(); if (id == null) { throw new IllegalArgumentException( "The reference must include an id for updates (id is currently null)"); } String userReference = developerHelperService.getCurrentUserReference(); if (userReference == null) { throw new EntityException("Anonymous user cannot update option", ref.getId(), HttpServletResponse.SC_UNAUTHORIZED); } Option current = getOptionById(id); if (current == null) { throw new IllegalArgumentException( "No option found to update for the given reference: " + ref); } Option option = new Option(); // copy from params to Option copyParamsToObject(params, option); checkOptionPermission(userReference, current); developerHelperService.copyBean(option, current, 0, new String[] { "id", "pollId", "UUId" }, true); boolean saved = pollListManager.saveOption(current); if 
(!saved) { throw new IllegalStateException("Unable to update option (" + option + ") for user (" + userReference + "): " + ref); } } @EntityCustomAction(action = "option-delete", viewKey = EntityView.VIEW_SHOW) public void deleteOptionEntity(EntityReference ref, Map<String, Object> params) { String id = ref.getId(); String userReference = developerHelperService.getCurrentUserReference(); if (userReference == null) { throw new EntityException("Anonymous user cannot delete option", ref.getId(), HttpServletResponse.SC_UNAUTHORIZED); } Option option = getOptionById(id); if (option == null) { throw new IllegalArgumentException( "No option found to delete for the given reference: " + ref); } checkOptionPermission(userReference, option); pollListManager.deleteOption(option); // return String.format("Poll option id %d removed", id); } /** * Checks if the given user can create/update/delete options * * @param userRef * @param option */ private void checkOptionPermission(String userRef, Option option) { if (option.getPollId() == null) { throw new IllegalArgumentException( "Poll Id must be set in the option to check permissions: " + option); } Long pollId = option.getPollId(); // validate poll exists Poll poll = pollListManager.getPollById(pollId, false); if (poll == null) { throw new IllegalArgumentException("Invalid poll id (" + pollId + "), could not find poll from option: " + option); } // check permissions String siteRef = "/site/" + poll.getSiteId(); if (!developerHelperService.isUserAllowedInEntityReference(userRef, PollListManager.PERMISSION_ADD, siteRef)) { throw new SecurityException( "User (" + userRef + ") is not allowed to create/update/delete options in this poll (" + pollId + ")"); } } /** * @param id * @return */ private Option getOptionById(String id) { Long optionId; try { optionId = Long.valueOf(id); } catch (NumberFormatException e) { throw new IllegalArgumentException("Cannot convert id (" + id + ") to long: " + e.getMessage(), e); } Option option = 
pollListManager.getOptionById(optionId); return option; } @EntityCustomAction(action = "vote-create", viewKey = EntityView.VIEW_NEW) public String createVoteEntity(EntityReference ref, Map<String, Object> params) { String userId = userDirectoryService.getCurrentUser().getId(); Vote vote = new Vote(); copyParamsToObject(params, vote); log.debug("got vote: " + vote.toString()); Long pollId = null; try { pollId = Long.valueOf((String) params.get("pollId")); } catch (Exception e) { log.warn(e); } if (pollId == null) { throw new IllegalArgumentException( "Poll Id must be set to create a vote"); } vote.setPollId(pollId); Long optionId = null; try { optionId = Long.valueOf((String) params.get("pollOption")); } catch (Exception e) { log.warn(e); } if (optionId == null) { throw new IllegalArgumentException( "Poll Option must be set to create a vote"); } if (!pollVoteManager.isUserAllowedVote(userId, pollId, false)) { throw new SecurityException("User (" + userId + ") is not allowed to vote in this poll (" + pollId + ")"); } vote.setPollOption(optionId); // validate option Option option = pollListManager.getOptionById(vote.getPollOption()); if (option == null) { throw new IllegalArgumentException("Invalid poll option (" + vote.getPollOption() + ") [cannot find option] in vote (" + vote + ") for user (" + userId + ")"); } else { if (!pollId.equals(option.getPollId())) { throw new IllegalArgumentException("Invalid poll option (" + vote.getPollOption() + ") [not in poll (" + pollId + ")] in vote (" + vote + ") for user (" + userId + ")"); } } // set default vote values vote.setVoteDate(new Date()); vote.setUserId(userId); if (vote.getSubmissionId() == null) { String sid = userId + ":" + UUID.randomUUID(); vote.setSubmissionId(sid); } // set the IP address UsageSession usageSession = usageSessionService.getSession(); if (usageSession != null) { vote.setIp(usageSession.getIpAddress()); } boolean saved = pollVoteManager.saveVote(vote); if (!saved) { throw new 
IllegalStateException("Unable to save vote (" + vote + ") for user (" + userId + "): " + ref); } return vote.getId() + ""; } @EntityCustomAction(action = "vote-view", viewKey = EntityView.VIEW_SHOW) public Object getVoteEntity(EntityReference ref) { String id = ref.getId(); String currentUser = developerHelperService.getCurrentUserReference(); log.debug("current user is: " + currentUser); if (currentUser == null || currentUser.length() == 0) { throw new EntityException( "Anonymous users cannot view specific votes", ref.getId(), HttpServletResponse.SC_UNAUTHORIZED); } // is this a new object? if (ref.getId() == null) { new Vote(); } Vote vote = getVoteById(id); String userId = developerHelperService.getUserIdFromRef(currentUser); if (developerHelperService.isUserAdmin(currentUser)) { // ok to view this vote } else if (userId.equals(vote.getUserId())) { // ok to view own } else if (developerHelperService.isEntityRequestInternal(ref .toString())) { // ok for all internal requests } else { // TODO - check vote location and perm? 
// not allowed to view throw new SecurityException("User (" + currentUser + ") cannot view vote (" + ref + ")"); } if (id == null) { return new Vote(); } return vote; } @EntityCustomAction(action = "vote-list", viewKey = EntityView.VIEW_LIST) public List<?> getVoteEntities(EntityReference ref, Search search) { String currentUserId = userDirectoryService.getCurrentUser().getId(); Restriction pollRes = search.getRestrictionByProperty("pollId"); if (pollRes == null || pollRes.getSingleValue() == null) { // throw new // IllegalArgumentException("Must include a non-null pollId in order to retreive a list of votes"); return null; } Long pollId = null; boolean viewVoters = false; if (developerHelperService.isUserAdmin(developerHelperService .getCurrentUserReference())) { viewVoters = true; } try { pollId = developerHelperService.convert(pollRes.getSingleValue(), Long.class); } catch (UnsupportedOperationException e) { throw new IllegalArgumentException( "Invalid: pollId must be a long number: " + e.getMessage(), e); } Poll poll = pollListManager.getPollById(pollId); if (poll == null) { throw new IllegalArgumentException("pollId (" + pollId + ") is invalid and does not match any known polls"); } List<Vote> votes = pollVoteManager.getAllVotesForPoll(poll); if (developerHelperService.isEntityRequestInternal(ref.toString())) { // ok for all internal requests } else if (!pollListManager.isAllowedViewResults(poll, currentUserId)) { // TODO - check vote location and perm? 
// not allowed to view throw new SecurityException("User (" + currentUserId + ") cannot view vote (" + ref + ")"); } if (viewVoters) { return votes; } else { return anonymizeVotes(votes); } } private List<?> anonymizeVotes(List<Vote> votes) { List<Vote> ret = new ArrayList<Vote>(); String userId = userDirectoryService.getCurrentUser().getId(); for (int i = 0; i < votes.size(); i++) { Vote vote = (Vote) votes.get(i); if (!userId.equals(vote.getUserId())) { Vote newVote = new Vote(); newVote.setPollId(vote.getPollId()); newVote.setPollOption(vote.getPollOption()); newVote.setSubmissionId(vote.getSubmissionId()); ret.add(newVote); } else { ret.add(vote); } } return ret; } /** * Allows a user to create multiple Vote objects at once, taking one or more * pollOption parameters. */ @EntityCustomAction(action = "vote", viewKey = EntityView.VIEW_NEW) public List<Vote> vote(EntityView view, EntityReference ref, String prefix, Search search, OutputStream out, Map<String, Object> params) { Long pollId = null; try { pollId = Long.valueOf((String) params.get("pollId")); } catch (NumberFormatException nfe) { throw new IllegalArgumentException("No pollId found."); } String userId = userDirectoryService.getCurrentUser().getId(); Poll poll = pollListManager.getPollById(pollId, false); if (poll == null) { throw new IllegalArgumentException( "No poll found to update for the given reference: " + ref); } if (!pollVoteManager.isUserAllowedVote(userId, poll.getPollId(), false)) { throw new SecurityException("User (" + userId + ") is not allowed to vote in this poll (" + poll.getPollId() + ")"); } Set<String> optionIds = new HashSet<String>(); Object param = params.get("pollOption"); if (param == null) { throw new IllegalArgumentException( "At least one pollOption parameter must be provided to vote."); } else if (param instanceof String) { optionIds.add((String) param); } else if (param instanceof Iterable<?>) { for (Object o : (Iterable<?>) param) if (o instanceof String) 
optionIds.add((String) o); else throw new IllegalArgumentException( "Each pollOption must be a String, not " + o.getClass().getName()); } else if (param instanceof Object[]) { for (Object o : (Object[]) param) if (o instanceof String) optionIds.add((String) o); else throw new IllegalArgumentException( "Each pollOption must be a String, not " + o.getClass().getName()); } else throw new IllegalArgumentException( "pollOption must be String, String[] or List<String>, not " + param.getClass().getName()); // Turn each option String into an Option, making sure that each is a // valid choice for the poll. We use a Map to make sure one cannot vote // more than once for any option by specifying it using equivalent // representations Map<Long, Option> options = new HashMap<Long, Option>(); for (String optionId : optionIds) { try { Option option = pollListManager.getOptionById(Long .valueOf(optionId)); if (!poll.getPollId().equals(option.getPollId())) throw new Exception(); options.put(option.getOptionId(), option); } catch (Exception e) { throw new IllegalArgumentException("Invalid pollOption: " + optionId); } } // Validate that the number of options voted for is within acceptable // bounds. if (options.size() < poll.getMinOptions()) throw new IllegalArgumentException("You must provide at least " + poll.getMinOptions() + " options, not " + options.size() + "."); if (options.size() > poll.getMaxOptions()) throw new IllegalArgumentException("You may provide at most " + poll.getMaxOptions() + " options, not " + options.size() + "."); // Create and save the Vote objects. 
UsageSession usageSession = usageSessionService.getSession(); List<Vote> votes = new ArrayList<Vote>(); for (Option option : options.values()) { Vote vote = new Vote(); vote.setVoteDate(new Date()); vote.setUserId(userId); vote.setPollId(poll.getPollId()); vote.setPollOption(option.getOptionId()); if (vote.getSubmissionId() == null) { String sid = userId + ":" + UUID.randomUUID(); vote.setSubmissionId(sid); } if (usageSession != null) vote.setIp(usageSession.getIpAddress()); boolean saved = pollVoteManager.saveVote(vote); if (!saved) { throw new IllegalStateException("Unable to save vote (" + vote + ") for user (" + userId + "): " + ref); } votes.add(vote); } return votes; } /** * @param id * @return */ private Vote getVoteById(String id) { Long voteId; try { voteId = Long.valueOf(id); } catch (NumberFormatException e) { throw new IllegalArgumentException("Cannot convert id (" + id + ") to long: " + e.getMessage(), e); } Vote vote = pollVoteManager.getVoteById(voteId); return vote; } }
package com.auth0.client.mgmt;

import com.auth0.client.mgmt.filter.RulesFilter;
import com.auth0.json.mgmt.Rule;
import com.auth0.json.mgmt.RulesPage;
import com.auth0.net.Request;
import okhttp3.mockwebserver.RecordedRequest;
import org.junit.Test;

import java.util.List;
import java.util.Map;

import static com.auth0.client.MockServer.*;
import static com.auth0.client.RecordedRequestMatcher.*;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Tests for the Rules management endpoint: each test stubs the mock server
 * with a canned JSON response, executes the request, and verifies both the
 * recorded HTTP request (method, path, headers, query/body) and the parsed
 * response.
 */
public class RulesEntityTest extends BaseMgmtEntityTest {

    @Test
    public void shouldListRules() throws Exception {
        @SuppressWarnings("deprecation")
        Request<List<Rule>> request = api.rules().list(null);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULES_LIST, 200);
        List<Rule> response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));

        assertThat(response, is(notNullValue()));
        assertThat(response, hasSize(2));
    }

    @Test
    public void shouldListRulesWithoutFilter() throws Exception {
        Request<RulesPage> request = api.rules().listAll(null);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULES_LIST, 200);
        RulesPage response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));

        assertThat(response, is(notNullValue()));
        assertThat(response.getItems(), hasSize(2));
    }

    @Test
    public void shouldListRulesWithEnabled() throws Exception {
        RulesFilter filter = new RulesFilter().withEnabled(true);
        @SuppressWarnings("deprecation")
        Request<List<Rule>> request = api.rules().list(filter);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULES_LIST, 200);
        List<Rule> response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));
        assertThat(recordedRequest, hasQueryParameter("enabled", "true"));

        assertThat(response, is(notNullValue()));
        assertThat(response, hasSize(2));
    }

    @Test
    public void shouldListRulesWithFields() throws Exception {
        RulesFilter filter = new RulesFilter().withFields("some,random,fields", true);
        @SuppressWarnings("deprecation")
        Request<List<Rule>> request = api.rules().list(filter);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULES_LIST, 200);
        List<Rule> response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));
        assertThat(recordedRequest, hasQueryParameter("fields", "some,random,fields"));
        assertThat(recordedRequest, hasQueryParameter("include_fields", "true"));

        assertThat(response, is(notNullValue()));
        assertThat(response, hasSize(2));
    }

    @Test
    public void shouldNotListRulesWithTotals() throws Exception {
        // the deprecated list() variant must ignore the totals flag
        RulesFilter filter = new RulesFilter().withTotals(true);
        @SuppressWarnings("deprecation")
        Request<List<Rule>> request = api.rules().list(filter);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULES_LIST, 200);
        List<Rule> response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));
        assertThat(recordedRequest, not(hasQueryParameter("include_totals")));

        assertThat(response, is(notNullValue()));
        assertThat(response, hasSize(2));
    }

    @Test
    public void shouldListRulesWithPage() throws Exception {
        RulesFilter filter = new RulesFilter().withPage(23, 5);
        Request<RulesPage> request = api.rules().listAll(filter);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULES_LIST, 200);
        RulesPage response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));
        assertThat(recordedRequest, hasQueryParameter("page", "23"));
        assertThat(recordedRequest, hasQueryParameter("per_page", "5"));

        assertThat(response, is(notNullValue()));
        assertThat(response.getItems(), hasSize(2));
    }

    @Test
    public void shouldListRulesWithTotals() throws Exception {
        RulesFilter filter = new RulesFilter().withTotals(true);
        Request<RulesPage> request = api.rules().listAll(filter);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULES_PAGED_LIST, 200);
        RulesPage response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));
        assertThat(recordedRequest, hasQueryParameter("include_totals", "true"));

        assertThat(response, is(notNullValue()));
        assertThat(response.getItems(), hasSize(2));
        assertThat(response.getStart(), is(0));
        assertThat(response.getLength(), is(14));
        assertThat(response.getTotal(), is(14));
        assertThat(response.getLimit(), is(50));
    }

    @Test
    public void shouldReturnEmptyRules() throws Exception {
        @SuppressWarnings("deprecation")
        Request<List<Rule>> request = api.rules().list(null);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_EMPTY_LIST, 200);
        List<Rule> response = request.execute();

        assertThat(response, is(notNullValue()));
        assertThat(response, is(emptyCollectionOf(Rule.class)));
    }

    @Test
    public void shouldThrowOnGetRuleWithNullId() {
        exception.expect(IllegalArgumentException.class);
        exception.expectMessage("'rule id' cannot be null!");
        api.rules().get(null, null);
    }

    @Test
    public void shouldGetRule() throws Exception {
        Request<Rule> request = api.rules().get("1", null);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULE, 200);
        Rule response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules/1"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));

        assertThat(response, is(notNullValue()));
    }

    @Test
    public void shouldGetRuleWithFields() throws Exception {
        RulesFilter filter = new RulesFilter().withFields("some,random,fields", true);
        Request<Rule> request = api.rules().get("1", filter);
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULE, 200);
        Rule response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("GET", "/api/v2/rules/1"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));
        assertThat(recordedRequest, hasQueryParameter("fields", "some,random,fields"));
        assertThat(recordedRequest, hasQueryParameter("include_fields", "true"));

        assertThat(response, is(notNullValue()));
    }

    @Test
    public void shouldThrowOnCreateRuleWithNullData() {
        exception.expect(IllegalArgumentException.class);
        exception.expectMessage("'rule' cannot be null!");
        api.rules().create(null);
    }

    @Test
    public void shouldCreateRule() throws Exception {
        Request<Rule> request = api.rules().create(new Rule("my-rule", "function(){}"));
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULE, 200);
        Rule response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("POST", "/api/v2/rules"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));

        Map<String, Object> body = bodyFromRequest(recordedRequest);
        assertThat(body.size(), is(2));
        assertThat(body, hasEntry("name", "my-rule"));
        assertThat(body, hasEntry("script", "function(){}"));

        assertThat(response, is(notNullValue()));
    }

    @Test
    public void shouldThrowOnDeleteRuleWithNullId() {
        exception.expect(IllegalArgumentException.class);
        exception.expectMessage("'rule id' cannot be null!");
        api.rules().delete(null);
    }

    @Test
    public void shouldDeleteRule() throws Exception {
        Request<Void> request = api.rules().delete("1");
        assertThat(request, is(notNullValue()));

        server.jsonResponse(MGMT_RULE, 200);
        request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("DELETE", "/api/v2/rules/1"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));
    }

    @Test
    public void shouldThrowOnUpdateRuleWithNullId() {
        exception.expect(IllegalArgumentException.class);
        exception.expectMessage("'rule id' cannot be null!");
        api.rules().update(null, new Rule("my-rule", "function(){}"));
    }

    @Test
    public void shouldThrowOnUpdateRuleWithNullData() {
        exception.expect(IllegalArgumentException.class);
        exception.expectMessage("'rule' cannot be null!");
        api.rules().update("1", null);
    }

    @Test
    public void shouldUpdateRule() throws Exception {
        Request<Rule> request = api.rules().update("1", new Rule("my-rule", "function(){}"));
        assertThat(request, is(notNullValue()));

        // FIX: the stubbed response was MGMT_CONNECTION (a copy-paste from the
        // connections tests); a rule update should be answered with MGMT_RULE,
        // consistent with every other test in this class.
        server.jsonResponse(MGMT_RULE, 200);
        Rule response = request.execute();
        RecordedRequest recordedRequest = server.takeRequest();

        assertThat(recordedRequest, hasMethodAndPath("PATCH", "/api/v2/rules/1"));
        assertThat(recordedRequest, hasHeader("Content-Type", "application/json"));
        assertThat(recordedRequest, hasHeader("Authorization", "Bearer apiToken"));

        Map<String, Object> body = bodyFromRequest(recordedRequest);
        assertThat(body.size(), is(2));
        assertThat(body, hasEntry("name", "my-rule"));
        assertThat(body, hasEntry("script", "function(){}"));

        assertThat(response, is(notNullValue()));
    }
}
/*
 * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.nio;

import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.config.NetworkConfig;
import com.hazelcast.config.SSLConfig;
import com.hazelcast.config.SocketInterceptorConfig;
import com.hazelcast.config.SymmetricEncryptionConfig;
import com.hazelcast.instance.HazelcastThreadGroup;
import com.hazelcast.instance.Node;
import com.hazelcast.instance.NodeState;
import com.hazelcast.instance.OutOfMemoryErrorDispatcher;
import com.hazelcast.internal.ascii.TextCommandService;
import com.hazelcast.internal.cluster.impl.ClusterServiceImpl;
import com.hazelcast.internal.networking.IOOutOfMemoryHandler;
import com.hazelcast.internal.networking.ReadHandler;
import com.hazelcast.internal.networking.SocketChannelWrapperFactory;
import com.hazelcast.internal.networking.WriteHandler;
import com.hazelcast.internal.serialization.InternalSerializationService;
import com.hazelcast.logging.LoggingService;
import com.hazelcast.nio.tcp.TcpIpConnection;
import com.hazelcast.spi.EventService;
import com.hazelcast.spi.ExecutionService;
import com.hazelcast.spi.annotation.PrivateApi;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.properties.GroupProperty;

import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * Member-side {@link IOService} implementation.
 *
 * Adapts the networking layer to a running {@link Node}: exposes network-related
 * configuration (read from {@link GroupProperty} values and the {@link NetworkConfig})
 * and forwards connection life-cycle callbacks to the cluster service and joiner.
 */
@PrivateApi
public class NodeIOService implements IOService {

    private final Node node;
    private final NodeEngineImpl nodeEngine;

    public NodeIOService(Node node, NodeEngineImpl nodeEngine) {
        this.node = node;
        this.nodeEngine = nodeEngine;
    }

    @Override
    public HazelcastThreadGroup getHazelcastThreadGroup() {
        return nodeEngine.getHazelcastThreadGroup();
    }

    @Override
    public LoggingService getLoggingService() {
        return nodeEngine.getLoggingService();
    }

    /** The IO layer is considered active in every node state except SHUT_DOWN. */
    @Override
    public boolean isActive() {
        return node.getState() != NodeState.SHUT_DOWN;
    }

    @Override
    public IOOutOfMemoryHandler getIoOutOfMemoryHandler() {
        return new IOOutOfMemoryHandler() {
            @Override
            public void handle(OutOfMemoryError error) {
                OutOfMemoryErrorDispatcher.onOutOfMemory(error);
            }
        };
    }

    @Override
    public Address getThisAddress() {
        return node.getThisAddress();
    }

    /**
     * Shuts the node down on a fatal IO error. The shutdown runs on a dedicated
     * thread (not the calling IO thread) so the IO thread is not blocked by it.
     */
    @Override
    public void onFatalError(Exception e) {
        HazelcastThreadGroup threadGroup = node.getHazelcastThreadGroup();
        Thread thread = new Thread(threadGroup.getInternalThreadGroup(),
                threadGroup.getThreadNamePrefix("io.error.shutdown")) {
            public void run() {
                node.shutdown(false);
            }
        };
        thread.start();
    }

    @Override
    public SocketInterceptorConfig getSocketInterceptorConfig() {
        return node.getConfig().getNetworkConfig().getSocketInterceptorConfig();
    }

    @Override
    public SymmetricEncryptionConfig getSymmetricEncryptionConfig() {
        return node.getConfig().getNetworkConfig().getSymmetricEncryptionConfig();
    }

    @Override
    public SSLConfig getSSLConfig() {
        return node.getConfig().getNetworkConfig().getSSLConfig();
    }

    @Override
    public void handleClientMessage(ClientMessage cm, Connection connection) {
        node.clientEngine.handleClientMessage(cm, connection);
    }

    @Override
    public TextCommandService getTextCommandService() {
        return node.getTextCommandService();
    }

    @Override
    public boolean isMemcacheEnabled() {
        return node.getProperties().getBoolean(GroupProperty.MEMCACHE_ENABLED);
    }

    @Override
    public boolean isRestEnabled() {
        return node.getProperties().getBoolean(GroupProperty.REST_ENABLED);
    }

    @Override
    public boolean isHealthcheckEnabled() {
        return node.getProperties().getBoolean(GroupProperty.HTTP_HEALTHCHECK_ENABLED);
    }

    @Override
    public void removeEndpoint(final Address endPoint) {
        nodeEngine.getExecutionService().execute(ExecutionService.IO_EXECUTOR, new Runnable() {
            @Override
            public void run() {
                // we can safely pass null because removeEndpoint is triggered from the connectionManager after
                // the connection is closed. So a reason is already set.
                node.clusterService.removeAddress(endPoint, null);
            }
        });
    }

    @Override
    public void onDisconnect(final Address endpoint, Throwable cause) {
        if (cause == null) {
            // connection is closed explicitly. we should not attempt to reconnect
            return;
        }

        // only reconnect to endpoints that are still cluster members
        if (node.clusterService.getMember(endpoint) != null) {
            nodeEngine.getExecutionService().execute(ExecutionService.IO_EXECUTOR, new ReconnectionTask(endpoint));
        }
    }

    @Override
    public void onSuccessfulConnection(Address address) {
        if (!node.joined()) {
            node.getJoiner().unblacklist(address);
        }
    }

    @Override
    public void onFailedConnection(final Address address) {
        if (!node.joined()) {
            node.getJoiner().blacklist(address, false);
        } else {
            if (node.clusterService.getMember(address) != null) {
                nodeEngine.getExecutionService().schedule(ExecutionService.IO_EXECUTOR,
                        new ReconnectionTask(address), getConnectionMonitorInterval(), TimeUnit.MILLISECONDS);
            }
        }
    }

    @Override
    public void shouldConnectTo(Address address) {
        if (node.getThisAddress().equals(address)) {
            throw new RuntimeException("Connecting to self! " + address);
        }
    }

    @Override
    public boolean isSocketBind() {
        return node.getProperties().getBoolean(GroupProperty.SOCKET_CLIENT_BIND);
    }

    @Override
    public boolean isSocketBindAny() {
        return node.getProperties().getBoolean(GroupProperty.SOCKET_CLIENT_BIND_ANY);
    }

    @Override
    public int getSocketReceiveBufferSize() {
        return node.getProperties().getInteger(GroupProperty.SOCKET_RECEIVE_BUFFER_SIZE);
    }

    @Override
    public int getSocketSendBufferSize() {
        return node.getProperties().getInteger(GroupProperty.SOCKET_SEND_BUFFER_SIZE);
    }

    @Override
    public boolean isSocketBufferDirect() {
        return node.getProperties().getBoolean(GroupProperty.SOCKET_BUFFER_DIRECT);
    }

    @Override
    public int getSocketClientReceiveBufferSize() {
        // -1 means the client-specific property is not configured; fall back to the member setting.
        // (Local variable renamed: it previously said "clientSendBuffer" although it holds the receive size.)
        int clientReceiveBuffer = node.getProperties().getInteger(GroupProperty.SOCKET_CLIENT_RECEIVE_BUFFER_SIZE);
        return clientReceiveBuffer != -1 ? clientReceiveBuffer : getSocketReceiveBufferSize();
    }

    @Override
    public int getSocketClientSendBufferSize() {
        // -1 means the client-specific property is not configured; fall back to the member setting.
        // BUG FIX: previously fell back to getSocketReceiveBufferSize(), returning the receive-buffer
        // size for the send buffer whenever SOCKET_CLIENT_SEND_BUFFER_SIZE was unset.
        int clientSendBuffer = node.getProperties().getInteger(GroupProperty.SOCKET_CLIENT_SEND_BUFFER_SIZE);
        return clientSendBuffer != -1 ? clientSendBuffer : getSocketSendBufferSize();
    }

    @Override
    public int getSocketLingerSeconds() {
        return node.getProperties().getSeconds(GroupProperty.SOCKET_LINGER_SECONDS);
    }

    @Override
    public int getSocketConnectTimeoutSeconds() {
        return node.getProperties().getSeconds(GroupProperty.SOCKET_CONNECT_TIMEOUT_SECONDS);
    }

    @Override
    public boolean getSocketKeepAlive() {
        return node.getProperties().getBoolean(GroupProperty.SOCKET_KEEP_ALIVE);
    }

    @Override
    public boolean getSocketNoDelay() {
        return node.getProperties().getBoolean(GroupProperty.SOCKET_NO_DELAY);
    }

    @Override
    public int getInputSelectorThreadCount() {
        return node.getProperties().getInteger(GroupProperty.IO_INPUT_THREAD_COUNT);
    }

    @Override
    public int getOutputSelectorThreadCount() {
        return node.getProperties().getInteger(GroupProperty.IO_OUTPUT_THREAD_COUNT);
    }

    /** This implementation always represents a member, never a client. */
    @Override
    public boolean isClient() {
        return false;
    }

    @Override
    public long getConnectionMonitorInterval() {
        return node.getProperties().getMillis(GroupProperty.CONNECTION_MONITOR_INTERVAL);
    }

    @Override
    public int getConnectionMonitorMaxFaults() {
        return node.getProperties().getInteger(GroupProperty.CONNECTION_MONITOR_MAX_FAULTS);
    }

    @Override
    public int getBalancerIntervalSeconds() {
        return node.getProperties().getSeconds(GroupProperty.IO_BALANCER_INTERVAL_SECONDS);
    }

    @Override
    public void executeAsync(final Runnable runnable) {
        nodeEngine.getExecutionService().execute(ExecutionService.IO_EXECUTOR, runnable);
    }

    @Override
    public EventService getEventService() {
        return nodeEngine.getEventService();
    }

    @Override
    public InternalSerializationService getSerializationService() {
        return node.getSerializationService();
    }

    @Override
    public SocketChannelWrapperFactory getSocketChannelWrapperFactory() {
        return node.getNodeExtension().getSocketChannelWrapperFactory();
    }

    @Override
    public MemberSocketInterceptor getMemberSocketInterceptor() {
        return node.getNodeExtension().getMemberSocketInterceptor();
    }

    @Override
    public ReadHandler createReadHandler(TcpIpConnection connection) {
        return node.getNodeExtension().createReadHandler(connection, this);
    }

    @Override
    public WriteHandler createWriteHandler(TcpIpConnection connection) {
        return node.getNodeExtension().createWriteHandler(connection, this);
    }

    /**
     * Resolves the set of ports that may be used for outbound connections.
     *
     * @return the allowed outbound ports; an empty set means "any port"
     */
    @Override
    public Collection<Integer> getOutboundPorts() {
        final NetworkConfig networkConfig = node.getConfig().getNetworkConfig();
        final Collection<String> portDefinitions = getPortDefinitions(networkConfig);
        final Set<Integer> ports = getPorts(networkConfig);
        if (portDefinitions.isEmpty() && ports.isEmpty()) {
            // means any port
            return Collections.emptySet();
        }
        if (portDefinitions.contains("*") || portDefinitions.contains("0")) {
            // means any port
            return Collections.emptySet();
        }
        transformPortDefinitionsToPorts(portDefinitions, ports);
        if (ports.contains(0)) {
            // means any port
            return Collections.emptySet();
        }
        return ports;
    }

    /**
     * Expands textual port definitions ("5701", "5701-5703", comma/semicolon/space
     * separated lists) into concrete port numbers added to {@code ports}.
     */
    private void transformPortDefinitionsToPorts(Collection<String> portDefinitions, Set<Integer> ports) {
        // not checking port ranges... (invalid ranges like "9-1" simply add nothing)
        for (String portDef : portDefinitions) {
            String[] portDefs = portDef.split("[,; ]");
            for (String def : portDefs) {
                def = def.trim();
                if (def.isEmpty()) {
                    continue;
                }
                final int dashPos = def.indexOf('-');
                if (dashPos > 0) {
                    // inclusive range "start-end"
                    final int start = Integer.parseInt(def.substring(0, dashPos));
                    final int end = Integer.parseInt(def.substring(dashPos + 1));
                    for (int port = start; port <= end; port++) {
                        ports.add(port);
                    }
                } else {
                    ports.add(Integer.parseInt(def));
                }
            }
        }
    }

    /** Returns a mutable copy of the configured outbound ports (never null). */
    private Set<Integer> getPorts(NetworkConfig networkConfig) {
        return networkConfig.getOutboundPorts() == null
                ? new HashSet<Integer>()
                : new HashSet<Integer>(networkConfig.getOutboundPorts());
    }

    /** Returns the configured outbound port definitions, or an empty set when unset. */
    private Collection<String> getPortDefinitions(NetworkConfig networkConfig) {
        return networkConfig.getOutboundPortDefinitions() == null
                ? Collections.<String>emptySet()
                : networkConfig.getOutboundPortDefinitions();
    }

    /** Re-establishes the connection to an endpoint that is still a cluster member. */
    private class ReconnectionTask implements Runnable {
        private final Address endpoint;

        ReconnectionTask(Address endpoint) {
            this.endpoint = endpoint;
        }

        @Override
        public void run() {
            ClusterServiceImpl clusterService = node.clusterService;
            // the member may have been removed between scheduling and execution
            if (clusterService.getMember(endpoint) != null) {
                node.connectionManager.getOrConnect(endpoint);
            }
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.bwcompat;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.repositories.uri.URLRepository;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase;
import org.elasticsearch.snapshots.RestoreInfo;
import org.elasticsearch.snapshots.SnapshotInfo;
import org.elasticsearch.snapshots.SnapshotRestoreException;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.SortedSet;
import java.util.TreeSet;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Backwards-compatibility integration tests that restore snapshots created by
 * older Elasticsearch versions. The snapshot repositories are checked-in zip
 * files (named {@code repo-<version>.zip} / {@code unsupportedrepo-<version>.zip})
 * found under the bwc-indices path and mounted either via {@code path.repo} or
 * via a whitelisted {@code jar:} URL repository.
 */
@ClusterScope(scope = Scope.TEST)
public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase {

    /**
     * Randomly exercises both ways of granting repository access:
     * a {@code path.repo} filesystem setting, or a URL-repository whitelist entry.
     */
    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        if (randomBoolean()) {
            // Configure using path.repo
            return Settings.builder()
                .put(super.nodeSettings(nodeOrdinal))
                .put(Environment.PATH_REPO_SETTING.getKey(), getBwcIndicesPath())
                .build();
        } else {
            // Configure using url white list
            try {
                // wildcard pattern matching any of the checked-in repo zips
                URI repoJarPatternUri = new URI("jar:" + getBwcIndicesPath().toUri().toString() + "*.zip!/repo/");
                return Settings.builder()
                    .put(super.nodeSettings(nodeOrdinal))
                    .putArray(URLRepository.ALLOWED_URLS_SETTING.getKey(), repoJarPatternUri.toString())
                    .build();
            } catch (URISyntaxException ex) {
                throw new IllegalArgumentException(ex);
            }
        }
    }

    /**
     * Restores the checked-in snapshot from every supported old version and also
     * verifies that the set of checked-in repositories matches the set of released
     * versions we are expected to be compatible with (no extras, none missing).
     */
    public void testRestoreOldSnapshots() throws Exception {
        String repo = "test_repo";
        String snapshot = "test_1";
        List<String> repoVersions = repoVersions();
        assertThat(repoVersions.size(), greaterThan(0));
        for (String version : repoVersions) {
            createRepo("repo", version, repo);
            testOldSnapshot(version, repo, snapshot);
        }

        // Build the set of versions for which a bwc repo SHOULD exist.
        SortedSet<String> expectedVersions = new TreeSet<>();
        for (Version v : VersionUtils.allReleasedVersions()) {
            if (VersionUtils.isSnapshot(v)) continue;  // snapshots are unreleased, so there is no backcompat yet
            if (v.isRelease() == false) continue; // no guarantees for prereleases
            if (v.onOrBefore(Version.V_2_0_0_beta1)) continue; // we can only test back one major lucene version
            if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself
            expectedVersions.add(v.toString());
        }

        // Extra repos only warn (e.g. repos for versions not released from this branch)...
        for (String repoVersion : repoVersions) {
            if (expectedVersions.remove(repoVersion) == false) {
                logger.warn("Old repositories tests contain extra repo: {}", repoVersion);
            }
        }
        // ...but missing repos fail the test.
        if (expectedVersions.isEmpty() == false) {
            StringBuilder msg = new StringBuilder("Old repositories tests are missing versions:");
            for (String expected : expectedVersions) {
                msg.append("\n" + expected);
            }
            fail(msg.toString());
        }
    }

    /** Verifies that snapshots from versions too old to upgrade fail to restore with a clear error. */
    public void testRestoreUnsupportedSnapshots() throws Exception {
        String repo = "test_repo";
        String snapshot = "test_1";
        List<String> repoVersions = unsupportedRepoVersions();
        assertThat(repoVersions.size(), greaterThan(0));
        for (String version : repoVersions) {
            createRepo("unsupportedrepo", version, repo);
            assertUnsupportedIndexFailsToRestore(repo, snapshot);
        }
    }

    /**
     * Restores a 2.3.4-era snapshot whose repository is missing checksum files,
     * verifying the restore still succeeds and the data is searchable.
     */
    public void testRestoreSnapshotWithMissingChecksum() throws Exception {
        final String repo = "test_repo";
        final String snapshot = "test_1";
        final String indexName = "index-2.3.4";
        final String repoFileId = "missing-checksum-repo-2.3.4";
        Path repoFile = getBwcIndicesPath().resolve(repoFileId + ".zip");
        URI repoFileUri = repoFile.toUri();
        // mount the zip's /repo directory as a read-only URL repository
        URI repoJarUri = new URI("jar:" + repoFileUri.toString() + "!/repo/");
        logger.info("--> creating repository [{}] for repo file [{}]", repo, repoFileId);
        assertAcked(client().admin().cluster().preparePutRepository(repo)
            .setType("url")
            .setSettings(Settings.builder().put("url", repoJarUri.toString())));

        logger.info("--> get snapshot and check its indices");
        GetSnapshotsResponse getSnapshotsResponse = client().admin().cluster().prepareGetSnapshots(repo).setSnapshots(snapshot).get();
        assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1));
        SnapshotInfo snapshotInfo = getSnapshotsResponse.getSnapshots().get(0);
        assertThat(snapshotInfo.indices(), equalTo(Arrays.asList(indexName)));

        logger.info("--> restoring snapshot");
        RestoreSnapshotResponse response = client().admin().cluster().prepareRestoreSnapshot(repo, snapshot).setRestoreGlobalState(true).setWaitForCompletion(true).get();
        assertThat(response.status(), equalTo(RestStatus.OK));
        RestoreInfo restoreInfo = response.getRestoreInfo();
        assertThat(restoreInfo.successfulShards(), greaterThan(0));
        assertThat(restoreInfo.successfulShards(), equalTo(restoreInfo.totalShards()));
        assertThat(restoreInfo.failedShards(), equalTo(0));
        String index = restoreInfo.indices().get(0);
        assertThat(index, equalTo(indexName));

        logger.info("--> check search");
        SearchResponse searchResponse = client().prepareSearch(index).get();
        assertThat(searchResponse.getHits().totalHits(), greaterThan(0L));

        logger.info("--> cleanup");
        cluster().wipeIndices(restoreInfo.indices().toArray(new String[restoreInfo.indices().size()]));
        cluster().wipeTemplates();
    }

    /** Versions with a checked-in {@code repo-<version>.zip} (restorable snapshots). */
    private List<String> repoVersions() throws Exception {
        return listRepoVersions("repo");
    }

    /** Versions with a checked-in {@code unsupportedrepo-<version>.zip} (too old to restore). */
    private List<String> unsupportedRepoVersions() throws Exception {
        return listRepoVersions("unsupportedrepo");
    }

    /** Lists the version part of every {@code <prefix>-<version>.zip} in the bwc-indices directory. */
    private List<String> listRepoVersions(String prefix) throws Exception {
        List<String> repoVersions = new ArrayList<>();
        Path repoFiles = getBwcIndicesPath();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(repoFiles, prefix + "-*.zip")) {
            for (Path entry : stream) {
                String fileName = entry.getFileName().toString();
                // strip "<prefix>-" and the trailing ".zip" to recover the version string
                String version = fileName.substring(prefix.length() + 1);
                version = version.substring(0, version.length() - ".zip".length());
                repoVersions.add(version);
            }
        }
        return repoVersions;
    }

    /** Registers a URL repository named {@code repo} backed by {@code <prefix>-<version>.zip}. */
    private void createRepo(String prefix, String version, String repo) throws Exception {
        Path repoFile = getBwcIndicesPath().resolve(prefix + "-" + version + ".zip");
        URI repoFileUri = repoFile.toUri();
        URI repoJarUri = new URI("jar:" + repoFileUri.toString() + "!/repo/");
        logger.info("--> creating repository [{}] for version [{}]", repo, version);
        assertAcked(client().admin().cluster().preparePutRepository(repo)
            .setType("url").setSettings(Settings.builder()
                .put("url", repoJarUri.toString())));
    }

    /**
     * Restores one old snapshot and verifies its version, data, restored global
     * state (persistent settings) and index templates, then cleans up.
     */
    private void testOldSnapshot(String version, String repo, String snapshot) throws IOException {
        logger.info("--> get snapshot and check its version");
        GetSnapshotsResponse getSnapshotsResponse = client().admin().cluster().prepareGetSnapshots(repo).setSnapshots(snapshot).get();
        assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1));
        SnapshotInfo snapshotInfo = getSnapshotsResponse.getSnapshots().get(0);
        assertThat(snapshotInfo.version().toString(), equalTo(version));

        logger.info("--> restoring snapshot");
        RestoreSnapshotResponse response = client().admin().cluster().prepareRestoreSnapshot(repo, snapshot).setRestoreGlobalState(true).setWaitForCompletion(true).get();
        assertThat(response.status(), equalTo(RestStatus.OK));
        RestoreInfo restoreInfo = response.getRestoreInfo();
        assertThat(restoreInfo.successfulShards(), greaterThan(0));
        assertThat(restoreInfo.successfulShards(), equalTo(restoreInfo.totalShards()));
        assertThat(restoreInfo.failedShards(), equalTo(0));
        String index = restoreInfo.indices().get(0);

        logger.info("--> check search");
        SearchResponse searchResponse = client().prepareSearch(index).get();
        // NOTE(review): these repos appear to contain at least 2 docs (greaterThan(1L) here
        // vs greaterThan(0L) in the missing-checksum test) — presumably fixed by the repo fixtures
        assertThat(searchResponse.getHits().totalHits(), greaterThan(1L));

        logger.info("--> check settings");
        // the snapshot was taken with restoreGlobalState, so a version-tagged persistent setting comes back
        ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
        assertThat(clusterState.metaData().persistentSettings().get(FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "version_attr"), equalTo(version));

        logger.info("--> check templates");
        IndexTemplateMetaData template = clusterState.getMetaData().templates().get("template_" + version.toLowerCase(Locale.ROOT));
        assertThat(template, notNullValue());
        assertThat(template.patterns(), equalTo(Collections.singletonList("te*")));
        assertThat(template.settings().getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1), equalTo(1));
        assertThat(template.mappings().size(), equalTo(1));
        assertThat(template.mappings().get("type1").string(), equalTo("{\"type1\":{\"_source\":{\"enabled\":false}}}"));
        assertThat(template.aliases().size(), equalTo(3));
        assertThat(template.aliases().get("alias1"), notNullValue());
        assertThat(template.aliases().get("alias2").filter().string(), containsString(version));
        assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy"));
        assertThat(template.aliases().get("{index}-alias"), notNullValue());

        logger.info("--> cleanup");
        cluster().wipeIndices(restoreInfo.indices().toArray(new String[restoreInfo.indices().size()]));
        cluster().wipeTemplates();
    }

    /** Asserts the restore fails with the "cannot be upgraded" SnapshotRestoreException. */
    private void assertUnsupportedIndexFailsToRestore(String repo, String snapshot) throws IOException {
        logger.info("--> restoring unsupported snapshot");
        try {
            client().admin().cluster().prepareRestoreSnapshot(repo, snapshot).setRestoreGlobalState(true).setWaitForCompletion(true).get();
            fail("should have failed to restore");
        } catch (SnapshotRestoreException ex) {
            assertThat(ex.getMessage(), containsString("cannot restore index"));
            assertThat(ex.getMessage(), containsString("because it cannot be upgraded"));
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jmeter.visualizers;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;

import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;

import org.apache.commons.lang3.StringUtils;
import org.apache.jmeter.extractor.BoundaryExtractor;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.gui.GuiUtils;
import org.apache.jorphan.gui.JLabeledTextField;

/**
 * Implement ResultsRender for Boundary Extractor tester
 */
public class RenderAsBoundaryExtractor implements ResultRenderer, ActionListener {

    private static final String BOUNDARY_EXTRACTOR_TESTER_COMMAND = "boundary_extractor_tester"; // $NON-NLS-1$

    private JPanel boundaryExtractorPane;

    // sampler response text the extractor is tested against
    private JTextArea boundaryExtractorDataField;

    private JLabeledTextField boundaryExtractorFieldLeft;

    private JLabeledTextField boundaryExtractorFieldRight;

    // read-only area showing the match count and matches
    private JTextArea boundaryExtractorResultField;

    private JTabbedPane rightSide;

    /**
     * Handle the "Test" button: if the data field is non-empty, run the
     * boundary extractor against its contents and display the matches.
     *
     * @param e the ActionEvent being processed
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        String command = e.getActionCommand();
        String boundaryExtractorDataFieldText = boundaryExtractorDataField.getText();
        if (StringUtils.isNotEmpty(boundaryExtractorDataFieldText) && BOUNDARY_EXTRACTOR_TESTER_COMMAND.equals(command)) {
            executeAndShowBoundaryExtractorTester(boundaryExtractorDataFieldText);
        }
    }

    /**
     * Launch boundaryExtractor engine to parse an input text; does nothing
     * unless both the left and right boundary fields are filled in.
     * @param textToParse text to run the extractor against
     */
    private void executeAndShowBoundaryExtractorTester(String textToParse) {
        if (textToParse != null && textToParse.length() > 0
                && this.boundaryExtractorFieldLeft.getText().length() > 0
                && this.boundaryExtractorFieldRight.getText().length() > 0) {
            this.boundaryExtractorResultField.setText(process(textToParse));
            this.boundaryExtractorResultField.setCaretPosition(0); // go to first line
        }
    }

    /**
     * Run a {@link BoundaryExtractor} with the current left/right boundaries
     * and format all matches into a displayable multi-line string.
     */
    private String process(String textToParse) {
        BoundaryExtractor extractor = new BoundaryExtractor();

        List<String> matches = extractor.extractAll(
                boundaryExtractorFieldLeft.getText(), boundaryExtractorFieldRight.getText(), textToParse);

        int nbFound = matches.size();
        // Construct a multi-line string with all matches
        StringBuilder sb = new StringBuilder();
        sb.append("Match count: ").append(nbFound).append("\n");
        for (int j = 0; j < nbFound; j++) {
            String match = matches.get(j);
            sb.append("Match[").append(j+1).append("]=").append(match).append("\n");
        }
        return sb.toString();
    }

    /** {@inheritDoc} */
    @Override
    public void clearData() {
        this.boundaryExtractorDataField.setText(""); // $NON-NLS-1$
        this.boundaryExtractorFieldLeft.setText(""); // $NON-NLS-1$
        this.boundaryExtractorFieldRight.setText(""); // $NON-NLS-1$
        this.boundaryExtractorResultField.setText(""); // $NON-NLS-1$
    }

    /** {@inheritDoc} */
    @Override
    public void init() {
        // Create the panels for the boundaryExtractor tab
        boundaryExtractorPane = createBoundaryExtractorPanel();
    }

    /**
     * @return boundaryExtractor Tester panel
     */
    private JPanel createBoundaryExtractorPanel() {
        boundaryExtractorDataField = new JTextArea();
        boundaryExtractorDataField.setEditable(true);
        boundaryExtractorDataField.setLineWrap(true);
        boundaryExtractorDataField.setWrapStyleWord(true);

        JScrollPane boundaryExtractorDataPane = GuiUtils.makeScrollPane(boundaryExtractorDataField);
        boundaryExtractorDataPane.setPreferredSize(new Dimension(0, 200));

        JPanel pane = new JPanel(new BorderLayout(0, 5));

        JSplitPane mainSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT, boundaryExtractorDataPane,
                createBoundaryExtractorTasksPanel());
        mainSplit.setDividerLocation(0.6d);
        mainSplit.setOneTouchExpandable(true);
        pane.add(mainSplit, BorderLayout.CENTER);
        return pane;
    }

    /**
     * Create the boundaryExtractor task pane
     *
     * @return boundaryExtractor task pane
     */
    private JPanel createBoundaryExtractorTasksPanel() {
        JPanel boundaryExtractorActionPanel = new JPanel();
        boundaryExtractorActionPanel.setLayout(new BoxLayout(boundaryExtractorActionPanel, BoxLayout.X_AXIS));
        Border margin = new EmptyBorder(5, 5, 0, 5);
        boundaryExtractorActionPanel.setBorder(margin);
        boundaryExtractorFieldLeft = new JLabeledTextField(JMeterUtils.getResString("boundaryextractor_leftboundary_field")); // $NON-NLS-1$
        // NOTE(review): this panel uses BoxLayout, which ignores the BorderLayout
        // constraints passed to add() below — components are laid out left-to-right regardless
        boundaryExtractorActionPanel.add(boundaryExtractorFieldLeft, BorderLayout.WEST);
        boundaryExtractorFieldRight = new JLabeledTextField(JMeterUtils.getResString("boundaryextractor_rightboundary_field")); // $NON-NLS-1$
        boundaryExtractorActionPanel.add(boundaryExtractorFieldRight, BorderLayout.WEST);

        JButton boundaryExtractorTester = new JButton(JMeterUtils.getResString("boundaryextractor_tester_button_test")); // $NON-NLS-1$
        boundaryExtractorTester.setActionCommand(BOUNDARY_EXTRACTOR_TESTER_COMMAND);
        boundaryExtractorTester.addActionListener(this);
        boundaryExtractorActionPanel.add(boundaryExtractorTester, BorderLayout.EAST);

        boundaryExtractorResultField = new JTextArea();
        boundaryExtractorResultField.setEditable(false);
        boundaryExtractorResultField.setLineWrap(true);
        boundaryExtractorResultField.setWrapStyleWord(true);

        JPanel boundaryExtractorTasksPanel = new JPanel(new BorderLayout(0, 5));
        boundaryExtractorTasksPanel.add(boundaryExtractorActionPanel, BorderLayout.NORTH);
        boundaryExtractorTasksPanel.add(GuiUtils.makeScrollPane(boundaryExtractorResultField), BorderLayout.CENTER);

        return boundaryExtractorTasksPanel;
    }

    /** {@inheritDoc} */
    @Override
    public void setupTabPane() {
        // Add boundaryExtractor tester pane
        if (rightSide.indexOfTab(JMeterUtils.getResString("boundaryextractor_tester_title")) < 0) { // $NON-NLS-1$
            rightSide.addTab(JMeterUtils.getResString("boundaryextractor_tester_title"), boundaryExtractorPane); // $NON-NLS-1$
        }
        clearData();
    }

    @Override
    public void setLastSelectedTab(int index) {
        // nothing to do
    }

    /** {@inheritDoc} */
    @Override
    public synchronized void setRightSide(JTabbedPane side) {
        rightSide = side;
    }

    /** {@inheritDoc} */
    @Override
    public void setSamplerResult(Object userObject) {
        // NOOP
    }

    /** {@inheritDoc} */
    @Override
    public void renderResult(SampleResult sampleResult) {
        clearData();
        String response = ViewResultsFullVisualizer.getResponseAsString(sampleResult);
        boundaryExtractorDataField.setText(response);
        boundaryExtractorDataField.setCaretPosition(0);
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return JMeterUtils.getResString("boundaryextractor_tester_title"); // $NON-NLS-1$
    }

    /** {@inheritDoc} */
    @Override
    public void renderImage(SampleResult sampleResult) {
        clearData();
        // images cannot be parsed as text; show an explanatory message instead
        boundaryExtractorDataField.setText(JMeterUtils.getResString("boundaryextractor_render_no_text")); // $NON-NLS-1$
    }

    /** {@inheritDoc} */
    @Override
    public void setBackgroundColor(Color backGround) {
        // NOOP
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.diagnostics; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import org.apache.samza.job.model.ContainerModel; import org.apache.samza.metrics.reporter.Metrics; import org.apache.samza.metrics.reporter.MetricsHeader; import org.apache.samza.metrics.reporter.MetricsSnapshot; import org.apache.samza.serializers.model.SamzaObjectMapper; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Defines the contents for any message emitted to the diagnostic stream by the {@link DiagnosticsManager}. * All contents are stored in a {@link MetricsHeader} and a metricsMessage map which combine to get a {@link MetricsSnapshot}, * which can be serialized using serdes ({@link org.apache.samza.serializers.MetricsSnapshotSerdeV2}). * This class serializes {@link ContainerModel} using {@link SamzaObjectMapper} before adding to the metrics message. 
* */ public class DiagnosticsStreamMessage { private static final Logger LOG = LoggerFactory.getLogger(DiagnosticsStreamMessage.class); public static final String GROUP_NAME_FOR_DIAGNOSTICS_MANAGER = DiagnosticsManager.class.getName(); // Using DiagnosticsManager as the group name for processor-stop-events, job-related params, and container model private static final String SAMZACONTAINER_METRICS_GROUP_NAME = "org.apache.samza.container.SamzaContainerMetrics"; // Using SamzaContainerMetrics as the group name for exceptions to maintain compatibility with existing diagnostics private static final String EXCEPTION_LIST_METRIC_NAME = "exceptions"; private static final String STOP_EVENT_LIST_METRIC_NAME = "stopEvents"; private static final String CONTAINER_MB_METRIC_NAME = "containerMemoryMb"; private static final String CONTAINER_NUM_CORES_METRIC_NAME = "containerNumCores"; private static final String CONTAINER_NUM_STORES_WITH_CHANGELOG_METRIC_NAME = "numStoresWithChangelog"; private static final String CONTAINER_MAX_CONFIGURED_HEAP_METRIC_NAME = "maxHeap"; private static final String CONTAINER_THREAD_POOL_SIZE_METRIC_NAME = "containerThreadPoolSize"; private static final String CONTAINER_MODELS_METRIC_NAME = "containerModels"; private final MetricsHeader metricsHeader; private final Map<String, Map<String, Object>> metricsMessage; public DiagnosticsStreamMessage(String jobName, String jobId, String containerName, String executionEnvContainerId, String taskClassVersion, String samzaVersion, String hostname, long timestamp, long resetTimestamp) { // Create the metricHeader metricsHeader = new MetricsHeader(jobName, jobId, containerName, executionEnvContainerId, DiagnosticsManager.class.getName(), taskClassVersion, samzaVersion, hostname, timestamp, resetTimestamp); this.metricsMessage = new HashMap<>(); } /** * Add the container memory mb parameter to the message. * @param containerMemoryMb the memory mb parameter value. 
*/ public void addContainerMb(Integer containerMemoryMb) { addToMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_MB_METRIC_NAME, containerMemoryMb); } /** * Add the container num cores parameter to the message. * @param containerNumCores the num core parameter value. */ public void addContainerNumCores(Integer containerNumCores) { addToMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_NUM_CORES_METRIC_NAME, containerNumCores); } /** * Add the num stores with changelog parameter to the message. * @param numStoresWithChangelog the parameter value. */ public void addNumStoresWithChangelog(Integer numStoresWithChangelog) { addToMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_NUM_STORES_WITH_CHANGELOG_METRIC_NAME, numStoresWithChangelog); } /** * Add the configured max heap size in bytes. * @param maxHeapSize the parameter value. */ public void addMaxHeapSize(Long maxHeapSize) { addToMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_MAX_CONFIGURED_HEAP_METRIC_NAME, maxHeapSize); } /** * Add the configured container thread pool size. * @param threadPoolSize the parameter value. */ public void addContainerThreadPoolSize(Integer threadPoolSize) { addToMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_THREAD_POOL_SIZE_METRIC_NAME, threadPoolSize); } /** * Add a map of container models (indexed by containerID) to the message. * @param containerModelMap the container models map */ public void addContainerModels(Map<String, ContainerModel> containerModelMap) { if (containerModelMap != null && !containerModelMap.isEmpty()) { addToMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_MODELS_METRIC_NAME, serializeContainerModelMap(containerModelMap)); } } /** * Add a list of {@link DiagnosticsExceptionEvent}s to the message. * @param exceptionList the list to add. 
*/ public void addDiagnosticsExceptionEvents(Collection<DiagnosticsExceptionEvent> exceptionList) { if (exceptionList != null && !exceptionList.isEmpty()) { addToMetricsMessage(SAMZACONTAINER_METRICS_GROUP_NAME, EXCEPTION_LIST_METRIC_NAME, exceptionList); } } /** * Add a list of {@link org.apache.samza.diagnostics.ProcessorStopEvent}s to add to the list. * @param stopEventList the list to add. */ public void addProcessorStopEvents(List<ProcessorStopEvent> stopEventList) { if (stopEventList != null && !stopEventList.isEmpty()) { addToMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, STOP_EVENT_LIST_METRIC_NAME, stopEventList); } } /** * Convert this message into a {@link MetricsSnapshot}, useful for serde-deserde using {@link org.apache.samza.serializers.MetricsSnapshotSerde}. * @return */ public MetricsSnapshot convertToMetricsSnapshot() { MetricsSnapshot metricsSnapshot = new MetricsSnapshot(metricsHeader, new Metrics(metricsMessage)); return metricsSnapshot; } /** * Check if the message has no contents. * @return True if the message is empty, false otherwise. 
*/ public boolean isEmpty() { return metricsMessage.isEmpty(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DiagnosticsStreamMessage that = (DiagnosticsStreamMessage) o; return metricsHeader.getAsMap().equals(that.metricsHeader.getAsMap()) && metricsMessage.equals(that.metricsMessage); } @Override public int hashCode() { return Objects.hash(metricsHeader, metricsMessage); } public Collection<ProcessorStopEvent> getProcessorStopEvents() { return (Collection<ProcessorStopEvent>) getFromMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, STOP_EVENT_LIST_METRIC_NAME); } public Collection<DiagnosticsExceptionEvent> getExceptionEvents() { return (Collection<DiagnosticsExceptionEvent>) getFromMetricsMessage(SAMZACONTAINER_METRICS_GROUP_NAME, EXCEPTION_LIST_METRIC_NAME); } public Integer getContainerMb() { return (Integer) getFromMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_MB_METRIC_NAME); } public Integer getContainerNumCores() { return (Integer) getFromMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_NUM_CORES_METRIC_NAME); } public Integer getNumStoresWithChangelog() { return (Integer) getFromMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_NUM_STORES_WITH_CHANGELOG_METRIC_NAME); } public Long getMaxHeapSize() { return (Long) getFromMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_MAX_CONFIGURED_HEAP_METRIC_NAME); } public Integer getContainerThreadPoolSize() { return (Integer) getFromMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_THREAD_POOL_SIZE_METRIC_NAME); } public Map<String, ContainerModel> getContainerModels() { return deserializeContainerModelMap((String) getFromMetricsMessage(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER, CONTAINER_MODELS_METRIC_NAME)); } // Helper method to get a {@link DiagnosticsStreamMessage} from a {@link MetricsSnapshot}. 
// * This is typically used when deserializing messages from a diagnostics-stream. // * @param metricsSnapshot public static DiagnosticsStreamMessage convertToDiagnosticsStreamMessage(MetricsSnapshot metricsSnapshot) { DiagnosticsStreamMessage diagnosticsStreamMessage = new DiagnosticsStreamMessage(metricsSnapshot.getHeader().getJobName(), metricsSnapshot.getHeader().getJobId(), metricsSnapshot.getHeader().getContainerName(), metricsSnapshot.getHeader().getExecEnvironmentContainerId(), metricsSnapshot.getHeader().getVersion(), metricsSnapshot.getHeader().getSamzaVersion(), metricsSnapshot.getHeader().getHost(), metricsSnapshot.getHeader().getTime(), metricsSnapshot.getHeader().getResetTime()); Map<String, Map<String, Object>> metricsMap = metricsSnapshot.getMetrics().getAsMap(); Map<String, Object> diagnosticsManagerGroupMap = metricsMap.get(GROUP_NAME_FOR_DIAGNOSTICS_MANAGER); Map<String, Object> containerMetricsGroupMap = metricsMap.get(SAMZACONTAINER_METRICS_GROUP_NAME); if (diagnosticsManagerGroupMap != null) { diagnosticsStreamMessage.addContainerNumCores((Integer) diagnosticsManagerGroupMap.get(CONTAINER_NUM_CORES_METRIC_NAME)); diagnosticsStreamMessage.addContainerMb((Integer) diagnosticsManagerGroupMap.get(CONTAINER_MB_METRIC_NAME)); diagnosticsStreamMessage.addNumStoresWithChangelog((Integer) diagnosticsManagerGroupMap.get(CONTAINER_NUM_STORES_WITH_CHANGELOG_METRIC_NAME)); diagnosticsStreamMessage.addContainerModels(deserializeContainerModelMap((String) diagnosticsManagerGroupMap.get(CONTAINER_MODELS_METRIC_NAME))); diagnosticsStreamMessage.addMaxHeapSize((Long) diagnosticsManagerGroupMap.get(CONTAINER_MAX_CONFIGURED_HEAP_METRIC_NAME)); diagnosticsStreamMessage.addContainerThreadPoolSize((Integer) diagnosticsManagerGroupMap.get(CONTAINER_THREAD_POOL_SIZE_METRIC_NAME)); diagnosticsStreamMessage.addProcessorStopEvents((List<ProcessorStopEvent>) diagnosticsManagerGroupMap.get(STOP_EVENT_LIST_METRIC_NAME)); } if (containerMetricsGroupMap != null && 
containerMetricsGroupMap.containsKey(EXCEPTION_LIST_METRIC_NAME)) { diagnosticsStreamMessage.addDiagnosticsExceptionEvents( (Collection<DiagnosticsExceptionEvent>) containerMetricsGroupMap.get(EXCEPTION_LIST_METRIC_NAME)); } return diagnosticsStreamMessage; } /** * Helper method to use {@link SamzaObjectMapper} to serialize {@link ContainerModel}s. * We use SamzaObjectMapper for ContainerModels, rather than using ObjectMapper (in MetricsSnapshotSerdeV2) * because MetricsSnapshotSerdeV2 enables default typing, which writes type information for all containerModel (and * underlying) classes, deserializing which requires a large number of jackson related changes to those classes * (annotations and/or mixins). We cannot disable default typing to avoid backward incompatibility. This is why * we serde-deserde ContainerModel explicitly using SamzaObjectMapper (which is also used for reads-writes to coordinator * stream). * {@link SamzaObjectMapper} provides several conventions and optimizations for serializing containerModels. * @param containerModelMap map of container models to serialize. * @return */ private static String serializeContainerModelMap(Map<String, ContainerModel> containerModelMap) { ObjectMapper samzaObjectMapper = SamzaObjectMapper.getObjectMapper(); try { return samzaObjectMapper.writeValueAsString(containerModelMap); } catch (IOException e) { LOG.error("Exception in serializing container model ", e); } return null; } /** * Helper method to use {@link SamzaObjectMapper} to deserialize {@link ContainerModel}s. * {@link SamzaObjectMapper} provides several conventions and optimizations for deserializing containerModels. 
* @return */ private static Map<String, ContainerModel> deserializeContainerModelMap( String serializedContainerModel) { Map<String, ContainerModel> containerModelMap = null; ObjectMapper samzaObjectMapper = SamzaObjectMapper.getObjectMapper(); try { if (serializedContainerModel != null) { containerModelMap = samzaObjectMapper.readValue(serializedContainerModel, new TypeReference<Map<String, ContainerModel>>() { }); } } catch (IOException e) { LOG.error("Exception in deserializing container model ", e); } return containerModelMap; } private void addToMetricsMessage(String groupName, String metricName, Object value) { if (value != null) { metricsMessage.putIfAbsent(groupName, new HashMap<>()); metricsMessage.get(groupName).put(metricName, value); } } private Object getFromMetricsMessage(String groupName, String metricName) { if (metricsMessage.containsKey(groupName) && metricsMessage.get(groupName) != null) { return metricsMessage.get(groupName).get(metricName); } else { return null; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.ambari.logfeeder.logconfig;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;

import org.apache.ambari.logfeeder.common.LogFeederConstants;
import org.apache.ambari.logfeeder.util.LogFeederUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;

/**
 * Daemon thread that periodically fetches log-level filter configuration from Solr
 * and exposes it to the rest of the log feeder via static accessors.
 */
public class LogConfigHandler extends Thread {
  private static final Logger LOG = Logger.getLogger(LogConfigHandler.class);

  private static final int DEFAULT_SOLR_CONFIG_INTERVAL = 5;
  private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS";
  private static final String TIMEZONE = "GMT";
  private static final int RETRY_INTERVAL = 30;

  static {
    TimeZone.setDefault(TimeZone.getTimeZone(TIMEZONE));
  }

  // SimpleDateFormat is not thread-safe, so each thread gets its own instance.
  private static ThreadLocal<DateFormat> formatter = new ThreadLocal<DateFormat>() {
    protected DateFormat initialValue() {
      SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT);
      dateFormat.setTimeZone(TimeZone.getTimeZone(TIMEZONE));
      return dateFormat;
    }
  };

  private static boolean filterEnabled;
  // volatile: written by this background thread, read by callers of isFilterAvailable()/findComponentFilter().
  private static volatile LogFeederFilterWrapper logFeederFilterWrapper;

  private static boolean running = false;

  /**
   * Starts the filter-config polling thread if filtering is enabled and it is not already
   * running. Synchronized so concurrent callers cannot observe running == false and start two threads.
   */
  public static synchronized void handleConfig() {
    filterEnabled = LogFeederUtil.getBooleanProperty("logfeeder.log.filter.enable", false);
    if (!filterEnabled) {
      LOG.info("Logfeeder filter Scheduler is disabled.");
      return;
    }
    if (!running) {
      new LogConfigHandler().start();
      running = true;
      LOG.info("Logfeeder Filter Thread started!");
    } else {
      LOG.warn("Logfeeder Filter Thread is already running.");
    }
  }

  private LogConfigHandler() {
    setName(getClass().getSimpleName());
    setDaemon(true);
  }

  @Override
  public void run() {
    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
    String solrUrl = LogFeederUtil.getStringProperty("logfeeder.solr.url");
    if (StringUtils.isBlank(zkConnectString) && StringUtils.isBlank(solrUrl)) {
      LOG.warn("Neither Solr ZK Connect String nor solr Url for UserConfig/History is set." +
          "Won't look for level configuration from Solr.");
      return;
    }

    int solrConfigInterval = LogFeederUtil.getIntProperty("logfeeder.solr.config.interval",
        DEFAULT_SOLR_CONFIG_INTERVAL);
    do {
      LOG.debug("Updating config from solr after every " + solrConfigInterval + " sec.");
      fetchConfig();
      try {
        Thread.sleep(1000 * solrConfigInterval);
      } catch (InterruptedException e) {
        // Restore the interrupt status and stop polling instead of spinning on an interrupted thread.
        Thread.currentThread().interrupt();
        LOG.error(e.getLocalizedMessage(), e);
        return;
      }
    } while (true);
  }

  /** Fetches the filter config JSON from Solr and deserializes it into the shared wrapper. */
  private synchronized void fetchConfig() {
    LogConfigFetcher fetcher = LogConfigFetcher.getInstance();
    if (fetcher == null) {
      return;
    }
    Map<String, Object> configDocMap = fetcher.getConfigDoc();
    // NOTE(review): guard added — assumes getConfigDoc() may return null when no config document
    // exists yet; previously this would NPE. Verify against LogConfigFetcher.
    if (configDocMap == null) {
      return;
    }
    String configJson = (String) configDocMap.get(LogFeederConstants.VALUES);
    if (configJson != null) {
      logFeederFilterWrapper = LogFeederUtil.getGson().fromJson(configJson, LogFeederFilterWrapper.class);
    }
  }

  /** @return true once filter configuration has been fetched from Solr. */
  public static boolean isFilterAvailable() {
    return logFeederFilterWrapper != null;
  }

  /**
   * Computes the log levels allowed for a host under the given component filter.
   * Override levels apply when the filter defines an override (expiry time, override levels,
   * or an explicit host list) covering the host and the filter has not expired; otherwise
   * the filter's default levels apply.
   *
   * @param hostName host to check against the filter's host list.
   * @param componentFilter filter for a single component; must not be null.
   * @return the allowed level names.
   */
  public static List<String> getAllowedLevels(String hostName, LogFeederFilter componentFilter) {
    String componentName = componentFilter.getLabel();
    List<String> hosts = componentFilter.getHosts();
    List<String> defaultLevels = componentFilter.getDefaultLevels();
    List<String> overrideLevels = componentFilter.getOverrideLevels();
    String expiryTime = componentFilter.getExpiryTime();

    // A user override is in effect if any of expiry time, override levels or hosts is set.
    if (StringUtils.isNotEmpty(expiryTime) || CollectionUtils.isNotEmpty(overrideLevels) ||
        CollectionUtils.isNotEmpty(hosts)) {
      if (CollectionUtils.isEmpty(hosts)) {
        // Empty or null host list means the override applies to all hosts. Use a local list:
        // the original added to the filter's own list, which NPEs when getHosts() returns null
        // and needlessly mutates the filter's state.
        hosts = new ArrayList<String>();
        hosts.add(LogFeederConstants.ALL);
      }
      if (LogFeederUtil.isListContains(hosts, hostName, false)) {
        if (isFilterExpired(componentFilter)) {
          LOG.debug("Filter for component " + componentName + " and host :" + hostName + " is expired at " +
              componentFilter.getExpiryTime());
          return defaultLevels;
        } else {
          return overrideLevels;
        }
      }
    }
    return defaultLevels;
  }

  /** @return true if the filter carries an expiry time that is in the past. */
  private static boolean isFilterExpired(LogFeederFilter logfeederFilter) {
    if (logfeederFilter == null)
      return false;

    Date filterEndDate = parseFilterExpireDate(logfeederFilter);
    if (filterEndDate == null) {
      return false;
    }

    Date currentDate = new Date();
    if (!currentDate.before(filterEndDate)) {
      LOG.debug("Filter for Component :" + logfeederFilter.getLabel() + " and Hosts : [" +
          StringUtils.join(logfeederFilter.getHosts(), ',') + "] is expired because of filter endTime : " +
          formatter.get().format(filterEndDate) + " is older than currentTime :" +
          formatter.get().format(currentDate));
      return true;
    } else {
      return false;
    }
  }

  /** Parses the filter's expiry time; returns null when absent or unparseable (logged). */
  private static Date parseFilterExpireDate(LogFeederFilter vLogfeederFilter) {
    String expiryTime = vLogfeederFilter.getExpiryTime();
    if (StringUtils.isNotEmpty(expiryTime)) {
      try {
        return formatter.get().parse(expiryTime);
      } catch (ParseException e) {
        LOG.error("Filter have invalid ExpiryTime : " + expiryTime + " for component :" +
            vLogfeederFilter.getLabel() + " and hosts : [" + StringUtils.join(vLogfeederFilter.getHosts(), ',') + "]");
      }
    }
    return null;
  }

  /**
   * Looks up the filter for a component, blocking until config has been fetched
   * (see {@link #waitForFilter()}).
   *
   * @param componentName component to look up.
   * @return the component's filter, or null when none is configured.
   */
  public static LogFeederFilter findComponentFilter(String componentName) {
    waitForFilter();

    if (logFeederFilterWrapper != null) {
      HashMap<String, LogFeederFilter> filter = logFeederFilterWrapper.getFilter();
      if (filter != null) {
        LogFeederFilter componentFilter = filter.get(componentName);
        if (componentFilter != null) {
          return componentFilter;
        }
      }
    }
    LOG.trace("Filter is not there for component :" + componentName);
    return null;
  }

  /**
   * Blocks until filter config has been fetched. Returns immediately when filtering is
   * disabled or config is already present; returns early if the waiting thread is interrupted.
   */
  private static void waitForFilter() {
    if (!filterEnabled || logFeederFilterWrapper != null) {
      return;
    }

    while (true) {
      try {
        Thread.sleep(RETRY_INTERVAL * 1000);
      } catch (InterruptedException e) {
        // Preserve interrupt status and give up waiting rather than looping on an interrupted thread.
        Thread.currentThread().interrupt();
        LOG.error(e);
        return;
      }

      LOG.info("Checking if config is available");
      if (logFeederFilterWrapper != null) {
        LOG.info("Config is available");
        return;
      }
    }
  }
}
/*
 * This file is part of RebornCore, licensed under the MIT License (MIT).
 *
 * Copyright (c) 2021 TeamReborn
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package reborncore.common.blockentity;

import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Direction;
import net.minecraft.world.BlockView;
import reborncore.client.multiblock.HologramRenderer;

import java.util.function.BiPredicate;

/**
 * Writes a multiblock for either verification or hologram rendering
 *
 * @author ramidzkh
 * @see MultiblockVerifier
 * @see HologramRenderer
 */
public interface MultiblockWriter {

	/**
	 * Adds a block to the multiblock
	 *
	 * @param x         {@code int} X
	 * @param y         {@code int} Y
	 * @param z         {@code int} Z
	 * @param predicate {@link BiPredicate} Predicate of the position
	 * @param state     {@link BlockState} The state for the hologram
	 * @return {@link MultiblockWriter} This. Useful for chaining
	 */
	MultiblockWriter add(int x, int y, int z, BiPredicate<BlockView, BlockPos> predicate, BlockState state);

	/**
	 * Fills a section from (ax, ay, az) inclusive to (bx, by, bz) exclusive.
	 *
	 * @param ax        {@code int} X of the first point (inclusive)
	 * @param ay        {@code int} Y of the first point (inclusive)
	 * @param az        {@code int} Z of the first point (inclusive)
	 * @param bx        {@code int} X of the second point (exclusive)
	 * @param by        {@code int} Y of the second point (exclusive)
	 * @param bz        {@code int} Z of the second point (exclusive)
	 * @param predicate {@link BiPredicate} Predicate of the position
	 * @param state     {@link BlockState} The state for the hologram
	 * @return {@link MultiblockWriter} This. Useful for chaining
	 */
	default MultiblockWriter fill(int ax, int ay, int az, int bx, int by, int bz, BiPredicate<BlockView, BlockPos> predicate, BlockState state) {
		for (int x = ax; x < bx; x++) {
			for (int y = ay; y < by; y++) {
				for (int z = az; z < bz; z++) {
					add(x, y, z, predicate, state);
				}
			}
		}

		return this;
	}

	/**
	 * Fills the outer ring of (0, 0, 0) to (pX, pY, pZ) through the axis, using the <code>predicate</code> and
	 * <code>state</code>. The inside of the ring uses <code>holePredicate</code> and <code>holeHologramState</code>.
	 * The plane of the ring sits at the coordinate given for the through-axis (e.g. x = pX when through = X).
	 *
	 * @param through           {@link Direction.Axis} The axis to go through
	 * @param pX                {@code int} Size on the X axis
	 * @param pY                {@code int} Size on the Y axis
	 * @param pZ                {@code int} Size on the Z axis
	 * @param predicate         {@link BiPredicate} Predicate for the ring
	 * @param state             {@link BlockState} The state for the hologram
	 * @param holePredicate     {@link BiPredicate} Predicate for the hole; when null, the negation of
	 *                          <code>predicate</code> is used
	 * @param holeHologramState {@link BlockState} The hole state for the hologram; when null, air is used
	 * @return {@link MultiblockWriter} This. Useful for chaining
	 */
	default MultiblockWriter ring(Direction.Axis through, int pX, int pY, int pZ, BiPredicate<BlockView, BlockPos> predicate, BlockState state, BiPredicate<BlockView, BlockPos> holePredicate, BlockState holeHologramState) {
		if (holePredicate == null) {
			holePredicate = predicate.negate();
		}

		if (holeHologramState == null) {
			holeHologramState = Blocks.AIR.getDefaultState();
		}

		if (through == Direction.Axis.X) {
			for (int y = 0; y < pY; y++) {
				for (int z = 0; z < pZ; z++) {
					// Border positions form the ring; interior positions form the hole.
					if ((y == 0 || y == (pY - 1)) || (z == 0 || z == (pZ - 1))) {
						add(pX, y, z, predicate, state);
					} else {
						add(pX, y, z, holePredicate, holeHologramState);
					}
				}
			}
		} else if (through == Direction.Axis.Y) {
			for (int x = 0; x < pX; x++) {
				for (int z = 0; z < pZ; z++) {
					if ((x == 0 || x == (pX - 1)) || (z == 0 || z == (pZ - 1))) {
						add(x, pY, z, predicate, state);
					} else {
						add(x, pY, z, holePredicate, holeHologramState);
					}
				}
			}
		} else if (through == Direction.Axis.Z) {
			for (int x = 0; x < pX; x++) {
				for (int y = 0; y < pY; y++) {
					if ((x == 0 || x == (pX - 1)) || (y == 0 || y == (pY - 1))) {
						add(x, y, pZ, predicate, state);
					} else {
						add(x, y, pZ, holePredicate, holeHologramState);
					}
				}
			}
		}

		return this;
	}

	/**
	 * Ring whose hole matches (and renders as) air.
	 */
	default MultiblockWriter ringWithAir(Direction.Axis through, int x, int y, int z, BiPredicate<BlockView, BlockPos> predicate, BlockState state) {
		return ring(through, x, y, z, predicate, state, (view, pos) -> view.getBlockState(pos).getBlock() == Blocks.AIR, Blocks.AIR.getDefaultState());
	}

	/**
	 * Adds a block which must match <code>state</code> exactly.
	 */
	default MultiblockWriter add(int x, int y, int z, BlockState state) {
		return this.add(x, y, z, (view, pos) -> view.getBlockState(pos) == state, state);
	}

	/**
	 * Fills a section (exclusive upper bounds) with blocks which must match <code>state</code> exactly.
	 */
	default MultiblockWriter fill(int ax, int ay, int az, int bx, int by, int bz, BlockState state) {
		return fill(ax, ay, az, bx, by, bz, (view, pos) -> view.getBlockState(pos) == state, state);
	}

	/**
	 * Ring whose border and hole must match <code>state</code> and <code>holeState</code> exactly.
	 */
	default MultiblockWriter ring(Direction.Axis through, int x, int y, int z, BlockState state, BlockState holeState) {
		return ring(through, x, y, z, (view, pos) -> view.getBlockState(pos) == state, state, (view, pos) -> view.getBlockState(pos) == holeState, holeState);
	}

	/**
	 * Air-holed ring whose border must match <code>state</code> exactly.
	 */
	default MultiblockWriter ringWithAir(Direction.Axis through, int x, int y, int z, BlockState state) {
		return ringWithAir(through, x, y, z, (view, pos) -> view.getBlockState(pos) == state, state);
	}

	/**
	 * Returns a view of this writer with all coordinates shifted by the given offsets.
	 */
	default MultiblockWriter translate(int offsetX, int offsetY, int offsetZ) {
		return (x, y, z, predicate, state) -> add(offsetX + x, offsetY + y, offsetZ + z, predicate, state);
	}

	/**
	 * Returns a view of this writer rotated 90 degrees about the Y axis ((x, z) -> (-z, x)).
	 */
	default MultiblockWriter rotate() {
		return (x, y, z, predicate, state) -> add(-z, y, x, predicate, state);
	}

	/**
	 * Returns a view of this writer rotated to face the given horizontal direction.
	 * EAST (and any other direction not listed) is the identity.
	 */
	default MultiblockWriter rotate(Direction direction) {
		MultiblockWriter w = this;

		// Intentional fall-through: each matched case applies one additional 90-degree rotation,
		// so NORTH rotates three times, WEST twice, SOUTH once.
		switch (direction) {
			case NORTH:
				w = w.rotate();
			case WEST:
				w = w.rotate();
			case SOUTH:
				w = w.rotate();
		}

		return w;
	}

	/**
	 * A writer which prints the hologram to {@link System#out}, optionally delegating to another writer
	 */
	record DebugWriter(MultiblockWriter writer) implements MultiblockWriter {

		@Override
		public MultiblockWriter add(int x, int y, int z, BiPredicate<BlockView, BlockPos> predicate, BlockState state) {
			System.out.printf("\t%d\t%d\t%d\t%s\n", x, y, z, state.getBlock());

			if (writer != null) {
				writer.add(x, y, z, predicate, state);
			}

			return this;
		}
	}

	/**
	 * A writer which verifies the positions of each block; once any position fails its predicate,
	 * the writer stays invalid and skips further checks.
	 */
	class MultiblockVerifier implements MultiblockWriter {
		// Origin the multiblock's coordinates are relative to.
		private final BlockPos relative;
		private final BlockView view;
		private boolean valid = true;

		public MultiblockVerifier(BlockPos relative, BlockView view) {
			this.relative = relative;
			this.view = view;
		}

		public boolean isValid() {
			return valid;
		}

		@Override
		public MultiblockWriter add(int x, int y, int z, BiPredicate<BlockView, BlockPos> predicate, BlockState state) {
			if (valid) {
				valid = predicate.test(view, relative.add(x, y, z));
			}

			return this;
		}
	}
}
/* StatisticsSummary * * $Id: StatisticsSummary.java 6428 2009-08-04 01:22:52Z gojomo $$ * * Created on July 27, 2006 * * Copyright (C) 2009 Internet Archive. * * This file is part of the Heritrix web crawler (crawler.archive.org). * * Heritrix is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * any later version. * * Heritrix is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser Public License for more details. * * You should have received a copy of the GNU Lesser Public License * along with Heritrix; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.archive.crawler.admin; import java.io.File; import java.io.FileReader; import java.io.BufferedReader; import java.io.IOException; import java.util.Comparator; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; import java.util.logging.Logger; /** * This class provides descriptive statistics of a finished crawl job by * using the crawl report files generated by StatisticsTracker. Any formatting * changes to the way StatisticsTracker writes to the summary crawl reports will * require changes to this class. 
* <p> * The following statistics are accessible from this class: * <ul> * <li> Successfully downloaded documents per fetch status code * <li> Successfully downloaded documents per document mime type * <li> Amount of data per mime type * <li> Successfully downloaded documents per host * <li> Amount of data per host * <li> Successfully downloaded documents per top-level domain name (TLD) * <li> Disposition of all seeds * <li> Successfully downloaded documents per host per source * </ul> * * <p>TODO: Make it so summarizing is not done all in RAM so we avoid * OOME. * * @author Frank McCown * * @see org.archive.crawler.admin.StatisticsTracker */ public class StatisticsSummary { /** * Messages from the StatisticsSummary. */ private final static Logger logger = Logger.getLogger(StatisticsSummary.class.getName()); private boolean stats = true; /** Crawl job whose summary we want to view */ private CrawlJob cjob; protected long totalDnsStatusCodeDocuments = 0; protected long totalStatusCodeDocuments = 0; protected long totalFileTypeDocuments = 0; protected long totalMimeTypeDocuments = 0; protected long totalDnsMimeTypeDocuments = 0; protected long totalDnsHostDocuments = 0; protected long totalHostDocuments = 0; protected long totalMimeSize = 0; protected long totalDnsMimeSize = 0; protected long totalHostSize = 0; protected long totalDnsHostSize = 0; protected long totalTldDocuments = 0; protected long totalTldSize = 0; protected long totalHosts = 0; protected String durationTime; protected String processedDocsPerSec; protected String bandwidthKbytesPerSec; protected String totalDataWritten; /** Keep track of the file types we see (mime type -> count) */ protected Hashtable<String,AtomicLong> mimeTypeDistribution = new Hashtable<String,AtomicLong>(); protected Hashtable<String,AtomicLong> mimeTypeBytes = new Hashtable<String,AtomicLong>(); protected Hashtable<String,AtomicLong> mimeTypeDnsDistribution = new Hashtable<String,AtomicLong>(); protected 
Hashtable<String,AtomicLong> mimeTypeDnsBytes = new Hashtable<String,AtomicLong>(); /** Keep track of status codes */ protected Hashtable<String,AtomicLong> statusCodeDistribution = new Hashtable<String,AtomicLong>(); protected Hashtable<String,AtomicLong> dnsStatusCodeDistribution = new Hashtable<String,AtomicLong>(); /** Keep track of hosts */ protected Hashtable<String,AtomicLong> hostsDistribution = new Hashtable<String,AtomicLong>(); protected Hashtable<String,AtomicLong> hostsBytes = new Hashtable<String,AtomicLong>(); protected Hashtable<String,AtomicLong> hostsDnsDistribution = new Hashtable<String,AtomicLong>(); protected Hashtable<String,AtomicLong> hostsDnsBytes = new Hashtable<String,AtomicLong>(); /** Keep track of TLDs */ protected Hashtable<String,AtomicLong> tldDistribution = new Hashtable<String,AtomicLong>(); protected Hashtable<String,AtomicLong> tldBytes = new Hashtable<String,AtomicLong>(); protected Hashtable<String,AtomicLong> tldHostDistribution = new Hashtable<String,AtomicLong>(); /** Keep track of processed seeds */ protected transient Map<String,SeedRecord> processedSeedsRecords = new Hashtable<String,SeedRecord>(); /** * Constructor * * @param cjob * Completed crawl job */ public StatisticsSummary(CrawlJob cjob) { this.cjob = cjob; // Read all stats for this crawl job this.stats = calculateStatusCodeDistribution(); if (calculateMimeTypeDistribution()) { this.stats = true; } if (calculateHostsDistribution()) { this.stats = true; } if (readCrawlReport()) { this.stats = true; } if (readSeedReport()) { this.stats = true; } } /** * Increment a counter for a key in a given HashMap. Used for various * aggregate data. * * @param map The HashMap * @param key The key for the counter to be incremented, if it does not * exist it will be added (set to 1). If null it will * increment the counter "unknown". 
*/ protected static void incrementMapCount(Map<String,AtomicLong> map, String key) { incrementMapCount(map,key,1); } /** * Increment a counter for a key in a given HashMap by an arbitrary amount. * Used for various aggregate data. The increment amount can be negative. * * @param map * The HashMap * @param key * The key for the counter to be incremented, if it does not * exist it will be added (set to equal to * <code>increment</code>). * If null it will increment the counter "unknown". * @param increment * The amount to increment counter related to the * <code>key</code>. */ protected static void incrementMapCount(Map<String,AtomicLong> map, String key, long increment) { if (key == null) { key = "unknown"; } // FIXME: for true safety this may need to use ConcurrenMap methods AtomicLong lw = map.get(key); if(lw == null) { map.put(key, new AtomicLong(increment)); } else { lw.addAndGet(increment); } } /** Returns a HashMap that contains information about distributions of * encountered mime types. Key/value pairs represent * mime type -> count. * <p> * <b>Note:</b> All the values are wrapped with a * {@link AtomicLong AtomicLong} * @return mimeTypeDistribution */ public Hashtable getMimeDistribution() { return mimeTypeDistribution; } public long getTotalMimeTypeDocuments() { return totalMimeTypeDocuments; } public long getTotalDnsMimeTypeDocuments() { return totalDnsMimeTypeDocuments; } public long getTotalMimeSize() { return totalMimeSize; } public long getTotalDnsMimeSize() { return totalDnsMimeSize; } /** * Return a HashMap representing the distribution of HTTP status codes for * successfully fetched curis, as represented by a hashmap where key -&gt; * val represents (string)code -&gt; (integer)count. 
* * <b>Note: </b> All the values are wrapped with a * {@link AtomicLong AtomicLong} * * @return statusCodeDistribution */ public Hashtable getStatusCodeDistribution() { return statusCodeDistribution; } /** * Return a HashMap representing the distribution of DNS status codes for * successfully fetched curis, as represented by a hashmap where key -&gt; * val represents (string)code -&gt; (integer)count. * * <b>Note: </b> All the values are wrapped with a * {@link AtomicLong AtomicLong} * * @return dnsStatusCodeDistribution */ public Hashtable getDnsStatusCodeDistribution() { return dnsStatusCodeDistribution; } public Hashtable getDnsMimeDistribution() { return mimeTypeDnsDistribution; } public long getTotalDnsStatusCodeDocuments() { return totalDnsStatusCodeDocuments; } public long getTotalStatusCodeDocuments() { return totalStatusCodeDocuments; } public long getTotalHostDocuments() { return totalHostDocuments; } public long getTotalDnsHostDocuments() { return totalDnsHostDocuments; } public Hashtable getHostsDnsDistribution() { return hostsDnsDistribution; } public long getTotalHostDnsDocuments() { return totalDnsHostDocuments; } public long getTotalHostSize() { return totalHostSize; } public long getTotalDnsHostSize() { return totalDnsHostSize; } public Hashtable getTldDistribution() { return tldDistribution; } public Hashtable getTldBytes() { return tldBytes; } public long getTotalTldDocuments() { return totalTldDocuments; } public long getTotalTldSize() { return totalTldSize; } public Hashtable getTldHostDistribution() { return tldHostDistribution; } public long getTotalHosts() { return totalHosts; } public String getDurationTime() { return durationTime; } public String getProcessedDocsPerSec() { return processedDocsPerSec; } public String getBandwidthKbytesPerSec() { return bandwidthKbytesPerSec; } public String getTotalDataWritten() { return totalDataWritten; } /** * Sort the entries of the given HashMap in descending order by their * values, which must be 
<code>AtomicLong</code>s. * <p> * Elements are sorted by value from largest to smallest. Equal values are * sorted in an arbitrary, but consistent manner by their keys. Only items * with identical value and key are considered equal. * * If the passed-in map requires access to be synchronized, the caller * should ensure this synchronization. * * @param mapOfAtomicLongValues * Assumes values are AtomicLongs. * @return a sorted set containing the same elements as the map. */ public TreeMap<String,AtomicLong> getReverseSortedCopy( final Map<String,AtomicLong> mapOfAtomicLongValues) { TreeMap<String,AtomicLong> sortedMap = new TreeMap<String,AtomicLong>( new Comparator<String>() { public int compare(String e1, String e2) { long firstVal = mapOfAtomicLongValues.get(e1).get(); long secondVal = mapOfAtomicLongValues.get(e2).get(); if (firstVal < secondVal) { return 1; } if (secondVal < firstVal) { return -1; } // If the values are the same, sort by keys. return e1.compareTo(e2); } }); try { sortedMap.putAll(mapOfAtomicLongValues); } catch (UnsupportedOperationException e) { for (String key: mapOfAtomicLongValues.keySet()) { sortedMap.put(key, mapOfAtomicLongValues.get(key)); } } return sortedMap; } /** * Get the number of hosts with a particular TLD. * @param tld * top-level domain name * @return Total crawled hosts */ public long getHostsPerTld(String tld) { AtomicLong lw = (AtomicLong)tldHostDistribution.get(tld); return (lw == null ? 0 : lw.get()); } /** * Read status code distribution from responsecode-report.txt. * DNS and HTTP status codes are separated when read. * @return True if we found some stats. 
*/ private boolean calculateStatusCodeDistribution() { // Read from responsecode-report.txt File f = new File(cjob.getDirectory(), "responsecode-report.txt"); if (!f.exists()) { return false; } BufferedReader br = null; try { FileReader reader = new FileReader(f); br = new BufferedReader(reader); String line = br.readLine(); // Ignore heading line = br.readLine(); while (line != null) { // Get status code and # urls which are seperated by a space String[] items = line.split(" "); if (items.length < 2) { logger.log(Level.WARNING, "Unexpected formatting on line [" + line + "]"); } else { // See if DNS or HTTP status code if (items[0].length() < 3) { // DNS status code long total = Long.parseLong(items[1]); dnsStatusCodeDistribution.put(items[0], new AtomicLong(total)); totalDnsStatusCodeDocuments += total; } else { // HTTP status code long total = Long.parseLong(items[1]); statusCodeDistribution.put(items[0], new AtomicLong(total)); totalStatusCodeDocuments += total; } } line = br.readLine(); } } catch (IOException e) { logger.log(Level.SEVERE, "Unable to read " + f.getAbsolutePath(), e); } finally { if (br != null) { try { br.close(); } catch (IOException e) { logger.log(Level.SEVERE, "Closing " + f.getAbsolutePath(), e); } } } return true; } /** * Read MIME type data from mimetype-report.txt. * MIME type of text/dns is separated from other MIME types. * @return True if we found some stats. 
*/ private boolean calculateMimeTypeDistribution() { File f = new File(cjob.getDirectory(), "mimetype-report.txt"); if (!f.exists()) { return false; } BufferedReader br = null; try { FileReader reader = new FileReader(f); br = new BufferedReader(reader); String line = br.readLine(); // Ignore heading line = br.readLine(); while (line != null) { // Get num urls, num bytes, and MIME type (seperated by a space) // Example: 12 134279 text/html String[] items = line.split(" "); if (items.length < 3) { logger.log(Level.WARNING, "Unexpected formatting on line [" + line + "]"); } else { long total = Long.parseLong(items[0]); long bytes = Long.parseLong(items[1]); String mime = items[2]; // Seperate DNS reconrds from HTTP if (mime.equalsIgnoreCase("text/dns")) { mimeTypeDnsDistribution.put(mime, new AtomicLong(total)); mimeTypeDnsBytes.put(mime, new AtomicLong(bytes)); totalDnsMimeTypeDocuments += total; totalDnsMimeSize += bytes; } else { mimeTypeDistribution.put(mime, new AtomicLong(total)); mimeTypeBytes.put(mime, new AtomicLong(bytes)); totalMimeTypeDocuments += total; totalMimeSize += bytes; } } line = br.readLine(); } } catch (IOException e) { logger.log(Level.SEVERE, "Reading " + f.getAbsolutePath(), e); } finally { if (br != null) { try { br.close(); } catch (IOException e) { logger.log(Level.SEVERE, "Closing " + f.getAbsolutePath(), e); } } } return true; } /** * Read number of URLs and total bytes for each host name from * hosts-report.txt. * Host name of "dns:" is separated from others. * @return true if stats found. 
*/ private boolean calculateHostsDistribution() { File f = new File(cjob.getDirectory(), "hosts-report.txt"); if (!f.exists()) { return false; } BufferedReader br = null; try { FileReader reader = new FileReader(f); br = new BufferedReader(reader); String line = br.readLine(); // Ignore heading line = br.readLine(); while (line != null) { // Get num urls, num bytes, and host name (seperated by a space) // Example: 9 7468 www.blogger.com String[] items = line.split(" "); if (items.length < 3) { logger.log(Level.WARNING, "Unexpected formatting on line [" + line + "]"); } else { long total = Long.parseLong(items[0]); long bytes = Long.parseLong(items[1]); String host = items[2]; // Seperate DNS reconrds from HTTP if (host.startsWith("dns:", 0)) { hostsDnsDistribution.put(host, new AtomicLong(total)); hostsDnsBytes.put(host, new AtomicLong(bytes)); totalDnsHostDocuments += total; totalDnsHostSize += bytes; } else { hostsDistribution.put(host, new AtomicLong(total)); hostsBytes.put(host, new AtomicLong(bytes)); totalHostDocuments += total; totalHostSize += bytes; // Count top level domain (TLD) String tld = host.substring(host.lastIndexOf('.')+1); incrementMapCount(tldDistribution, tld, total); incrementMapCount(tldBytes, tld, bytes); incrementMapCount(tldHostDistribution, tld); totalTldDocuments += total; totalTldSize += bytes; totalHosts++; } } line = br.readLine(); } } catch (IOException e) { logger.log(Level.SEVERE, "Reading " + f.getAbsolutePath(), e); } finally { if (br != null) { try { br.close(); } catch (IOException e) { logger.log(Level.SEVERE, "Closing " + f.getAbsolutePath(), e); } } } return true; } /** * Returns the accumulated number of bytes downloaded from a given host. * @param host name of the host * @return the accumulated number of bytes downloaded from a given host */ public long getBytesPerHost(String host) { long bytes = -1; bytes = host != null && host.startsWith("dns:", 0) ? 
((AtomicLong)hostsDnsBytes.get(host)).get() : ((AtomicLong)hostsBytes.get(host)).get(); return bytes; } /** * Returns the total number of bytes downloaded for a given TLD. * @param tld TLD * @return the total number of bytes downloaded for a given TLD */ public long getBytesPerTld(String tld) { AtomicLong lw = (AtomicLong)tldBytes.get(tld); return (lw == null ? 0 : lw.get()); } /** * Returns the accumulated number of bytes from files of a given file type. * @param filetype Filetype to check. * @return the accumulated number of bytes from files of a given mime type */ public long getBytesPerMimeType(String filetype) { long bytes = -1; if (filetype != null) { if (filetype.equals("text/dns")) { bytes = mimeTypeDnsBytes.get(filetype) == null ? 0 : ((AtomicLong)mimeTypeDnsBytes.get(filetype)).get(); } else { bytes = mimeTypeBytes.get(filetype) == null ? 0 : ((AtomicLong)mimeTypeBytes.get(filetype)).get(); } } return bytes; } /** * Reads duration time, processed docs/sec, bandwidth, and total size * of crawl from crawl-report.txt. * @return true if stats found. 
*/ public boolean readCrawlReport() { File f = new File(cjob.getDirectory(), "crawl-report.txt"); if (!f.exists()) { return false; } BufferedReader br = null; try { FileReader reader = new FileReader(f); br = new BufferedReader(reader); String line = br.readLine(); while (line != null) { if (line.startsWith("Duration Time")) { durationTime = line.substring(line.indexOf(':')+1); } else if (line.startsWith("Processed docs/sec")) { processedDocsPerSec = line.substring(line.indexOf(':')+1); } else if (line.startsWith("Bandwidth in Kbytes/sec")) { bandwidthKbytesPerSec = line.substring(line.indexOf(':')+1); } else if (line.startsWith("Total Raw Data Size in Bytes")) { totalDataWritten = line.substring(line.indexOf(':')+1); } line = br.readLine(); } } catch (IOException e) { logger.log(Level.SEVERE, "Reading " + f.getAbsolutePath(), e); } finally { if (br != null) { try { br.close(); } catch (IOException e) { logger.log(Level.SEVERE, "Failed close of " + f.getAbsolutePath(), e); } } } return true; } /** * Returns sorted Iterator of seeds records based on status code. * @return sorted Iterator of seeds records */ public Iterator<SeedRecord> getSeedRecordsSortedByStatusCode() { TreeSet<SeedRecord> sortedSet = new TreeSet<SeedRecord>( new Comparator<SeedRecord>() { public int compare(SeedRecord sr1, SeedRecord sr2) { int code1 = sr1.getStatusCode(); int code2 = sr2.getStatusCode(); if (code1 == code2) { // If the values are equal, sort by URIs. return sr1.getUri().compareTo(sr2.getUri()); } // mirror and shift the nubmer line so as to // place zero at the beginning, then all negatives // in order of ascending absolute value, then all // positives descending code1 = -code1 - Integer.MAX_VALUE; code2 = -code2 - Integer.MAX_VALUE; return new Integer(code1).compareTo(new Integer(code2)); } }); for (SeedRecord sr: processedSeedsRecords.values()) { sortedSet.add(sr); } return sortedSet.iterator(); } /** * Reads seed data from seeds-report.txt. * @return True if stats found. 
*/ private boolean readSeedReport() { File f = new File(cjob.getDirectory(), "seeds-report.txt"); if (!f.exists()) { return false; } BufferedReader br = null; try { FileReader reader = new FileReader(f); br = new BufferedReader(reader); // Ignore heading: [code] [status] [seed] [redirect] String line = br.readLine(); line = br.readLine(); while (line != null) { // Example lines: // 302 CRAWLED http://www.ashlandcitytimes.com/ http://www.ashlandcitytimes.com/apps/pbcs.dll/section?Category=MTCN01 // 200 CRAWLED http://noleeo.com/ String[] items = line.split(" "); if (items.length < 3) { logger.log(Level.WARNING, "Unexpected formatting on line [" + line + "]"); } else { String statusCode = items[0]; String crawlStatus = items[1]; String seed = items[2]; String redirect = items.length > 3 ? items[3] : null; // All values should be CRAWLED or NOTCRAWLED if (crawlStatus.equals("CRAWLED")) { crawlStatus =org.archive.crawler.framework.StatisticsTracking.SEED_DISPOSITION_SUCCESS; } else { crawlStatus = org.archive.crawler.framework.StatisticsTracking.SEED_DISPOSITION_FAILURE; } SeedRecord sr = new SeedRecord(seed, crawlStatus, Integer.parseInt(statusCode), redirect); processedSeedsRecords.put(seed, sr); } line = br.readLine(); } } catch (IOException e) { logger.log(Level.SEVERE, "Reading " + f.getAbsolutePath(), e); } finally { if (br != null) { try { br.close(); } catch (IOException e) { logger.log(Level.SEVERE, "Closing " + f.getAbsolutePath(), e); } } } return true; } /** * Return a copy of the hosts distribution in reverse-sorted * (largest first) order. * * @return SortedMap of hosts distribution */ public SortedMap getReverseSortedHostsDistribution() { return getReverseSortedCopy(hostsDistribution); } /** * @return True if we compiled stats, false if none to compile (e.g. * there are no reports files on disk). */ public boolean isStats() { return this.stats; } }
package cryodex.modules.mus;

import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;

import cryodex.CryodexController;
import cryodex.CryodexController.Modules;
import cryodex.Language;
import cryodex.Main;
import cryodex.Player;
import cryodex.modules.Module;
import cryodex.modules.Tournament;
import cryodex.xml.XMLObject;
import cryodex.xml.XMLUtils;
import cryodex.xml.XMLUtils.Element;

/**
 * A single Mus tournament: its players, rounds, pairing logic, GUI hookup
 * and XML persistence.
 */
public class MusTournament implements XMLObject, Tournament {

    /** How players are paired in the first round. */
    public enum InitialSeedingEnum {
        RANDOM, BY_GROUP, IN_ORDER;
    }

    /** Overall tournament format. */
    public enum TournamentTypeEnum {
        SWISS, SINGLE, DOUBLE, TRIPLE;
    }

    private final List<MusRound> rounds;
    private List<MusPlayer> players;
    private final InitialSeedingEnum seedingEnum;
    private final MusTournamentGUI tournamentGUI;
    private String name;
    private final Integer points;
    private List<Integer> escalationPoints;
    private TournamentTypeEnum tournamentType = TournamentTypeEnum.SWISS;

    /**
     * Restores a tournament from its saved XML element and rebuilds the
     * round tabs in the GUI.
     */
    public MusTournament(Element tournamentElement) {
        this.players = new ArrayList<>();
        this.rounds = new ArrayList<>();
        seedingEnum = InitialSeedingEnum.RANDOM;
        tournamentGUI = new MusTournamentGUI(this);

        String playerIDs = tournamentElement.getStringFromChild("PLAYERS");
        Module m = Modules.getModuleByName(getModuleName());
        for (String s : playerIDs.split(",")) {
            Player p = CryodexController.getPlayerByID(s);
            if (p != null) {
                MusPlayer xp = (MusPlayer) p.getModuleInfoByModule(m);
                if (xp != null) {
                    players.add(xp);
                }
            }
        }

        Element roundElement = tournamentElement.getChild("ROUNDS");
        for (Element e : roundElement.getChildren()) {
            rounds.add(new MusRound(e, this));
        }

        name = tournamentElement.getStringFromChild("NAME");
        points = tournamentElement.getIntegerFromChild("POINTS");

        String tournamentTypeString = tournamentElement.getStringFromChild("TOURNAMENTTYPE");
        if (tournamentTypeString != null && tournamentTypeString.isEmpty() == false) {
            tournamentType = TournamentTypeEnum.valueOf(tournamentTypeString);
        }

        String escalationPointsString = tournamentElement.getStringFromChild("ESCALATIONPOINTS");
        if (escalationPointsString != null && escalationPointsString.isEmpty() == false) {
            escalationPoints = new ArrayList<Integer>();
            for (String s : escalationPointsString.split(",")) {
                // Integer.valueOf instead of the deprecated new Integer(String).
                escalationPoints.add(Integer.valueOf(s));
            }
        }

        // Re-create a tab per round; single-elimination rounds get a
        // bracket tab sized by the number of players (matches * 2).
        int counter = 1;
        for (MusRound r : rounds) {
            if (r.isSingleElimination() && isElimination() == false) {
                getTournamentGUI().getRoundTabbedPane()
                        .addSingleEliminationTab(r.getMatches().size() * 2, r.getPanel());
            } else {
                getTournamentGUI().getRoundTabbedPane().addSwissTab(counter, r.getPanel());
                counter++;
            }
        }

        getTournamentGUI().getRankingTable().setPlayers(getAllXWingPlayers());
    }

    /** Creates a brand-new tournament with the given settings. */
    public MusTournament(String name, List<MusPlayer> players,
            InitialSeedingEnum seedingEnum, Integer points,
            List<Integer> escalationPoints, TournamentTypeEnum tournamentType) {
        this.name = name;
        this.players = new ArrayList<>(players);
        this.rounds = new ArrayList<>();
        this.seedingEnum = seedingEnum;
        this.points = points;
        this.escalationPoints = escalationPoints;
        this.tournamentType = tournamentType;

        tournamentGUI = new MusTournamentGUI(this);
    }

    /** @return the most recently generated round, or null if none exist. */
    public MusRound getLatestRound() {
        if (rounds == null || rounds.isEmpty()) {
            return null;
        } else {
            return rounds.get(rounds.size() - 1);
        }
    }

    /** @return the 1-based number of the given round, or 0 if not found. */
    public int getRoundNumber(MusRound round) {
        int count = 0;
        for (MusRound r : rounds) {
            count++;
            if (r == round) {
                return count;
            }
        }
        return 0;
    }

    /** @return round at 0-based index i (caller must pass a valid index). */
    public MusRound getRound(int i) {
        if (rounds == null) {
            return null;
        } else {
            return rounds.get(i);
        }
    }

    /** @return the round whose tab is currently selected in the GUI. */
    public MusRound getSelectedRound() {
        if (rounds == null) {
            return null;
        } else {
            return getAllRounds().get(
                    getTournamentGUI().getRoundTabbedPane().getSelectedIndex());
        }
    }

    public List<MusRound> getAllRounds() {
        return rounds;
    }

    @Override
    public int getRoundCount() {
        if (rounds == null) {
            return 0;
        } else {
            return rounds.size();
        }
    }

    @Override
    public void setPlayers(List<Player> players) {
        List<MusPlayer> xwPlayers = new ArrayList<>();
        for (Player p : players) {
            xwPlayers.add(new MusPlayer(p));
        }
        setXWingPlayer(xwPlayers);
    }

    @Override
    public List<Player> getPlayers() {
        List<Player> players = new ArrayList<Player>();
        for (MusPlayer xp : getXWingPlayers()) {
            players.add(xp.getPlayer());
        }
        return players;
    }

    public List<MusPlayer> getXWingPlayers() {
        return players;
    }

    public void setXWingPlayer(List<MusPlayer> players) {
        this.players = players;
    }

    /**
     * Returns any players that have played at least one match. This calls
     * back dropped players into the list.
     */
    public Set<MusPlayer> getAllXWingPlayers() {
        // TreeSets and Head To Head comparisons can have problems.
        // Do not use them together.
        Set<MusPlayer> allPlayers = new TreeSet<MusPlayer>(
                new MusComparator(this, MusComparator.rankingCompareNoHeadToHead));

        for (MusRound r : getAllRounds()) {
            for (MusMatch m : r.getMatches()) {
                if (m.isBye()) {
                    allPlayers.add(m.getPlayer1());
                } else {
                    allPlayers.add(m.getPlayer1());
                    if (m.getPlayer2() != null) {
                        allPlayers.add(m.getPlayer2());
                    }
                }
            }
        }

        allPlayers.addAll(players);
        return allPlayers;
    }

    @Override
    public Set<Player> getAllPlayers() {
        Set<Player> players = new TreeSet<Player>();
        for (MusPlayer xp : getAllXWingPlayers()) {
            players.add(xp.getPlayer());
        }
        return players;
    }

    @Override
    public MusTournamentGUI getTournamentGUI() {
        return tournamentGUI;
    }

    @Override
    public String getName() {
        return name;
    }

    public Integer getPoints() {
        return points;
    }

    public List<Integer> getEscalationPoints() {
        return escalationPoints;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    @Override
    public void updateVisualOptions() {
        if (CryodexController.isLoading == false) {
            for (MusRound r : getAllRounds()) {
                r.getPanel().resetGamePanels(true);
            }
        }
    }

    /**
     * Validates the latest round and, if valid, generates the next one.
     * @return true if a new round was generated.
     */
    @Override
    public boolean generateNextRound() {
        // All matches must have a result filled in
        if (getLatestRound().isComplete() == false) {
            JOptionPane.showMessageDialog(Main.getInstance(), Language.round_incomplete);
            return false;
        }

        if (getLatestRound().isSingleElimination()) {
            // Bracket play: build the next elimination round directly.
            generateEliminationRound(getAllRounds().size() + 1);
        } else {
            // Regular swiss round checks
            if (getLatestRound().isValid(false) == false) {
                JOptionPane.showMessageDialog(Main.getInstance(), Language.round_invalid);
                return false;
            }
            generateRound(getAllRounds().size() + 1);
        }
        return true;
    }

    /**
     * Removes round roundNumber and every later round, clearing match
     * results so player statistics are recomputed correctly.
     */
    @Override
    public void cancelRound(int roundNumber) {
        if (rounds.size() >= roundNumber) {
            // If we are generating a past round. Clear all existing rounds
            // that will be erased.
            while (rounds.size() >= roundNumber) {
                int index = rounds.size() - 1;
                MusRound roundToRemove = rounds.get(index);
                for (MusMatch m : roundToRemove.getMatches()) {
                    m.setWinner(null);
                    m.setBye(false);
                    m.setPlayer1(null);
                    m.setPlayer2(null);
                    m.setPlayer1PointsDestroyed(null);
                    m.setPlayer2PointsDestroyed(null);
                }
                rounds.remove(roundToRemove);
                getTournamentGUI().getRoundTabbedPane().remove(index);
            }
        }
    }

    @Override
    public void generateRound(int roundNumber) {
        // if trying to skip a round...stop it
        if (roundNumber > rounds.size() + 1) {
            throw new IllegalArgumentException();
        }

        cancelRound(roundNumber);

        // Double/triple elimination: drop players who have exhausted their
        // allowed losses (iterate a copy since dropPlayer mutates the list).
        if (tournamentType == TournamentTypeEnum.DOUBLE
                || tournamentType == TournamentTypeEnum.TRIPLE) {
            for (MusPlayer p : new ArrayList<MusPlayer>(getXWingPlayers())) {
                if (tournamentType == TournamentTypeEnum.DOUBLE && p.getLosses(this) >= 2) {
                    dropPlayer(p.getPlayer());
                }
                if (tournamentType == TournamentTypeEnum.TRIPLE && p.getLosses(this) >= 3) {
                    dropPlayer(p.getPlayer());
                }
            }
        }

        if (tournamentType == TournamentTypeEnum.SINGLE) {
            generateEliminationRound(roundNumber);
        } else {
            generateSwissRound(roundNumber);
        }

        getTournamentGUI().getRankingTable().setPlayers(getAllXWingPlayers());
    }

    /** Builds a single-elimination bracket round. */
    private void generateEliminationRound(int roundNumber) {
        // NOTE: a large block of commented-out DOUBLE/TRIPLE bracket code
        // was removed here; only SINGLE is implemented.
        List<MusMatch> matches = new ArrayList<MusMatch>();
        if (tournamentType == TournamentTypeEnum.SINGLE) {
            matches = MusEliminationMatchBuilder.getBracketMatches(this, getXWingPlayers(), 0);
        }
        MusRound r = new MusRound(matches, this, roundNumber);
        r.setSingleElimination(true);
        rounds.add(r);
        getTournamentGUI().getRoundTabbedPane().addSwissTab(roundNumber, r.getPanel());
    }

    /** Builds a swiss round; round one is seeded per seedingEnum. */
    private void generateSwissRound(int roundNumber) {
        List<MusMatch> matches;
        if (roundNumber == 1) {
            matches = new ArrayList<MusMatch>();
            List<MusPlayer> tempList = new ArrayList<>();
            tempList.addAll(getXWingPlayers());

            // First-round byes are paired last, against nobody.
            List<MusPlayer> firstRoundByePlayers = new ArrayList<>();
            for (MusPlayer p : tempList) {
                if (p.isFirstRoundBye()) {
                    firstRoundByePlayers.add(p);
                }
            }
            tempList.removeAll(firstRoundByePlayers);

            if (seedingEnum == InitialSeedingEnum.IN_ORDER) {
                // Pair players sequentially: 1v2, 3v4, ...
                while (tempList.isEmpty() == false) {
                    MusPlayer player1 = tempList.get(0);
                    MusPlayer player2 = null;
                    tempList.remove(0);
                    if (tempList.isEmpty() == false) {
                        player2 = tempList.get(0);
                        tempList.remove(0);
                    }
                    MusMatch match = new MusMatch(player1, player2);
                    matches.add(match);
                }
            } else if (seedingEnum == InitialSeedingEnum.RANDOM) {
                // Shuffle, then pair first against last.
                Collections.shuffle(tempList);
                while (tempList.isEmpty() == false) {
                    MusPlayer player1 = tempList.get(0);
                    MusPlayer player2 = tempList.get(tempList.size() - 1);
                    tempList.remove(player1);
                    if (player1 == player2) {
                        // Odd player count: last player gets a bye.
                        player2 = null;
                    } else {
                        tempList.remove(player2);
                    }
                    MusMatch match = new MusMatch(player1, player2);
                    matches.add(match);
                }
            } else if (seedingEnum == InitialSeedingEnum.BY_GROUP) {
                Map<String, List<MusPlayer>> playerMap = new HashMap<String, List<MusPlayer>>();

                // Add players to map.
                for (MusPlayer p : tempList) {
                    // FIX: normalize a null group name to "" BEFORE the map
                    // lookup. The original looked up the raw (possibly null)
                    // key but stored under the normalized key, so every
                    // null-group player created a fresh list that overwrote
                    // the previous one, silently dropping players from
                    // round-one pairings.
                    String groupName = p.getPlayer().getGroupName() == null ?
                            "" : p.getPlayer().getGroupName();
                    List<MusPlayer> playerList = playerMap.get(groupName);
                    if (playerList == null) {
                        playerList = new ArrayList<>();
                        playerMap.put(groupName, playerList);
                    }
                    playerList.add(p);
                }

                // Shuffle up the lists
                List<String> seedValues = new ArrayList<>(playerMap.keySet());
                Collections.shuffle(seedValues);

                // Shuffle each group list
                for (List<MusPlayer> list : playerMap.values()) {
                    Collections.shuffle(list);
                }

                // Draft alternately across groups so group-mates are
                // paired against each other as rarely as possible.
                MusPlayer p1 = null;
                MusPlayer p2 = null;
                while (seedValues.isEmpty() == false) {
                    int i = 0;
                    while (i < seedValues.size()) {
                        if (p1 == null) {
                            p1 = playerMap.get(seedValues.get(i)).get(0);
                        } else {
                            p2 = playerMap.get(seedValues.get(i)).get(0);
                            matches.add(new MusMatch(p1, p2));
                            p1 = null;
                            p2 = null;
                        }
                        playerMap.get(seedValues.get(i)).remove(0);
                        if (playerMap.get(seedValues.get(i)).isEmpty()) {
                            seedValues.remove(i);
                        } else {
                            i++;
                        }
                    }
                    Collections.shuffle(seedValues);
                }
                if (p1 != null) {
                    // Odd player count: the leftover player gets a bye.
                    matches.add(new MusMatch(p1, null));
                }
            }

            for (MusPlayer p : firstRoundByePlayers) {
                matches.add(new MusMatch(p, null));
            }
        } else {
            matches = getMatches(getXWingPlayers());
        }

        MusRound r = new MusRound(matches, this, roundNumber);
        rounds.add(r);

        // A first SINGLE-elimination round with a power-of-two bracket is
        // flagged as elimination; either way a swiss tab is added (both
        // branches of the original did exactly that).
        if (roundNumber == 1 && tournamentType == TournamentTypeEnum.SINGLE
                && (matches.size() == 1 || matches.size() == 2 || matches.size() == 4
                    || matches.size() == 8 || matches.size() == 16 || matches.size() == 32)) {
            r.setSingleElimination(true);
        }
        getTournamentGUI().getRoundTabbedPane().addSwissTab(roundNumber, r.getPanel());
    }

    /**
     * Pairs players for a later swiss round. The lowest-ranked player who
     * has the fewest byes (and did not just have one) receives the bye.
     */
    private List<MusMatch> getMatches(List<MusPlayer> userList) {
        List<MusMatch> matches = new ArrayList<MusMatch>();

        List<MusPlayer> tempList = new ArrayList<MusPlayer>();
        tempList.addAll(userList);
        Collections.sort(tempList, new MusComparator(this, MusComparator.pairingCompare));

        MusMatch byeMatch = null;
        // Setup the bye match if necessary. The player to get the bye is the
        // lowest ranked player who has not had a bye yet or who has the
        // fewest byes.
        if (tempList.size() % 2 == 1) {
            MusPlayer byeUser = null;
            int byUserCounter = 1;
            int minByes = 0;
            try {
                while (byeUser == null || byeUser.getByes(this) > minByes
                        || (byeUser.getMatches(this) != null
                            && byeUser.getMatches(this)
                                .get(byeUser.getMatches(this).size() - 1).isBye())) {
                    if (byUserCounter > tempList.size()) {
                        minByes++;
                        byUserCounter = 1;
                    }
                    byeUser = tempList.get(tempList.size() - byUserCounter);
                    byUserCounter++;
                }
            } catch (IndexOutOfBoundsException e) {
                // FIX: the original caught ArrayIndexOutOfBoundsException,
                // but List.get throws IndexOutOfBoundsException (e.g. for a
                // player with an empty match list), so the intended fallback
                // never fired. Catching the superclass keeps the old intent
                // and still covers the array case.
                byeUser = tempList.get(tempList.size() - 1);
            }
            byeMatch = new MusMatch(byeUser, null);
            tempList.remove(byeUser);
        }

        matches = new MusRandomMatchGeneration(this, tempList).generateMatches();

        if (MusMatch.hasDuplicate(matches)) {
            JOptionPane.showMessageDialog(Main.getInstance(),
                    Language.duplicate_resolution_failure);
        }

        // Add the bye match at the end
        if (byeMatch != null) {
            matches.add(byeMatch);
        }

        return matches;
    }

    /**
     * Creates the first (or next) single-elimination bracket of cutSize
     * players, ordering matches so the top seeds meet as late as possible.
     */
    @Override
    public void generateSingleEliminationMatches(int cutSize, boolean special) {
        List<MusMatch> matches = new ArrayList<>();
        List<MusMatch> matchesCorrected = new ArrayList<MusMatch>();

        if (getLatestRound().isSingleElimination()) {
            // Advance winners of adjacent matches from the previous bracket.
            List<MusMatch> lastRoundMatches = getLatestRound().getMatches();
            for (int index = 0; index < lastRoundMatches.size(); index = index + 2) {
                MusMatch newMatch = new MusMatch(
                        lastRoundMatches.get(index).getWinner(),
                        lastRoundMatches.get(index + 1).getWinner());
                matches.add(newMatch);
            }
            matchesCorrected = matches;
        } else {
            List<MusPlayer> tempList = new ArrayList<>();
            tempList.addAll(getXWingPlayers());
            Collections.sort(tempList, new MusComparator(this, MusComparator.rankingCompare));
            tempList = tempList.subList(0, cutSize);

            if (special) {
                // "Special" top-4: 1v2 and 3v4 instead of 1v4 and 2v3.
                MusMatch match1 = new MusMatch(tempList.get(0), tempList.get(1));
                MusMatch match2 = new MusMatch(tempList.get(2), tempList.get(3));
                matchesCorrected.add(match1);
                matchesCorrected.add(match2);
            } else {
                // Seed best against worst: 1vN, 2v(N-1), ...
                while (tempList.isEmpty() == false) {
                    MusPlayer player1 = tempList.get(0);
                    MusPlayer player2 = tempList.get(tempList.size() - 1);
                    tempList.remove(player1);
                    if (player1 == player2) {
                        player2 = null;
                    } else {
                        tempList.remove(player2);
                    }
                    MusMatch match = new MusMatch(player1, player2);
                    matches.add(match);
                }

                // Reorder so bracket halves keep top seeds apart.
                switch (matches.size()) {
                case 4:
                    matchesCorrected.add(matches.get(0));
                    matchesCorrected.add(matches.get(3));
                    matchesCorrected.add(matches.get(2));
                    matchesCorrected.add(matches.get(1));
                    break;
                case 8:
                    matchesCorrected.add(matches.get(0));
                    matchesCorrected.add(matches.get(7));
                    matchesCorrected.add(matches.get(4));
                    matchesCorrected.add(matches.get(3));
                    matchesCorrected.add(matches.get(2));
                    matchesCorrected.add(matches.get(5));
                    matchesCorrected.add(matches.get(6));
                    matchesCorrected.add(matches.get(1));
                    break;
                default:
                    matchesCorrected = matches;
                }
            }
        }

        MusRound r = new MusRound(matchesCorrected, this, null);
        r.setSingleElimination(true);
        rounds.add(r);
        getTournamentGUI().getRoundTabbedPane().addSingleEliminationTab(cutSize, r.getPanel());

        CryodexController.saveData();
    }

    @Override
    public StringBuilder appendXML(StringBuilder sb) {
        // StringBuilder instead of repeated String concatenation in loops.
        StringBuilder playerString = new StringBuilder();
        String seperator = "";
        for (MusPlayer p : players) {
            playerString.append(seperator).append(p.getPlayer().getSaveId());
            seperator = ",";
        }
        XMLUtils.appendObject(sb, "PLAYERS", playerString.toString());

        XMLUtils.appendList(sb, "ROUNDS", "ROUND", getAllRounds());

        StringBuilder escalationString = new StringBuilder();
        seperator = "";
        if (escalationPoints != null) {
            for (Integer p : escalationPoints) {
                escalationString.append(seperator).append(p);
                seperator = ",";
            }
        }
        XMLUtils.appendObject(sb, "ESCALATIONPOINTS", escalationString.toString());

        XMLUtils.appendObject(sb, "POINTS", points);
        XMLUtils.appendObject(sb, "NAME", name);
        // NOTE(review): the module is saved as XWING (matching
        // getModuleName()); presumably inherited from the module this one
        // was copied from. Changing it would break existing save files --
        // confirm before renaming.
        XMLUtils.appendObject(sb, "MODULE", Modules.XWING.getName());
        XMLUtils.appendObject(sb, "TOURNAMENTTYPE", tournamentType);

        return sb;
    }

    @Override
    public void startTournament() {
        generateRound(1);
    }

    @Override
    public void addPlayer(Player p) {
        // If the player already appeared in any match (e.g. was dropped and
        // is re-joining), reuse that MusPlayer so their history is kept.
        for (MusRound r : getAllRounds()) {
            for (MusMatch m : r.getMatches()) {
                if (m.getPlayer1().getPlayer().equals(p)) {
                    getXWingPlayers().add(m.getPlayer1());
                    return;
                } else if (m.getPlayer2() != null
                        && m.getPlayer2().getPlayer().equals(p)) {
                    getXWingPlayers().add(m.getPlayer2());
                    return;
                }
            }
        }

        MusPlayer xPlayer = new MusPlayer(p);
        getXWingPlayers().add(xPlayer);
    }

    @Override
    public void dropPlayer(Player p) {
        MusPlayer xPlayer = null;
        for (MusPlayer xp : getXWingPlayers()) {
            if (xp.getPlayer() == p) {
                xPlayer = xp;
                break;
            }
        }
        if (xPlayer != null) {
            getXWingPlayers().remove(xPlayer);
        }
        resetRankingTable();
    }

    @Override
    public void resetRankingTable() {
        getTournamentGUI().getRankingTable().setPlayers(getAllXWingPlayers());
    }

    @Override
    public Icon getIcon() {
        URL imgURL = MusTournament.class.getResource("x.png");
        if (imgURL == null) {
            // FIX: the original only printed "fail!!!!!!!!!!" and then fell
            // through to new ImageIcon(null), throwing a NullPointerException.
            // Report the problem and return null instead.
            System.err.println("Unable to load tournament icon resource x.png");
            return null;
        }
        return new ImageIcon(imgURL);
    }

    @Override
    public String getModuleName() {
        return Modules.XWING.getName();
    }

    /** @return true for any elimination format (single/double/triple). */
    public boolean isElimination() {
        return tournamentType == TournamentTypeEnum.SINGLE
                || tournamentType == TournamentTypeEnum.DOUBLE
                || tournamentType == TournamentTypeEnum.TRIPLE;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.end2end; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.math.BigInteger; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.util.Iterator; import java.util.Properties; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.exception.SQLExceptionCode; import org.apache.phoenix.execute.MutationState; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.query.QueryServices; import org.apache.phoenix.schema.PIndexState; import org.apache.phoenix.schema.PTableType; import org.apache.phoenix.util.Repeat; import org.apache.phoenix.util.RunUntilFailure; import 
org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.StringUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.Test;
import org.junit.runner.RunWith;

/**
 * Integration tests for client-side {@link MutationState} behavior:
 * mutation-count / mutation-byte-size limits, estimated-size accounting,
 * mutation batching for a single row, and index-table-only writes while a
 * data-table index is in a non-ACTIVE state.
 */
@RunWith(RunUntilFailure.class)
public class MutationStateIT extends ParallelStatsDisabledIT {

    // Shared column/PK definition appended to "CREATE TABLE <name>" by several tests.
    private static final String DDL =
            " (ORGANIZATION_ID CHAR(15) NOT NULL, SCORE DOUBLE, "
            + "ENTITY_ID CHAR(15) NOT NULL, TAGS VARCHAR, CONSTRAINT PAGE_SNAPSHOT_PK "
            + "PRIMARY KEY (ORGANIZATION_ID, ENTITY_ID DESC)) MULTI_TENANT=TRUE";

    // Fixed seed so randomized runs are reproducible.
    private static final Random RAND = new Random(5);

    /**
     * Upserts 10000 rows into {@code fullTableName} without committing; used to
     * overflow the client-side mutation limits configured by the callers.
     *
     * @param conn          open Phoenix connection (caller owns its lifecycle)
     * @param fullTableName table created with {@link #DDL}
     * @throws SQLException expected by callers once a mutation limit is exceeded
     */
    private void upsertRows(PhoenixConnection conn, String fullTableName) throws SQLException {
        PreparedStatement stmt = conn.prepareStatement("upsert into " + fullTableName
                + " (organization_id, entity_id, score) values (?,?,?)");
        for (int i = 0; i < 10000; i++) {
            stmt.setString(1, "AAAA" + i);
            stmt.setString(2, "BBBB" + i);
            stmt.setInt(3, 1);
            stmt.execute();
        }
    }

    /**
     * Returns a pseudo-random decimal string of exactly {@code length} characters.
     * A 164-bit random BigInteger yields ~49 decimal digits, which is enough for
     * every caller here (max requested length is 15).
     */
    public static String randString(int length) {
        return new BigInteger(164, RAND).toString().substring(0, length);
    }

    /**
     * Starts {@code nThreads} background threads, each upserting {@code nRows}
     * random rows via {@code upsertStmt} (17 bind parameters) and committing every
     * {@code batchSize} rows. Each thread counts down {@code doneSignal} when done,
     * even on failure, so callers can await completion with a timeout.
     */
    private static void mutateRandomly(final String upsertStmt, final String fullTableName,
            final int nThreads, final int nRows, final int nIndexValues, final int batchSize,
            final CountDownLatch doneSignal) {
        Runnable[] runnables = new Runnable[nThreads];
        for (int i = 0; i < nThreads; i++) {
            runnables[i] = new Runnable() {
                @Override
                public void run() {
                    // try-with-resources: the original leaked this connection on any exit path.
                    try (Connection conn = DriverManager.getConnection(getUrl())) {
                        for (int i = 0; i < nRows; i++) {
                            PreparedStatement statement = conn.prepareStatement(upsertStmt);
                            int index = 0;
                            // PK columns
                            statement.setString(++index, randString(15)); // USER1_ID
                            statement.setString(++index, randString(15)); // ELEMENT1_ID
                            statement.setString(++index, randString(15)); // ELEMENT_ID
                            statement.setString(++index, randString(1));  // ELEMENT_TYPE
                            statement.setString(++index, randString(15)); // TYPE_ID
                            statement.setString(++index, randString(15)); // USER_ID
                            // non-PK columns
                            statement.setTimestamp(++index, new Timestamp(System.currentTimeMillis())); // ELEMENT4_TIME
                            statement.setTimestamp(++index, new Timestamp(System.currentTimeMillis())); // ELEMENT_UPDATE
                            statement.setString(++index, randString(1));  // ELEMENT2_TYPE
                            statement.setString(++index, randString(1));  // ELEMENT1_TYPE
                            statement.setBoolean(++index, false);         // ELEMENT1_IS_SYS_GEN
                            statement.setString(++index, randString(1));  // ELEMENT1_STATUS
                            statement.setString(++index, randString(1));  // ELEMENT1_VISIBILITY
                            statement.setString(++index, randString(15)); // ELEMENT3_ID
                            statement.setString(++index, randString(15)); // ELEMENT4_BY
                            statement.setString(++index, randString(15)); // BEST_ELEMENT_ID
                            statement.setInt(++index, RAND.nextInt());    // ELEMENT_COUNT
                            statement.execute();
                            if ((i % batchSize) == 0) {
                                conn.commit();
                            }
                        }
                        conn.commit();
                    } catch (SQLException e) {
                        throw new RuntimeException(e);
                    } finally {
                        // Always count down so the awaiting test cannot hang forever.
                        doneSignal.countDown();
                    }
                }
            };
        }
        for (int i = 0; i < nThreads; i++) {
            Thread t = new Thread(runnables[i]);
            t.start();
        }
    }

    /**
     * While concurrent upserts run, unassigns the region of one index so that the
     * index transitions out of ACTIVE. Afterwards verifies: ACTIVE indexes have
     * exactly as many rows as the data table, non-ACTIVE ones have fewer (writes
     * to them were skipped on the client side).
     */
    @Test
    @Repeat(10)
    public void testOnlyIndexTableWriteFromClientSide()
            throws SQLException, InterruptedException, IOException {
        String schemaName = generateUniqueName();
        String tableName = generateUniqueName();
        String indexName1 = generateUniqueName();
        String indexName2 = generateUniqueName();
        String indexName3 = generateUniqueName();
        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
        String fullIndexName1 = SchemaUtil.getTableName(schemaName, indexName1);
        String CREATE_DATA_TABLE = "CREATE TABLE IF NOT EXISTS " + fullTableName + " ( \n"
                + "    USER1_ID CHAR(15) NOT NULL,\n"
                + "    ELEMENT1_ID CHAR(15) NOT NULL,\n"
                + "    ELEMENT_ID CHAR(15) NOT NULL,\n"
                + "    ELEMENT_TYPE VARCHAR(1) NOT NULL,\n"
                + "    TYPE_ID CHAR(15) NOT NULL,\n"
                + "    USER_ID CHAR(15) NOT NULL,\n"
                + "    ELEMENT4_TIME TIMESTAMP,\n"
                + "    ELEMENT_UPDATE TIMESTAMP,\n"
                + "    ELEMENT_SCORE DOUBLE,\n"
                + "    ELEMENT2_TYPE VARCHAR(1),\n"
                + "    ELEMENT1_TYPE VARCHAR(1),\n"
                + "    ELEMENT1_IS_SYS_GEN BOOLEAN,\n"
                + "    ELEMENT1_STATUS VARCHAR(1),\n"
                + "    ELEMENT1_VISIBILITY VARCHAR(1),\n"
                + "    ELEMENT3_ID CHAR(15),\n"
                + "    ELEMENT4_BY CHAR(15),\n"
                + "    BEST_ELEMENT_ID CHAR(15),\n"
                + "    ELEMENT_COUNT INTEGER,\n"
                + "    CONSTRAINT PK PRIMARY KEY\n"
                + "    (\n"
                + "        USER1_ID,\n"
                + "        ELEMENT1_ID,\n"
                + "        ELEMENT_ID,\n"
                + "        ELEMENT_TYPE,\n"
                + "        TYPE_ID,\n"
                + "        USER_ID\n"
                + "    )\n"
                + ") VERSIONS=1,MULTI_TENANT=TRUE,TTL=31536000\n";
        String CREATE_INDEX_1 = "CREATE INDEX IF NOT EXISTS " + indexName1 + " \n"
                + "ON " + fullTableName + " (\n"
                + "    TYPE_ID,\n"
                + "    ELEMENT_ID,\n"
                + "    ELEMENT_TYPE,\n"
                + "    USER_ID,\n"
                + "    ELEMENT4_TIME DESC,\n"
                + "    ELEMENT1_ID DESC\n"
                + ") INCLUDE (\n"
                + "    ELEMENT2_TYPE,\n"
                + "    ELEMENT1_TYPE,\n"
                + "    ELEMENT1_IS_SYS_GEN,\n"
                + "    ELEMENT1_STATUS,\n"
                + "    ELEMENT1_VISIBILITY,\n"
                + "    ELEMENT3_ID,\n"
                + "    ELEMENT4_BY,\n"
                + "    BEST_ELEMENT_ID,\n"
                + "    ELEMENT_COUNT\n"
                + ")\n";
        String CREATE_INDEX_2 = " CREATE INDEX IF NOT EXISTS " + indexName2 + "\n"
                + "ON " + fullTableName + " (\n"
                + "    TYPE_ID,\n"
                + "    ELEMENT_ID,\n"
                + "    ELEMENT_TYPE,\n"
                + "    USER_ID,\n"
                + "    ELEMENT_UPDATE DESC,\n"
                + "    ELEMENT1_ID DESC\n"
                + ") INCLUDE (\n"
                + "    ELEMENT2_TYPE,\n"
                + "    ELEMENT1_TYPE,\n"
                + "    ELEMENT1_IS_SYS_GEN,\n"
                + "    ELEMENT1_STATUS,\n"
                + "    ELEMENT1_VISIBILITY,\n"
                + "    ELEMENT3_ID,\n"
                + "    ELEMENT4_BY,\n"
                + "    BEST_ELEMENT_ID,\n"
                + "    ELEMENT_COUNT\n"
                + ")\n";
        String CREATE_INDEX_3 = "CREATE INDEX IF NOT EXISTS " + indexName3 + "\n"
                + "ON " + fullTableName + " (\n"
                + "    TYPE_ID,\n"
                + "    ELEMENT_ID,\n"
                + "    ELEMENT_TYPE,\n"
                + "    USER_ID,\n"
                + "    ELEMENT_SCORE DESC,\n"
                + "    ELEMENT1_ID DESC\n"
                + ") INCLUDE (\n"
                + "    ELEMENT2_TYPE,\n"
                + "    ELEMENT1_TYPE,\n"
                + "    ELEMENT1_IS_SYS_GEN,\n"
                + "    ELEMENT1_STATUS,\n"
                + "    ELEMENT1_VISIBILITY,\n"
                + "    ELEMENT3_ID,\n"
                + "    ELEMENT4_BY,\n"
                + "    BEST_ELEMENT_ID,\n"
                + "    ELEMENT_COUNT\n"
                + ")\n";
        String UPSERT_INTO_DATA_TABLE = "UPSERT INTO " + fullTableName + "\n"
                + "(\n"
                + "    USER1_ID,\n"
                + "    ELEMENT1_ID,\n"
                + "    ELEMENT_ID,\n"
                + "    ELEMENT_TYPE,\n"
                + "    TYPE_ID,\n"
                + "    USER_ID,\n"
                + "    ELEMENT4_TIME,\n"
                + "    ELEMENT_UPDATE,\n"
                + "    ELEMENT2_TYPE,\n"
                + "    ELEMENT1_TYPE,\n"
                + "    ELEMENT1_IS_SYS_GEN,\n"
                + "    ELEMENT1_STATUS,\n"
                + "    ELEMENT1_VISIBILITY,\n"
                + "    ELEMENT3_ID,\n"
                + "    ELEMENT4_BY,\n"
                + "    BEST_ELEMENT_ID,\n"
                + "    ELEMENT_COUNT\n"
                + ")"
                + "VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
        int nThreads = 1;
        int nRows = 5000;
        int nIndexValues = 4000;
        int batchSize = 200;
        final CountDownLatch doneSignal = new CountDownLatch(nThreads);
        // NOTE(review): the original wrapped this body in an empty try/finally and in
        // catch (InterruptedException|IOException) blocks that only rethrew; both were
        // no-ops (the method already declares those exceptions) and have been removed.
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            conn.createStatement().execute(CREATE_DATA_TABLE);
            conn.createStatement().execute(CREATE_INDEX_1);
            conn.createStatement().execute(CREATE_INDEX_2);
            conn.createStatement().execute(CREATE_INDEX_3);
            conn.commit();
            mutateRandomly(UPSERT_INTO_DATA_TABLE, fullTableName, nThreads, nRows,
                    nIndexValues, batchSize, doneSignal);
            // Let the writers make some progress, then knock index 1 out of ACTIVE.
            Thread.sleep(200);
            unassignRegionAsync(fullIndexName1);
            assertTrue("Ran out of time", doneSignal.await(120, TimeUnit.SECONDS));
            long dataTableRows = TestUtil.getRowCount(conn, fullTableName);
            ResultSet rs = conn.getMetaData().getTables(null, StringUtil.escapeLike(schemaName),
                    null, new String[] { PTableType.INDEX.toString() });
            while (rs.next()) {
                String indexState = rs.getString("INDEX_STATE");
                String indexName = rs.getString(3);
                long rowCountIndex =
                        TestUtil.getRowCount(conn, SchemaUtil.getTableName(schemaName, indexName));
                if (indexState.equals(PIndexState.ACTIVE.name())) {
                    // Active index must be fully in sync with the data table.
                    assertTrue(dataTableRows == rowCountIndex);
                } else {
                    // Writes to the non-active index were skipped client-side.
                    assertTrue(dataTableRows > rowCountIndex);
                }
            }
        }
    }

    /**
     * Verifies that exceeding the per-commit mutation-count limit and the
     * mutation-byte-size limit on DELETE statements raises the corresponding
     * {@link SQLExceptionCode} errors.
     */
    @Test
    public void testDeleteMaxMutationSize() throws SQLException {
        String tableName = generateUniqueName();
        int NUMBER_OF_ROWS = 20;
        String ddl = "CREATE TABLE " + tableName + " (V BIGINT PRIMARY KEY, K BIGINT)";
        // Seed the table (try-with-resources: the original leaked every connection here).
        try (PhoenixConnection conn = (PhoenixConnection) DriverManager.getConnection(getUrl())) {
            conn.createStatement().execute(ddl);
            for (int i = 0; i < NUMBER_OF_ROWS; i++) {
                conn.createStatement().execute(
                    "UPSERT INTO " + tableName + " VALUES (" + i + ", " + i + ")");
                conn.commit();
            }
        }

        // Limit the mutation *count* to half the rows; the delete loop must overflow it.
        Properties props = new Properties();
        props.setProperty(QueryServices.MAX_MUTATION_SIZE_ATTRIB,
            String.valueOf(NUMBER_OF_ROWS / 2));
        try (PhoenixConnection connection =
                (PhoenixConnection) DriverManager.getConnection(getUrl(), props)) {
            connection.setAutoCommit(false);
            try {
                for (int i = 0; i < NUMBER_OF_ROWS; i++) {
                    connection.createStatement().execute(
                        "DELETE FROM " + tableName + " WHERE K = " + i);
                }
                // BUGFIX: the original silently passed if no exception was thrown.
                fail("Expected MAX_MUTATION_SIZE_EXCEEDED");
            } catch (SQLException e) {
                assertTrue(e.getMessage().contains(
                    SQLExceptionCode.MAX_MUTATION_SIZE_EXCEEDED.getMessage()));
            }
        }

        // Now limit the mutation *byte size* instead and delete everything at once.
        props.setProperty(QueryServices.MAX_MUTATION_SIZE_BYTES_ATTRIB, "10");
        props.setProperty(QueryServices.MAX_MUTATION_SIZE_ATTRIB, "10000");
        try (PhoenixConnection connection =
                (PhoenixConnection) DriverManager.getConnection(getUrl(), props)) {
            connection.setAutoCommit(false);
            try {
                connection.createStatement().execute("DELETE FROM " + tableName);
                // BUGFIX: the original silently passed if no exception was thrown.
                fail("Expected MAX_MUTATION_SIZE_BYTES_EXCEEDED");
            } catch (SQLException e) {
                assertTrue(e.getMessage().contains(
                    SQLExceptionCode.MAX_MUTATION_SIZE_BYTES_EXCEEDED.getMessage()));
            }
        }
    }

    /**
     * Verifies that exceeding the mutation-count limit and the mutation-byte-size
     * limit on UPSERTs raises the corresponding {@link SQLExceptionCode} errors.
     */
    @Test
    public void testUpsertMaxMutationSize() throws Exception {
        // Tiny row-count limit, generous byte limit: the count limit must trip first.
        Properties connectionProperties = new Properties();
        connectionProperties.setProperty(QueryServices.MAX_MUTATION_SIZE_ATTRIB, "3");
        connectionProperties.setProperty(QueryServices.MAX_MUTATION_SIZE_BYTES_ATTRIB, "1000000");
        PhoenixConnection connection =
                (PhoenixConnection) DriverManager.getConnection(getUrl(), connectionProperties);
        String fullTableName = generateUniqueName();
        try (Statement stmt = connection.createStatement()) {
            stmt.execute("CREATE TABLE " + fullTableName + DDL);
        }
        try {
            upsertRows(connection, fullTableName);
            fail();
        } catch (SQLException e) {
            assertEquals(SQLExceptionCode.MAX_MUTATION_SIZE_EXCEEDED.getErrorCode(),
                e.getErrorCode());
            assertTrue(e.getMessage().contains(
                SQLExceptionCode.MAX_MUTATION_SIZE_EXCEEDED.getMessage()));
        }

        // set the max mutation size (bytes) to a low value
        connectionProperties.setProperty(QueryServices.MAX_MUTATION_SIZE_ATTRIB, "1000");
        connectionProperties.setProperty(QueryServices.MAX_MUTATION_SIZE_BYTES_ATTRIB, "4");
        connection =
                (PhoenixConnection) DriverManager.getConnection(getUrl(), connectionProperties);
        try {
            upsertRows(connection, fullTableName);
            fail();
        } catch (SQLException e) {
            assertEquals(SQLExceptionCode.MAX_MUTATION_SIZE_BYTES_EXCEEDED.getErrorCode(),
                e.getErrorCode());
            assertTrue(e.getMessage().contains(
                SQLExceptionCode.MAX_MUTATION_SIZE_BYTES_EXCEEDED.getMessage()));
        }
    }

    /**
     * Exercises {@link MutationState#getEstimatedSize()} accounting: size grows on
     * upsert, resets to zero on commit/rollback, grows by exactly 4 bytes (one
     * statement index) on a duplicate row, and tracks column-value growth/shrink.
     */
    @Test
    public void testMutationEstimatedSize() throws Exception {
        PhoenixConnection conn = (PhoenixConnection) DriverManager.getConnection(getUrl());
        conn.setAutoCommit(false);
        String fullTableName = generateUniqueName();
        try (Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE " + fullTableName + DDL);
        }

        // upserting rows should increase the mutation state size
        MutationState state = conn.unwrap(PhoenixConnection.class).getMutationState();
        long prevEstimatedSize = state.getEstimatedSize();
        upsertRows(conn, fullTableName);
        assertTrue("Mutation state size should have increased",
            state.getEstimatedSize() > prevEstimatedSize);

        // after commit or rollback the size should be zero
        conn.commit();
        assertEquals("Mutation state size should be zero after commit", 0,
            state.getEstimatedSize());
        upsertRows(conn, fullTableName);
        conn.rollback();
        assertEquals("Mutation state size should be zero after rollback", 0,
            state.getEstimatedSize());

        // upsert one row
        PreparedStatement stmt = conn.prepareStatement("upsert into " + fullTableName
                + " (organization_id, entity_id, score) values (?,?,?)");
        stmt.setString(1, "ZZZZ");
        stmt.setString(2, "YYYY");
        stmt.setInt(3, 1);
        stmt.execute();
        assertTrue("Mutation state size should be greater than zero ",
            state.getEstimatedSize() > 0);

        prevEstimatedSize = state.getEstimatedSize();
        // upserting the same row twice should not increase the size
        stmt.setString(1, "ZZZZ");
        stmt.setString(2, "YYYY");
        stmt.setInt(3, 1);
        stmt.execute();
        assertEquals(
            "Mutation state size should only increase 4 bytes (size of the new statement index)",
            prevEstimatedSize + 4, state.getEstimatedSize());

        prevEstimatedSize = state.getEstimatedSize();
        // changing the value of one column of a row to a larger value should increase the
        // estimated size
        stmt = conn.prepareStatement("upsert into " + fullTableName
                + " (organization_id, entity_id, score, tags) values (?,?,?,?)");
        stmt.setString(1, "ZZZZ");
        stmt.setString(2, "YYYY");
        stmt.setInt(3, 1);
        stmt.setString(4, "random text string random text string random text string");
        stmt.execute();
        assertTrue("Mutation state size should increase",
            prevEstimatedSize + 4 < state.getEstimatedSize());

        prevEstimatedSize = state.getEstimatedSize();
        // changing the value of one column of a row to a smaller value should decrease the
        // estimated size
        stmt = conn.prepareStatement("upsert into " + fullTableName
                + " (organization_id, entity_id, score, tags) values (?,?,?,?)");
        stmt.setString(1, "ZZZZ");
        stmt.setString(2, "YYYY");
        stmt.setInt(3, 1);
        stmt.setString(4, "");
        stmt.execute();
        assertTrue("Mutation state size should decrease",
            prevEstimatedSize + 4 > state.getEstimatedSize());
    }

    /**
     * With a mutation batch size of 2, upserts that touch both a data table and its
     * index must still land all cells of one row in the same batch: every cell of a
     * raw-scanned row must carry the same timestamp.
     */
    @Test
    public void testSplitMutationsIntoSameGroupForSingleRow() throws Exception {
        String tableName = "TBL_" + generateUniqueName();
        String indexName = "IDX_" + generateUniqueName();
        Properties props = new Properties();
        // Force tiny batches so a single row's mutations would be split if grouping were broken.
        props.put("phoenix.mutate.batchSize", "2");
        try (PhoenixConnection conn =
                DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class)) {
            conn.setAutoCommit(false);
            conn.createStatement().executeUpdate(
                "CREATE TABLE " + tableName + " ("
                    + "A VARCHAR NOT NULL PRIMARY KEY,"
                    + "B VARCHAR,"
                    + "C VARCHAR,"
                    + "D VARCHAR) COLUMN_ENCODED_BYTES = 0");
            conn.createStatement().executeUpdate(
                "CREATE INDEX " + indexName + " on " + tableName + " (C) INCLUDE(D)");
            conn.createStatement().executeUpdate(
                "UPSERT INTO " + tableName + "(A,B,C,D) VALUES ('A2','B2','C2','D2')");
            conn.createStatement().executeUpdate(
                "UPSERT INTO " + tableName + "(A,B,C,D) VALUES ('A3','B3', 'C3', null)");
            conn.commit();

            Table htable = conn.getQueryServices().getTable(Bytes.toBytes(tableName));
            Scan scan = new Scan();
            scan.setRaw(true); // include delete markers so the null column's cell is visible
            Iterator<Result> scannerIter = htable.getScanner(scan).iterator();
            while (scannerIter.hasNext()) {
                long ts = -1;
                Result r = scannerIter.next();
                for (Cell cell : r.listCells()) {
                    if (ts == -1) {
                        ts = cell.getTimestamp();
                    } else {
                        assertEquals("(" + cell.toString() + ") has different ts", ts,
                            cell.getTimestamp());
                    }
                }
            }
            htable.close();
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.service.persistent; import static org.apache.pulsar.common.events.EventsTopicNames.checkTopicIsEventsNames; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.TreeMap; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.LongAdder; import org.apache.bookkeeper.mledger.AsyncCallbacks; import org.apache.bookkeeper.mledger.AsyncCallbacks.ClearBacklogCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.DeleteCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.MarkDeleteCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.ReadEntryCallback; import org.apache.bookkeeper.mledger.Entry; import org.apache.bookkeeper.mledger.ManagedCursor; import org.apache.bookkeeper.mledger.ManagedCursor.IndividualDeletedEntries; import org.apache.bookkeeper.mledger.ManagedLedgerException; import 
org.apache.bookkeeper.mledger.ManagedLedgerException.ConcurrentFindCursorPositionException; import org.apache.bookkeeper.mledger.ManagedLedgerException.InvalidCursorPositionException; import org.apache.bookkeeper.mledger.Position; import org.apache.bookkeeper.mledger.impl.ManagedCursorImpl; import org.apache.bookkeeper.mledger.impl.ManagedLedgerImpl; import org.apache.bookkeeper.mledger.impl.PositionImpl; import org.apache.commons.lang3.tuple.MutablePair; import org.apache.pulsar.broker.intercept.BrokerInterceptor; import org.apache.pulsar.broker.service.BrokerServiceException; import org.apache.pulsar.broker.service.BrokerServiceException.NotAllowedException; import org.apache.pulsar.broker.service.BrokerServiceException.ServerMetadataException; import org.apache.pulsar.broker.service.BrokerServiceException.SubscriptionBusyException; import org.apache.pulsar.broker.service.BrokerServiceException.SubscriptionFencedException; import org.apache.pulsar.broker.service.BrokerServiceException.SubscriptionInvalidCursorPosition; import org.apache.pulsar.broker.service.Consumer; import org.apache.pulsar.broker.service.Dispatcher; import org.apache.pulsar.broker.service.Subscription; import org.apache.pulsar.broker.service.Topic; import org.apache.pulsar.broker.transaction.pendingack.PendingAckHandle; import org.apache.pulsar.broker.transaction.pendingack.impl.MLPendingAckStore; import org.apache.pulsar.broker.transaction.pendingack.impl.PendingAckHandleDisabled; import org.apache.pulsar.broker.transaction.pendingack.impl.PendingAckHandleImpl; import org.apache.pulsar.client.api.transaction.TxnID; import org.apache.pulsar.common.api.proto.CommandAck.AckType; import org.apache.pulsar.common.api.proto.CommandSubscribe.SubType; import org.apache.pulsar.common.api.proto.KeySharedMeta; import org.apache.pulsar.common.api.proto.MessageMetadata; import org.apache.pulsar.common.api.proto.ReplicatedSubscriptionsSnapshot; import org.apache.pulsar.common.api.proto.TxnAction; import 
org.apache.pulsar.common.naming.TopicName; import org.apache.pulsar.common.policies.data.ConsumerStats; import org.apache.pulsar.common.policies.data.SubscriptionStats; import org.apache.pulsar.common.protocol.Commands; import org.apache.pulsar.common.protocol.Markers; import org.apache.pulsar.common.util.FutureUtil; import org.apache.pulsar.transaction.coordinator.impl.MLTransactionLogImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PersistentSubscription implements Subscription { protected final PersistentTopic topic; protected final ManagedCursor cursor; protected volatile Dispatcher dispatcher; protected final String topicName; protected final String subName; protected final String fullName; private static final int FALSE = 0; private static final int TRUE = 1; private static final AtomicIntegerFieldUpdater<PersistentSubscription> IS_FENCED_UPDATER = AtomicIntegerFieldUpdater.newUpdater(PersistentSubscription.class, "isFenced"); private volatile int isFenced = FALSE; private PersistentMessageExpiryMonitor expiryMonitor; private long lastExpireTimestamp = 0L; private long lastConsumedFlowTimestamp = 0L; private long lastMarkDeleteAdvancedTimestamp = 0L; // for connected subscriptions, message expiry will be checked if the backlog is greater than this threshold private static final int MINIMUM_BACKLOG_FOR_EXPIRY_CHECK = 1000; private static final String REPLICATED_SUBSCRIPTION_PROPERTY = "pulsar.replicated.subscription"; // Map of properties that is used to mark this subscription as "replicated". // Since this is the only field at this point, we can just keep a static // instance of the map. 
private static final Map<String, Long> REPLICATED_SUBSCRIPTION_CURSOR_PROPERTIES = new TreeMap<>(); private static final Map<String, Long> NON_REPLICATED_SUBSCRIPTION_CURSOR_PROPERTIES = Collections.emptyMap(); private volatile ReplicatedSubscriptionSnapshotCache replicatedSubscriptionSnapshotCache; private volatile Position lastMarkDeleteForTransactionMarker; private volatile boolean isDeleteTransactionMarkerInProcess = false; private final PendingAckHandle pendingAckHandle; private final LongAdder bytesOutFromRemovedConsumers = new LongAdder(); private final LongAdder msgOutFromRemovedConsumer = new LongAdder(); static { REPLICATED_SUBSCRIPTION_CURSOR_PROPERTIES.put(REPLICATED_SUBSCRIPTION_PROPERTY, 1L); } static Map<String, Long> getBaseCursorProperties(boolean isReplicated) { return isReplicated ? REPLICATED_SUBSCRIPTION_CURSOR_PROPERTIES : NON_REPLICATED_SUBSCRIPTION_CURSOR_PROPERTIES; } static boolean isCursorFromReplicatedSubscription(ManagedCursor cursor) { return cursor.getProperties().containsKey(REPLICATED_SUBSCRIPTION_PROPERTY); } public PersistentSubscription(PersistentTopic topic, String subscriptionName, ManagedCursor cursor, boolean replicated) { this.topic = topic; this.cursor = cursor; this.topicName = topic.getName(); this.subName = subscriptionName; this.fullName = MoreObjects.toStringHelper(this).add("topic", topicName).add("name", subName).toString(); this.expiryMonitor = new PersistentMessageExpiryMonitor(topicName, subscriptionName, cursor, this); this.setReplicated(replicated); if (topic.getBrokerService().getPulsar().getConfig().isTransactionCoordinatorEnabled() && !checkTopicIsEventsNames(topicName) && !topicName.startsWith(TopicName.TRANSACTION_COORDINATOR_ASSIGN.getLocalName()) && !topicName.startsWith(MLTransactionLogImpl.TRANSACTION_LOG_PREFIX) && !topicName.endsWith(MLPendingAckStore.PENDING_ACK_STORE_SUFFIX)) { this.pendingAckHandle = new PendingAckHandleImpl(this); } else { this.pendingAckHandle = new PendingAckHandleDisabled(); } 
IS_FENCED_UPDATER.set(this, FALSE); } public void updateLastMarkDeleteAdvancedTimestamp() { this.lastMarkDeleteAdvancedTimestamp = Math.max(this.lastMarkDeleteAdvancedTimestamp, System.currentTimeMillis()); } @Override public BrokerInterceptor interceptor() { return topic.getBrokerService().getInterceptor(); } @Override public String getName() { return this.subName; } @Override public Topic getTopic() { return topic; } @Override public boolean isReplicated() { return replicatedSubscriptionSnapshotCache != null; } void setReplicated(boolean replicated) { this.replicatedSubscriptionSnapshotCache = replicated ? new ReplicatedSubscriptionSnapshotCache(subName, topic.getBrokerService().pulsar().getConfiguration() .getReplicatedSubscriptionsSnapshotMaxCachedPerSubscription()) : null; } @Override public CompletableFuture<Void> addConsumer(Consumer consumer) { return pendingAckHandle.pendingAckHandleFuture().thenCompose(future -> { synchronized (PersistentSubscription.this) { cursor.updateLastActive(); if (IS_FENCED_UPDATER.get(this) == TRUE) { log.warn("Attempting to add consumer {} on a fenced subscription", consumer); return FutureUtil.failedFuture(new SubscriptionFencedException("Subscription is fenced")); } if (dispatcher == null || !dispatcher.isConsumerConnected()) { Dispatcher previousDispatcher = null; boolean useStreamingDispatcher = topic.getBrokerService().getPulsar() .getConfiguration().isStreamingDispatch(); switch (consumer.subType()) { case Exclusive: if (dispatcher == null || dispatcher.getType() != SubType.Exclusive) { previousDispatcher = dispatcher; dispatcher = useStreamingDispatcher ? new PersistentStreamingDispatcherSingleActiveConsumer( cursor, SubType.Exclusive, 0, topic, this) : new PersistentDispatcherSingleActiveConsumer( cursor, SubType.Exclusive, 0, topic, this); } break; case Shared: if (dispatcher == null || dispatcher.getType() != SubType.Shared) { previousDispatcher = dispatcher; dispatcher = useStreamingDispatcher ? 
new PersistentStreamingDispatcherMultipleConsumers( topic, cursor, this) : new PersistentDispatcherMultipleConsumers(topic, cursor, this); } break; case Failover: int partitionIndex = TopicName.getPartitionIndex(topicName); if (partitionIndex < 0) { // For non partition topics, use a negative index so // dispatcher won't sort consumers before picking // an active consumer for the topic. partitionIndex = -1; } if (dispatcher == null || dispatcher.getType() != SubType.Failover) { previousDispatcher = dispatcher; dispatcher = useStreamingDispatcher ? new PersistentStreamingDispatcherSingleActiveConsumer( cursor, SubType.Failover, partitionIndex, topic, this) : new PersistentDispatcherSingleActiveConsumer(cursor, SubType.Failover, partitionIndex, topic, this); } break; case Key_Shared: if (dispatcher == null || dispatcher.getType() != SubType.Key_Shared) { previousDispatcher = dispatcher; KeySharedMeta ksm = consumer.getKeySharedMeta(); dispatcher = new PersistentStickyKeyDispatcherMultipleConsumers(topic, cursor, this, topic.getBrokerService().getPulsar().getConfiguration(), ksm); } break; default: return FutureUtil.failedFuture( new ServerMetadataException("Unsupported subscription type")); } if (previousDispatcher != null) { previousDispatcher.close().thenRun(() -> { log.info("[{}][{}] Successfully closed previous dispatcher", topicName, subName); }).exceptionally(ex -> { log.error("[{}][{}] Failed to close previous dispatcher", topicName, subName, ex); return null; }); } } else { if (consumer.subType() != dispatcher.getType()) { return FutureUtil.failedFuture( new SubscriptionBusyException("Subscription is of different type")); } } try { dispatcher.addConsumer(consumer); return CompletableFuture.completedFuture(null); } catch (BrokerServiceException brokerServiceException) { return FutureUtil.failedFuture(brokerServiceException); } } }); } @Override public synchronized void removeConsumer(Consumer consumer, boolean isResetCursor) throws BrokerServiceException { 
cursor.updateLastActive(); if (dispatcher != null) { dispatcher.removeConsumer(consumer); } // preserve accumulative stats form removed consumer ConsumerStats stats = consumer.getStats(); bytesOutFromRemovedConsumers.add(stats.bytesOutCounter); msgOutFromRemovedConsumer.add(stats.msgOutCounter); if (dispatcher != null && dispatcher.getConsumers().isEmpty()) { deactivateCursor(); if (!cursor.isDurable()) { // If cursor is not durable, we need to clean up the subscription as well this.close().thenRun(() -> { synchronized (this) { if (dispatcher != null) { dispatcher.close().thenRun(() -> { log.info("[{}][{}] Successfully closed dispatcher for reader", topicName, subName); }).exceptionally(ex -> { log.error("[{}][{}] Failed to close dispatcher for reader", topicName, subName, ex); return null; }); } } }).exceptionally(exception -> { log.error("[{}][{}] Failed to close subscription for reader", topicName, subName, exception); return null; }); // when topic closes: it iterates through concurrent-subscription map to close each subscription. so, // topic.remove again try to access same map which creates deadlock. so, execute it in different thread. topic.getBrokerService().pulsar().getExecutor().submit(() ->{ topic.removeSubscription(subName); // Also need remove the cursor here, otherwise the data deletion will not work well. // Because data deletion depends on the mark delete position of all cursors. 
if (!isResetCursor) { try { topic.getManagedLedger().deleteCursor(cursor.getName()); } catch (InterruptedException | ManagedLedgerException e) { log.warn("[{}] [{}] Failed to remove non durable cursor", topic.getName(), subName, e); } } }); } } // invalid consumer remove will throw an exception // decrement usage is triggered only for valid consumer close topic.decrementUsageCount(); if (log.isDebugEnabled()) { log.debug("[{}] [{}] [{}] Removed consumer -- count: {}", topic.getName(), subName, consumer.consumerName(), topic.currentUsageCount()); } } public void deactivateCursor() { this.cursor.setInactive(); } @Override public void consumerFlow(Consumer consumer, int additionalNumberOfMessages) { this.lastConsumedFlowTimestamp = System.currentTimeMillis(); dispatcher.consumerFlow(consumer, additionalNumberOfMessages); } @Override public void acknowledgeMessage(List<Position> positions, AckType ackType, Map<String, Long> properties) { Position previousMarkDeletePosition = cursor.getMarkDeletedPosition(); if (ackType == AckType.Cumulative) { if (positions.size() != 1) { log.warn("[{}][{}] Invalid cumulative ack received with multiple message ids.", topicName, subName); return; } Position position = positions.get(0); if (log.isDebugEnabled()) { log.debug("[{}][{}] Cumulative ack on {}", topicName, subName, position); } cursor.asyncMarkDelete(position, mergeCursorProperties(properties), markDeleteCallback, previousMarkDeletePosition); } else { if (log.isDebugEnabled()) { log.debug("[{}][{}] Individual acks on {}", topicName, subName, positions); } cursor.asyncDelete(positions, deleteCallback, previousMarkDeletePosition); if (topic.getBrokerService().getPulsar().getConfig().isTransactionCoordinatorEnabled()) { positions.forEach(position -> { if (((ManagedCursorImpl) cursor).isMessageDeleted(position)) { pendingAckHandle.clearIndividualPosition(position); } }); } if (dispatcher != null) { dispatcher.getRedeliveryTracker().removeBatch(positions); } } if 
(!cursor.getMarkDeletedPosition().equals(previousMarkDeletePosition)) { this.updateLastMarkDeleteAdvancedTimestamp(); // Mark delete position advance ReplicatedSubscriptionSnapshotCache snapshotCache = this.replicatedSubscriptionSnapshotCache; if (snapshotCache != null) { ReplicatedSubscriptionsSnapshot snapshot = snapshotCache .advancedMarkDeletePosition((PositionImpl) cursor.getMarkDeletedPosition()); if (snapshot != null) { topic.getReplicatedSubscriptionController() .ifPresent(c -> c.localSubscriptionUpdated(subName, snapshot)); } } } if (topic.getBrokerService().getPulsar().getConfig().isTransactionCoordinatorEnabled()) { Position currentMarkDeletePosition = cursor.getMarkDeletedPosition(); if ((lastMarkDeleteForTransactionMarker == null || ((PositionImpl) lastMarkDeleteForTransactionMarker) .compareTo((PositionImpl) currentMarkDeletePosition) < 0) && !isDeleteTransactionMarkerInProcess) { isDeleteTransactionMarkerInProcess = true; deleteTransactionMarker((PositionImpl) currentMarkDeletePosition, ackType, properties); } } if (topic.getManagedLedger().isTerminated() && cursor.getNumberOfEntriesInBacklog(false) == 0) { // Notify all consumer that the end of topic was reached if (dispatcher != null) { dispatcher.getConsumers().forEach(Consumer::reachedEndOfTopic); } } } private void deleteTransactionMarker(PositionImpl position, AckType ackType, Map<String, Long> properties) { if (position != null) { ManagedLedgerImpl managedLedger = ((ManagedLedgerImpl) cursor.getManagedLedger()); PositionImpl nextPosition = managedLedger.getNextValidPosition(position); if (nextPosition != null && nextPosition.compareTo((PositionImpl) managedLedger.getLastConfirmedEntry()) <= 0) { managedLedger.asyncReadEntry(nextPosition, new ReadEntryCallback() { @Override public void readEntryComplete(Entry entry, Object ctx) { try { MessageMetadata messageMetadata = Commands.parseMessageMetadata(entry.getDataBuffer()); isDeleteTransactionMarkerInProcess = false; if 
(Markers.isTxnCommitMarker(messageMetadata) || Markers.isTxnAbortMarker(messageMetadata)) {
                            // Transaction marker entry: remember it so it is not processed twice, then
                            // ack past it so the cursor (and consumers) can move beyond the marker.
                            lastMarkDeleteForTransactionMarker = position;
                            acknowledgeMessage(Collections.singletonList(nextPosition), ackType, properties);
                        }
                    } finally {
                        // Entry buffers come from the managed ledger; always release them.
                        entry.release();
                    }
                }

                @Override
                public void readEntryFailed(ManagedLedgerException exception, Object ctx) {
                    // Clear the in-progress flag so a later ack can retry the marker deletion.
                    isDeleteTransactionMarkerInProcess = false;
                    log.error("Fail to read transaction marker! Position : {}", position, exception);
                }
            }, null);
        } else {
            isDeleteTransactionMarkerInProcess = false;
        }
    } else {
        isDeleteTransactionMarkerInProcess = false;
    }
}

/**
 * Individually acknowledge a batch of positions on behalf of a transaction; the ack is
 * staged in the pending-ack handle until the transaction commits or aborts.
 */
public CompletableFuture<Void> transactionIndividualAcknowledge(
        TxnID txnId, List<MutablePair<PositionImpl, Integer>> positions) {
    return pendingAckHandle.individualAcknowledgeMessage(txnId, positions);
}

/** Cumulatively acknowledge up to the given positions on behalf of a transaction. */
public CompletableFuture<Void> transactionCumulativeAcknowledge(TxnID txnId, List<PositionImpl> positions) {
    return pendingAckHandle.cumulativeAcknowledgeMessage(txnId, positions);
}

// Callback used for cursor mark-delete operations; on success it notifies the dispatcher
// if the mark-delete position actually moved forward.
private final MarkDeleteCallback markDeleteCallback = new MarkDeleteCallback() {
    @Override
    public void markDeleteComplete(Object ctx) {
        // ctx carries the mark-delete position as it was BEFORE this operation.
        PositionImpl oldMD = (PositionImpl) ctx;
        PositionImpl newMD = (PositionImpl) cursor.getMarkDeletedPosition();
        if (log.isDebugEnabled()) {
            log.debug("[{}][{}] Mark deleted messages to position {} from position {}", topicName, subName,
                    newMD, oldMD);
        }
        // Signal the dispatchers to give chance to take extra actions
        notifyTheMarkDeletePositionMoveForwardIfNeeded(oldMD);
    }

    @Override
    public void markDeleteFailed(ManagedLedgerException exception, Object ctx) {
        // TODO: cut consumer connection on markDeleteFailed
        if (log.isDebugEnabled()) {
            log.debug("[{}][{}] Failed to mark delete for position {}: {}", topicName, subName, ctx, exception);
        }
    }
};

// Callback used for individual-message delete operations on the cursor.
private final DeleteCallback deleteCallback = new DeleteCallback() {
    @Override
    public void deleteComplete(Object position) {
        if (log.isDebugEnabled()) {
            log.debug("[{}][{}] Deleted message at {}", topicName, subName, position);
        }
        // Signal the dispatchers to give chance to take extra actions
        notifyTheMarkDeletePositionMoveForwardIfNeeded((PositionImpl) position);
    }

    @Override
    public void deleteFailed(ManagedLedgerException exception, Object ctx) {
        log.warn("[{}][{}] Failed to delete message at {}: {}", topicName, subName, ctx, exception);
    }
};

/**
 * Notify the dispatcher that the mark-delete position moved forward, but only when the
 * cursor's current mark-delete position is strictly greater than {@code oldPosition}.
 */
private void notifyTheMarkDeletePositionMoveForwardIfNeeded(Position oldPosition) {
    PositionImpl oldMD = (PositionImpl) oldPosition;
    PositionImpl newMD = (PositionImpl) cursor.getMarkDeletedPosition();
    if (dispatcher != null && newMD.compareTo(oldMD) > 0) {
        dispatcher.markDeletePositionMoveForward();
    }
}

@Override
public String toString() {
    return fullName;
}

@Override
public String getTopicName() {
    return this.topicName;
}

@Override
public SubType getType() {
    // Subscription type is owned by the dispatcher; null until a dispatcher exists.
    return dispatcher != null ? dispatcher.getType() : null;
}

/** Human-readable subscription type: "None" when no dispatcher is attached yet. */
@Override
public String getTypeString() {
    SubType type = getType();
    if (type == null) {
        return "None";
    }
    switch (type) {
        case Exclusive:
            return "Exclusive";
        case Failover:
            return "Failover";
        case Shared:
            return "Shared";
        case Key_Shared:
            return "Key_Shared";
    }
    // Unreachable with the current SubType values; kept as a defensive fallback.
    return "Null";
}

/** Asynchronously clear the whole backlog of this subscription (including delayed messages). */
@Override
public CompletableFuture<Void> clearBacklog() {
    CompletableFuture<Void> future = new CompletableFuture<>();
    if (log.isDebugEnabled()) {
        log.debug("[{}][{}] Backlog size before clearing: {}", topicName, subName,
                cursor.getNumberOfEntriesInBacklog(false));
    }
    cursor.asyncClearBacklog(new ClearBacklogCallback() {
        @Override
        public void clearBacklogComplete(Object ctx) {
            if (log.isDebugEnabled()) {
                log.debug("[{}][{}] Backlog size after clearing: {}", topicName, subName,
                        cursor.getNumberOfEntriesInBacklog(false));
            }
            if (dispatcher != null) {
                // Delayed-delivery tracker state must be dropped together with the backlog.
                dispatcher.clearDelayedMessages();
            }
            future.complete(null);
        }

        @Override
        public void clearBacklogFailed(ManagedLedgerException exception, Object ctx) {
            log.error("[{}][{}] Failed to clear backlog", topicName, subName, exception);
            future.completeExceptionally(exception);
        }
    }, null);
    return future;
}

/** Skip {@code numMessagesToSkip} messages, excluding individually-deleted entries. */
@Override
public CompletableFuture<Void> skipMessages(int numMessagesToSkip) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    if (log.isDebugEnabled()) {
        log.debug("[{}][{}] Skipping {} messages, current backlog {}", topicName, subName, numMessagesToSkip,
                cursor.getNumberOfEntriesInBacklog(false));
    }
    cursor.asyncSkipEntries(numMessagesToSkip, IndividualDeletedEntries.Exclude,
            new AsyncCallbacks.SkipEntriesCallback() {
                @Override
                public void skipEntriesComplete(Object ctx) {
                    if (log.isDebugEnabled()) {
                        log.debug("[{}][{}] Skipped {} messages, new backlog {}", topicName, subName,
                                numMessagesToSkip, cursor.getNumberOfEntriesInBacklog(false));
                    }
                    future.complete(null);
                }

                @Override
                public void skipEntriesFailed(ManagedLedgerException exception, Object ctx) {
                    log.error("[{}][{}] Failed to skip {} messages", topicName, subName, numMessagesToSkip,
                            exception);
                    future.completeExceptionally(exception);
                }
            }, null);
    return future;
}

/**
 * Reset the subscription to the first message published at or after {@code timestamp}.
 * Falls back to the ledger's first position when no message matches (e.g. the timestamp
 * predates retention); fails when even a first position cannot be determined.
 */
@Override
public CompletableFuture<Void> resetCursor(long timestamp) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    PersistentMessageFinder persistentMessageFinder = new PersistentMessageFinder(topicName, cursor);

    if (log.isDebugEnabled()) {
        log.debug("[{}][{}] Resetting subscription to timestamp {}", topicName, subName, timestamp);
    }

    persistentMessageFinder.findMessages(timestamp, new AsyncCallbacks.FindEntryCallback() {
        @Override
        public void findEntryComplete(Position position, Object ctx) {
            final Position finalPosition;
            if (position == null) {
                // this should not happen ideally unless a reset is requested for a time
                // that spans beyond the retention limits (time/size)
                finalPosition = cursor.getFirstPosition();
                if (finalPosition == null) {
                    log.warn("[{}][{}] Unable to find position for timestamp {}."
                            + " Unable to reset cursor to first position", topicName, subName, timestamp);
                    future.completeExceptionally(
                            new SubscriptionInvalidCursorPosition(
                                    "Unable to find position for specified timestamp"));
                    return;
                }
                log.info(
                        "[{}][{}] Unable to find position for timestamp {}."
                                + " Resetting cursor to first position {} in ledger",
                        topicName, subName, timestamp, finalPosition);
            } else {
                // The finder returns the last message BEFORE the timestamp; reset to the next one.
                finalPosition = position.getNext();
            }
            resetCursor(finalPosition, future);
        }

        @Override
        public void findEntryFailed(ManagedLedgerException exception, Optional<Position> failedReadPosition,
                Object ctx) {
            // todo - what can go wrong here that needs to be retried?
            if (exception instanceof ConcurrentFindCursorPositionException) {
                future.completeExceptionally(new SubscriptionBusyException(exception.getMessage()));
            } else {
                future.completeExceptionally(new BrokerServiceException(exception));
            }
        }
    });

    return future;
}

/** Reset the subscription cursor to an explicit position. */
@Override
public CompletableFuture<Void> resetCursor(Position position) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    resetCursor(position, future);
    return future;
}

/**
 * Core reset flow: fence the subscription (CAS on IS_FENCED), disconnect active consumers,
 * reset the cursor, then unfence. Every failure path must unfence, otherwise the
 * subscription would stay blocked forever.
 */
private void resetCursor(Position finalPosition, CompletableFuture<Void> future) {
    if (!IS_FENCED_UPDATER.compareAndSet(PersistentSubscription.this, FALSE, TRUE)) {
        // Another reset/disconnect is in flight.
        future.completeExceptionally(new SubscriptionBusyException("Failed to fence subscription"));
        return;
    }

    final CompletableFuture<Void> disconnectFuture;

    // Lock the Subscription object before locking the Dispatcher object to avoid deadlocks
    synchronized (this) {
        if (dispatcher != null && dispatcher.isConsumerConnected()) {
            disconnectFuture = dispatcher.disconnectActiveConsumers(true);
        } else {
            disconnectFuture = CompletableFuture.completedFuture(null);
        }
    }

    disconnectFuture.whenComplete((aVoid, throwable) -> {
        if (dispatcher != null) {
            dispatcher.resetCloseFuture();
        }

        if (throwable != null) {
            log.error("[{}][{}] Failed to disconnect consumer from subscription", topicName, subName, throwable);
            IS_FENCED_UPDATER.set(PersistentSubscription.this, FALSE);
            future.completeExceptionally(
                    new SubscriptionBusyException("Failed to disconnect consumers from subscription"));
            return;
        }

        log.info("[{}][{}] Successfully disconnected consumers from subscription, proceeding with cursor reset",
                topicName, subName);

        try {
            cursor.asyncResetCursor(finalPosition, new AsyncCallbacks.ResetCursorCallback() {
                @Override
                public void resetComplete(Object ctx) {
                    if (log.isDebugEnabled()) {
                        log.debug("[{}][{}] Successfully reset subscription to position {}", topicName, subName,
                                finalPosition);
                    }
                    if (dispatcher != null) {
                        dispatcher.cursorIsReset();
                    }
                    IS_FENCED_UPDATER.set(PersistentSubscription.this, FALSE);
                    future.complete(null);
                }

                @Override
                public void resetFailed(ManagedLedgerException exception, Object ctx) {
                    log.error("[{}][{}] Failed to reset subscription to position {}", topicName, subName,
                            finalPosition, exception);
                    IS_FENCED_UPDATER.set(PersistentSubscription.this, FALSE);
                    // todo - retry on InvalidCursorPositionException
                    // or should we just ask user to retry one more time?
                    if (exception instanceof InvalidCursorPositionException) {
                        future.completeExceptionally(new SubscriptionInvalidCursorPosition(exception.getMessage()));
                    } else if (exception instanceof ConcurrentFindCursorPositionException) {
                        future.completeExceptionally(new SubscriptionBusyException(exception.getMessage()));
                    } else {
                        future.completeExceptionally(new BrokerServiceException(exception));
                    }
                }
            });
        } catch (Exception e) {
            log.error("[{}][{}] Error while resetting cursor", topicName, subName, e);
            IS_FENCED_UPDATER.set(PersistentSubscription.this, FALSE);
            future.completeExceptionally(new BrokerServiceException(e));
        }
    });
}

/** Peek the n-th message in the backlog without consuming it. */
@Override
public CompletableFuture<Entry> peekNthMessage(int messagePosition) {
    CompletableFuture<Entry> future = new CompletableFuture<>();

    if (log.isDebugEnabled()) {
        log.debug("[{}][{}] Getting message at position {}", topicName, subName, messagePosition);
    }

    cursor.asyncGetNthEntry(messagePosition, IndividualDeletedEntries.Exclude, new ReadEntryCallback() {
        @Override
        public void readEntryFailed(ManagedLedgerException exception, Object ctx) {
            future.completeExceptionally(exception);
        }

        @Override
        public void readEntryComplete(Entry entry, Object ctx) {
            // NOTE(review): entry ownership transfers to the caller, who must release it — confirm.
            future.complete(entry);
        }
    }, null);

    return future;
}

@Override
public long getNumberOfEntriesInBacklog(boolean getPreciseBacklog) {
    return cursor.getNumberOfEntriesInBacklog(getPreciseBacklog);
}

@Override
public synchronized Dispatcher getDispatcher() {
    return this.dispatcher;
}

public long getNumberOfEntriesSinceFirstNotAckedMessage() {
    return cursor.getNumberOfEntriesSinceFirstNotAckedMessage();
}

public int getTotalNonContiguousDeletedMessagesRange() {
    return cursor.getTotalNonContiguousDeletedMessagesRange();
}

/**
 * Close the cursor ledger for this subscription. Requires that there are no active consumers on the dispatcher
 *
 * @return CompletableFuture indicating the completion of delete operation
 */
@Override
public CompletableFuture<Void> close() {
    synchronized (this) {
        if (dispatcher != null && dispatcher.isConsumerConnected()) {
            return FutureUtil.failedFuture(new SubscriptionBusyException("Subscription has active consumers"));
        }
        // Fence only after the pending-ack handle has been closed.
        return this.pendingAckHandle.close().thenAccept(v -> {
            IS_FENCED_UPDATER.set(this, TRUE);
            log.info("[{}][{}] Successfully closed subscription [{}]", topicName, subName, cursor);
        });
    }
}

/**
 * Disconnect all consumers attached to the dispatcher and close this subscription.
 *
 * @return CompletableFuture indicating the completion of disconnect operation
 */
@Override
public synchronized CompletableFuture<Void> disconnect() {
    CompletableFuture<Void> disconnectFuture = new CompletableFuture<>();

    // block any further consumers on this subscription
    IS_FENCED_UPDATER.set(this, TRUE);

    (dispatcher != null ? dispatcher.close() : CompletableFuture.completedFuture(null))
            .thenCompose(v -> close()).thenRun(() -> {
                log.info("[{}][{}] Successfully disconnected and closed subscription", topicName, subName);
                disconnectFuture.complete(null);
            }).exceptionally(exception -> {
                // Roll back the fence so the subscription stays usable after a failed disconnect.
                IS_FENCED_UPDATER.set(this, FALSE);
                if (dispatcher != null) {
                    dispatcher.reset();
                }
                log.error("[{}][{}] Error disconnecting consumers from subscription", topicName, subName,
                        exception);
                disconnectFuture.completeExceptionally(exception);
                return null;
            });

    return disconnectFuture;
}

/**
 * Delete the subscription by closing and deleting its managed cursor if no consumers are connected to it. Handle
 * unsubscribe call from admin layer.
 *
 * @return CompletableFuture indicating the completion of delete operation
 */
@Override
public CompletableFuture<Void> delete() {
    return delete(false);
}

/**
 * Forcefully close all consumers and deletes the subscription.
 * @return
 */
@Override
public CompletableFuture<Void> deleteForcefully() {
    return delete(true);
}

/**
 * Delete the subscription by closing and deleting its managed cursor. Handle unsubscribe call from admin layer.
 *
 * @param closeIfConsumersConnected
 *            Flag indicate whether explicitly close connected consumers before trying to delete subscription. If
 *            any consumer is connected to it and if this flag is disable then this operation fails.
 * @return CompletableFuture indicating the completion of delete operation
 */
private CompletableFuture<Void> delete(boolean closeIfConsumersConnected) {
    CompletableFuture<Void> deleteFuture = new CompletableFuture<>();

    log.info("[{}][{}] Unsubscribing", topicName, subName);

    CompletableFuture<Void> closeSubscriptionFuture = new CompletableFuture<>();

    if (closeIfConsumersConnected) {
        // Forced path: kick connected consumers first.
        this.disconnect().thenRun(() -> {
            closeSubscriptionFuture.complete(null);
        }).exceptionally(ex -> {
            log.error("[{}][{}] Error disconnecting and closing subscription", topicName, subName, ex);
            closeSubscriptionFuture.completeExceptionally(ex);
            return null;
        });
    } else {
        // Non-forced path: close() fails if consumers are still connected.
        this.close().thenRun(() -> {
            closeSubscriptionFuture.complete(null);
        }).exceptionally(exception -> {
            log.error("[{}][{}] Error closing subscription", topicName, subName, exception);
            closeSubscriptionFuture.completeExceptionally(exception);
            return null;
        });
    }

    // cursor close handles pending delete (ack) operations
    closeSubscriptionFuture.thenCompose(v -> topic.unsubscribe(subName)).thenAccept(v -> {
        synchronized (this) {
            (dispatcher != null ? dispatcher.close() : CompletableFuture.completedFuture(null)).thenRun(() -> {
                log.info("[{}][{}] Successfully deleted subscription", topicName, subName);
                deleteFuture.complete(null);
            }).exceptionally(ex -> {
                IS_FENCED_UPDATER.set(this, FALSE);
                if (dispatcher != null) {
                    dispatcher.reset();
                }
                log.error("[{}][{}] Error deleting subscription", topicName, subName, ex);
                deleteFuture.completeExceptionally(ex);
                return null;
            });
        }
    }).exceptionally(exception -> {
        IS_FENCED_UPDATER.set(this, FALSE);
        log.error("[{}][{}] Error deleting subscription", topicName, subName, exception);
        deleteFuture.completeExceptionally(exception);
        return null;
    });

    return deleteFuture;
}

/**
 * Handle unsubscribe command from the client API Check with the dispatcher is this consumer can proceed with
 * unsubscribe.
 *
 * @param consumer consumer object that is initiating the unsubscribe operation
 * @return CompletableFuture indicating the completion of unsubscribe operation
 */
@Override
public CompletableFuture<Void> doUnsubscribe(Consumer consumer) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    try {
        // NOTE(review): dispatcher is dereferenced without a null check here, unlike the
        // null-guarded accesses elsewhere in this class — confirm a consumer implies a dispatcher.
        if (dispatcher.canUnsubscribe(consumer)) {
            consumer.close();
            return delete();
        }
        future.completeExceptionally(
                new ServerMetadataException("Unconnected or shared consumer attempting to unsubscribe"));
    } catch (BrokerServiceException e) {
        log.warn("Error removing consumer {}", consumer);
        future.completeExceptionally(e);
    }
    return future;
}

@Override
public List<Consumer> getConsumers() {
    // Local copy guards against a concurrent null-ing of the field between check and use.
    Dispatcher dispatcher = this.dispatcher;
    if (dispatcher != null) {
        return dispatcher.getConsumers();
    } else {
        return Collections.emptyList();
    }
}

/**
 * Expire messages older than the given TTL. Skipped entirely for empty backlogs and for
 * small, nearly caught-up backlogs of connected consumers whose oldest message is not
 * yet expired (avoids needless ledger reads).
 */
@Override
public boolean expireMessages(int messageTTLInSeconds) {
    if ((getNumberOfEntriesInBacklog(false) == 0) || (dispatcher != null && dispatcher.isConsumerConnected()
            && getNumberOfEntriesInBacklog(false) < MINIMUM_BACKLOG_FOR_EXPIRY_CHECK
            && !topic.isOldestMessageExpired(cursor, messageTTLInSeconds))) {
        // don't do anything for almost caught-up connected subscriptions
        return false;
    }
    this.lastExpireTimestamp = System.currentTimeMillis();
    return expiryMonitor.expireMessages(messageTTLInSeconds);
}

/** Expire (skip) all messages up to the given position. */
@Override
public boolean expireMessages(Position position) {
    this.lastExpireTimestamp = System.currentTimeMillis();
    return expiryMonitor.expireMessages(position);
}

public double getExpiredMessageRate() {
    return expiryMonitor.getMessageExpiryRate();
}

public PersistentMessageExpiryMonitor getExpiryMonitor() {
    return expiryMonitor;
}

public long estimateBacklogSize() {
    return cursor.getEstimatedSizeSinceMarkDeletePosition();
}

/**
 * Build a stats snapshot for this subscription, aggregating per-consumer stats from the
 * dispatcher plus cursor/backlog/expiry information.
 *
 * @param getPreciseBacklog whether to compute an exact (more expensive) backlog count
 * @param subscriptionBacklogSize whether to also estimate the backlog size in bytes
 */
public SubscriptionStats getStats(Boolean getPreciseBacklog, boolean subscriptionBacklogSize) {
    SubscriptionStats subStats = new SubscriptionStats();
    subStats.lastExpireTimestamp = lastExpireTimestamp;
    subStats.lastConsumedFlowTimestamp = lastConsumedFlowTimestamp;
    subStats.lastMarkDeleteAdvancedTimestamp = lastMarkDeleteAdvancedTimestamp;
    subStats.bytesOutCounter = bytesOutFromRemovedConsumers.longValue();
    subStats.msgOutCounter = msgOutFromRemovedConsumer.longValue();

    Dispatcher dispatcher = this.dispatcher;
    if (dispatcher != null) {
        Map<String, List<String>> consumerKeyHashRanges = getType() == SubType.Key_Shared
                ? ((PersistentStickyKeyDispatcherMultipleConsumers) dispatcher).getConsumerKeyHashRanges() : null;
        dispatcher.getConsumers().forEach(consumer -> {
            ConsumerStats consumerStats = consumer.getStats();
            subStats.consumers.add(consumerStats);
            subStats.msgRateOut += consumerStats.msgRateOut;
            subStats.msgThroughputOut += consumerStats.msgThroughputOut;
            subStats.bytesOutCounter += consumerStats.bytesOutCounter;
            subStats.msgOutCounter += consumerStats.msgOutCounter;
            subStats.msgRateRedeliver += consumerStats.msgRateRedeliver;
            // Both fields are updated: "chucked" is presumably the old misspelled field kept
            // for backward compatibility alongside the corrected "chunked" — confirm.
            subStats.chuckedMessageRate += consumerStats.chuckedMessageRate;
            subStats.chunkedMessageRate += consumerStats.chunkedMessageRate;
            subStats.unackedMessages += consumerStats.unackedMessages;
            subStats.lastConsumedTimestamp = Math.max(subStats.lastConsumedTimestamp,
                    consumerStats.lastConsumedTimestamp);
            subStats.lastAckedTimestamp = Math.max(subStats.lastAckedTimestamp, consumerStats.lastAckedTimestamp);
            if (consumerKeyHashRanges != null && consumerKeyHashRanges.containsKey(consumer.consumerName())) {
                consumerStats.keyHashRanges = consumerKeyHashRanges.get(consumer.consumerName());
            }
        });
    }

    subStats.type = getType();
    if (dispatcher instanceof PersistentDispatcherSingleActiveConsumer) {
        Consumer activeConsumer = ((PersistentDispatcherSingleActiveConsumer) dispatcher).getActiveConsumer();
        if (activeConsumer != null) {
            subStats.activeConsumerName = activeConsumer.consumerName();
        }
    }
    if (Subscription.isIndividualAckMode(subStats.type)) {
        if (dispatcher instanceof PersistentDispatcherMultipleConsumers) {
            PersistentDispatcherMultipleConsumers d = (PersistentDispatcherMultipleConsumers) dispatcher;
            // In individual-ack mode the dispatcher's totals supersede the per-consumer sum.
            subStats.unackedMessages = d.getTotalUnackedMessages();
            subStats.blockedSubscriptionOnUnackedMsgs = d.isBlockedDispatcherOnUnackedMsgs();
            subStats.msgDelayed = d.getNumberOfDelayedMessages();
        }
    }
    subStats.msgBacklog = getNumberOfEntriesInBacklog(getPreciseBacklog);
    if (subscriptionBacklogSize) {
        subStats.backlogSize = ((ManagedLedgerImpl) topic.getManagedLedger())
                .getEstimatedBacklogSize((PositionImpl) cursor.getMarkDeletedPosition());
    }
    subStats.msgBacklogNoDelayed = subStats.msgBacklog - subStats.msgDelayed;
    subStats.msgRateExpired = expiryMonitor.getMessageExpiryRate();
    subStats.totalMsgExpired = expiryMonitor.getTotalMessageExpired();
    subStats.isReplicated = isReplicated();
    subStats.isDurable = cursor.isDurable();
    if (getType() == SubType.Key_Shared && dispatcher instanceof PersistentStickyKeyDispatcherMultipleConsumers) {
        LinkedHashMap<Consumer, PositionImpl> recentlyJoinedConsumers =
                ((PersistentStickyKeyDispatcherMultipleConsumers) dispatcher).getRecentlyJoinedConsumers();
        if (recentlyJoinedConsumers != null && recentlyJoinedConsumers.size() > 0) {
            recentlyJoinedConsumers.forEach((k, v) -> {
                subStats.consumersAfterMarkDeletePosition.put(k.consumerName(), v.toString());
            });
        }
    }
    subStats.nonContiguousDeletedMessagesRanges = cursor.getTotalNonContiguousDeletedMessagesRange();
    subStats.nonContiguousDeletedMessagesRangesSerializedSize =
            cursor.getNonContiguousDeletedMessagesRangeSerializedSize();
    return subStats;
}

@Override
public void redeliverUnacknowledgedMessages(Consumer consumer) {
    Dispatcher dispatcher = getDispatcher();
    if (dispatcher != null) {
        dispatcher.redeliverUnacknowledgedMessages(consumer);
    }
}

@Override
public void redeliverUnacknowledgedMessages(Consumer consumer, List<PositionImpl> positions) {
    Dispatcher dispatcher = getDispatcher();
    if (dispatcher != null) {
        dispatcher.redeliverUnacknowledgedMessages(consumer, positions);
    }
}

// Drop positions that are already covered by the cursor's mark-delete position.
private void trimByMarkDeletePosition(List<PositionImpl> positions) {
    positions.removeIf(position -> cursor.getMarkDeletedPosition() != null
            && position.compareTo((PositionImpl) cursor.getMarkDeletedPosition()) <= 0);
}

@Override
public void addUnAckedMessages(int unAckMessages) {
    dispatcher.addUnAckedMessages(unAckMessages);
}

@Override
public synchronized long getNumberOfEntriesDelayed() {
    if (dispatcher != null) {
        return dispatcher.getNumberOfDelayedMessages();
    } else {
        return 0;
    }
}

@Override
public void markTopicWithBatchMessagePublished() {
    topic.markBatchMessagePublished();
}

// Called when the topic is terminated; tells caught-up consumers there is nothing more to read.
void topicTerminated() {
    if (cursor.getNumberOfEntriesInBacklog(false) == 0) {
        // notify the consumers if there are consumers connected to this topic.
        if (null != dispatcher) {
            // Immediately notify the consumer that there are no more available messages
            dispatcher.getConsumers().forEach(Consumer::reachedEndOfTopic);
        }
    }
}

/**
 * Return a merged map that contains the cursor properties specified by user
 * (eg. when using compaction subscription) and the subscription properties.
 */
protected Map<String, Long> mergeCursorProperties(Map<String, Long> userProperties) {
    Map<String, Long> baseProperties = isReplicated() ? REPLICATED_SUBSCRIPTION_CURSOR_PROPERTIES
            : NON_REPLICATED_SUBSCRIPTION_CURSOR_PROPERTIES;

    if (userProperties.isEmpty()) {
        // Use only the static instance in the common case
        return baseProperties;
    } else {
        Map<String, Long> merged = new TreeMap<>();
        merged.putAll(userProperties);
        // Base properties win on key collisions.
        merged.putAll(baseProperties);
        return merged;
    }
}

@Override
public void processReplicatedSubscriptionSnapshot(ReplicatedSubscriptionsSnapshot snapshot) {
    ReplicatedSubscriptionSnapshotCache snapshotCache = this.replicatedSubscriptionSnapshotCache;
    if (snapshotCache != null) {
        // Defensive copy: the incoming snapshot may be reused/recycled by the caller.
        snapshotCache.addNewSnapshot(new ReplicatedSubscriptionsSnapshot().copyFrom(snapshot));
    }
}

/**
 * Terminate a transaction on this subscription: commit or abort the pending acks.
 * On abort with a single-active-consumer dispatcher, the active consumer gets the
 * aborted messages redelivered.
 */
@Override
public CompletableFuture<Void> endTxn(long txnidMostBits, long txnidLeastBits, int txnAction, long lowWaterMark) {
    TxnID txnID = new TxnID(txnidMostBits, txnidLeastBits);
    if (TxnAction.COMMIT.getValue() == txnAction) {
        return pendingAckHandle.commitTxn(txnID, Collections.emptyMap(), lowWaterMark);
    } else if (TxnAction.ABORT.getValue() == txnAction) {
        Consumer redeliverConsumer = null;
        if (getDispatcher() instanceof PersistentDispatcherSingleActiveConsumer) {
            redeliverConsumer = ((PersistentDispatcherSingleActiveConsumer) getDispatcher()).getActiveConsumer();
        }
        return pendingAckHandle.abortTxn(txnID, redeliverConsumer, lowWaterMark);
    } else {
        return FutureUtil.failedFuture(new NotAllowedException("Unsupported txnAction " + txnAction));
    }
}

@VisibleForTesting
public ManagedCursor getCursor() {
    return cursor;
}

public void syncBatchPositionBitSetForPendingAck(PositionImpl position) {
    this.pendingAckHandle.syncBatchPositionAckSetForTransaction(position);
}

public boolean checkIsCanDeleteConsumerPendingAck(PositionImpl position) {
    return this.pendingAckHandle.checkIsCanDeleteConsumerPendingAck(position);
}

public boolean checkAndUnblockIfStuck() {
    return dispatcher != null ? dispatcher.checkAndUnblockIfStuck() : false;
}

private static final Logger log = LoggerFactory.getLogger(PersistentSubscription.class);
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
// NOTE(review): AutoRest-generated protocol client — manual edits will be lost on regeneration.

package com.azure.iot.deviceupdate;

import com.azure.core.annotation.Generated;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceClient;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.exception.HttpResponseException;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.RequestOptions;
import com.azure.core.http.rest.Response;
import com.azure.core.util.BinaryData;
import com.azure.core.util.polling.SyncPoller;
import com.azure.iot.deviceupdate.implementation.DeviceManagementsImpl;
import reactor.core.publisher.Mono;

/** Initializes a new instance of the synchronous DeviceUpdateClient type. */
@ServiceClient(builder = DeviceUpdateClientBuilder.class)
public final class DeviceManagementClient {
    // Generated implementation that performs the actual service calls; this class only delegates.
    @Generated private final DeviceManagementsImpl serviceClient;

    /**
     * Initializes an instance of DeviceManagements client.
     *
     * @param serviceClient the service client implementation.
     */
    @Generated
    DeviceManagementClient(DeviceManagementsImpl serviceClient) {
        this.serviceClient = serviceClient;
    }

    /**
     * Gets a list of all device classes (unique combinations of device manufacturer and model) for all devices
     * connected to Device Update for IoT Hub.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             deviceClassId: String
     *             compatProperties: {
     *                 String: String
     *             }
     *             bestCompatibleUpdateId: {
     *                 provider: String
     *                 name: String
     *                 version: String
     *             }
     *         }
     *     ]
     *     nextLink: String
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return a list of all device classes (unique combinations of device manufacturer and model) for all devices
     *     connected to Device Update for IoT Hub.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BinaryData> listDeviceClasses(RequestOptions requestOptions) {
        return this.serviceClient.listDeviceClasses(requestOptions);
    }

    /**
     * Gets the properties of a device class.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     deviceClassId: String
     *     compatProperties: {
     *         String: String
     *     }
     *     bestCompatibleUpdateId: {
     *         provider: String
     *         name: String
     *         version: String
     *     }
     * }
     * }</pre>
     *
     * @param deviceClassId Device class identifier.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the properties of a device class along with {@link Response}.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> getDeviceClassWithResponse(String deviceClassId, RequestOptions requestOptions) {
        return this.serviceClient.getDeviceClassWithResponse(deviceClassId, requestOptions);
    }

    /**
     * Gets a list of installable updates for a device class.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             provider: String
     *             name: String
     *             version: String
     *         }
     *     ]
     *     nextLink: String
     * }
     * }</pre>
     *
     * @param deviceClassId Device class identifier.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return a list of installable updates for a device class.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BinaryData> listInstallableUpdatesForDeviceClass(
            String deviceClassId, RequestOptions requestOptions) {
        return this.serviceClient.listInstallableUpdatesForDeviceClass(deviceClassId, requestOptions);
    }

    /**
     * Gets a list of devices connected to Device Update for IoT Hub.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>filter</td><td>String</td><td>No</td><td>Restricts the set of devices returned. You can filter on
     *     device GroupId or DeviceClassId.</td></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             deviceId: String
     *             moduleId: String
     *             deviceClassId: String
     *             manufacturer: String
     *             model: String
     *             groupId: String
     *             lastAttemptedUpdateId: {
     *                 provider: String
     *                 name: String
     *                 version: String
     *             }
     *             deploymentStatus: String(Succeeded/InProgress/Failed/Canceled/Incompatible)
     *             installedUpdateId: (recursive schema, see installedUpdateId above)
     *             onLatestUpdate: boolean
     *             lastDeploymentId: String
     *             lastInstallResult: {
     *                 resultCode: int
     *                 extendedResultCode: int
     *                 resultDetails: String
     *                 stepResults: [
     *                     {
     *                         updateId: (recursive schema, see updateId above)
     *                         description: String
     *                         resultCode: int
     *                         extendedResultCode: int
     *                         resultDetails: String
     *                     }
     *                 ]
     *             }
     *         }
     *     ]
     *     nextLink: String
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return a list of devices connected to Device Update for IoT Hub.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BinaryData> listDevices(RequestOptions requestOptions) {
        return this.serviceClient.listDevices(requestOptions);
    }

    /**
     * Import existing devices from IoT Hub.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     *     <tr><td>action</td><td>String</td><td>Yes</td><td>Devices action.</td></tr>
     * </table>
     *
     * <p><strong>Request Body Schema</strong>
     *
     * <pre>{@code
     * String(Devices/Modules/All)
     * }</pre>
     *
     * @param importType The types of devices to import.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link Response} on successful completion of {@link Mono}.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    public SyncPoller<BinaryData, BinaryData> beginImportDevices(BinaryData importType, RequestOptions requestOptions) {
        return this.serviceClient.beginImportDevices(importType, requestOptions);
    }

    /**
     * Gets the device properties and latest deployment status for a device connected to Device Update for IoT Hub.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     deviceId: String
     *     moduleId: String
     *     deviceClassId: String
     *     manufacturer: String
     *     model: String
     *     groupId: String
     *     lastAttemptedUpdateId: {
     *         provider: String
     *         name: String
     *         version: String
     *     }
     *     deploymentStatus: String(Succeeded/InProgress/Failed/Canceled/Incompatible)
     *     installedUpdateId: (recursive schema, see installedUpdateId above)
     *     onLatestUpdate: boolean
     *     lastDeploymentId: String
     *     lastInstallResult: {
     *         resultCode: int
     *         extendedResultCode: int
     *         resultDetails: String
     *         stepResults: [
     *             {
     *                 updateId: (recursive schema, see updateId above)
     *                 description: String
     *                 resultCode: int
     *                 extendedResultCode: int
     *                 resultDetails: String
     *             }
     *         ]
     *     }
     * }
     * }</pre>
     *
     * @param deviceId Device identifier in Azure IoT Hub.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the device properties and latest deployment status for a device connected to Device Update for IoT Hub
     *     along with {@link Response}.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> getDeviceWithResponse(String deviceId, RequestOptions requestOptions) {
        return this.serviceClient.getDeviceWithResponse(deviceId, requestOptions);
    }

    /**
     * Gets the device module properties and latest deployment status for a device module connected to Device Update
     * for IoT Hub.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     deviceId: String
     *     moduleId: String
     *     deviceClassId: String
     *     manufacturer: String
     *     model: String
     *     groupId: String
     *     lastAttemptedUpdateId: {
     *         provider: String
     *         name: String
     *         version: String
     *     }
     *     deploymentStatus: String(Succeeded/InProgress/Failed/Canceled/Incompatible)
     *     installedUpdateId: (recursive schema, see installedUpdateId above)
     *     onLatestUpdate: boolean
     *     lastDeploymentId: String
     *     lastInstallResult: {
     *         resultCode: int
     *         extendedResultCode: int
     *         resultDetails: String
     *         stepResults: [
     *             {
     *                 updateId: (recursive schema, see updateId above)
     *                 description: String
     *                 resultCode: int
     *                 extendedResultCode: int
     *                 resultDetails: String
     *             }
     *         ]
     *     }
     * }
     * }</pre>
     *
     * @param deviceId Device identifier in Azure IoT Hub.
     * @param moduleId Device module identifier in Azure IoT Hub.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the device module properties and latest deployment status for a device module connected to Device Update
     *     for IoT Hub along with {@link Response}.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> getDeviceModuleWithResponse(
            String deviceId, String moduleId, RequestOptions requestOptions) {
        return this.serviceClient.getDeviceModuleWithResponse(deviceId, moduleId, requestOptions);
    }

    /**
     * Gets the breakdown of how many devices are on their latest update, have new updates available, or are in
     * progress receiving new updates.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     totalDeviceCount: int
     *     onLatestUpdateDeviceCount: int
     *     newUpdatesAvailableDeviceCount: int
     *     updatesInProgressDeviceCount: int
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the breakdown of how many devices are on their latest update, have new updates available, or are in
     *     progress receiving new updates along with {@link Response}.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> getUpdateComplianceWithResponse(RequestOptions requestOptions) {
        return this.serviceClient.getUpdateComplianceWithResponse(requestOptions);
    }

    /**
     * Gets a list of available group device tags for all devices connected to Device Update for IoT Hub.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             tagName: String
     *             deviceCount: int
     *         }
     *     ]
     *     nextLink: String
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return a list of available group device tags for all devices connected to Device Update for IoT Hub.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BinaryData> listDeviceTags(RequestOptions requestOptions) {
        return this.serviceClient.listDeviceTags(requestOptions);
    }

    /**
     * Gets a count of how many devices have a device tag.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     tagName: String
     *     deviceCount: int
     * }
     * }</pre>
     *
     * @param tagName Tag name.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return a count of how many devices have a device tag along with {@link Response}.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> getDeviceTagWithResponse(String tagName, RequestOptions requestOptions) {
        return this.serviceClient.getDeviceTagWithResponse(tagName, requestOptions);
    }

    /**
     * Gets a list of all device groups.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             groupId: String
     *             groupType: String(DeviceClassIdAndIoTHubTag/InvalidDeviceClassIdAndIoTHubTag/DefaultDeviceClassId)
     *             tags: [
     *                 String
     *             ]
     *             createdDateTime: String
     *             deviceCount: Integer
     *             deploymentId: String
     *             deviceClassId: String
     *         }
     *     ]
     *     nextLink: String
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return a list of all device groups.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BinaryData> listGroups(RequestOptions requestOptions) {
        return this.serviceClient.listGroups(requestOptions);
    }

    /**
     * Gets the properties of a group.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     groupId: String
     *     groupType: String(DeviceClassIdAndIoTHubTag/InvalidDeviceClassIdAndIoTHubTag/DefaultDeviceClassId)
     *     tags: [
     *         String
     *     ]
     *     createdDateTime: String
     *     deviceCount: Integer
     *     deploymentId: String
     *     deviceClassId: String
     * }
     * }</pre>
     *
     * @param groupId Group identity.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the properties of a group along with {@link Response}.
*/ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> getGroupWithResponse(String groupId, RequestOptions requestOptions) { return this.serviceClient.getGroupWithResponse(groupId, requestOptions); } /** * Create or update a device group. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Request Body Schema</strong> * * <pre>{@code * { * groupId: String * groupType: String(DeviceClassIdAndIoTHubTag/InvalidDeviceClassIdAndIoTHubTag/DefaultDeviceClassId) * tags: [ * String * ] * createdDateTime: String * deviceCount: Integer * deploymentId: String * deviceClassId: String * } * }</pre> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * groupId: String * groupType: String(DeviceClassIdAndIoTHubTag/InvalidDeviceClassIdAndIoTHubTag/DefaultDeviceClassId) * tags: [ * String * ] * createdDateTime: String * deviceCount: Integer * deploymentId: String * deviceClassId: String * } * }</pre> * * @param groupId Group identity. * @param group The group properties. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return group details along with {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> createOrUpdateGroupWithResponse( String groupId, BinaryData group, RequestOptions requestOptions) { return this.serviceClient.createOrUpdateGroupWithResponse(groupId, group, requestOptions); } /** * Deletes a device group. 
* * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * @param groupId Group identity. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return the {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> deleteGroupWithResponse(String groupId, RequestOptions requestOptions) { return this.serviceClient.deleteGroupWithResponse(groupId, requestOptions); } /** * Get group update compliance information such as how many devices are on their latest update, how many need new * updates, and how many are in progress on receiving a new update. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * totalDeviceCount: int * onLatestUpdateDeviceCount: int * newUpdatesAvailableDeviceCount: int * updatesInProgressDeviceCount: int * } * }</pre> * * @param groupId Group identity. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return group update compliance information such as how many devices are on their latest update, how many need * new updates, and how many are in progress on receiving a new update along with {@link Response}. 
*/ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> getGroupUpdateComplianceWithResponse(String groupId, RequestOptions requestOptions) { return this.serviceClient.getGroupUpdateComplianceWithResponse(groupId, requestOptions); } /** * Get the best available updates for a group and a count of how many devices need each update. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>filter</td><td>String</td><td>No</td><td>Restricts the set of bestUpdates returned. You can filter on update Provider, Name and Version property.</td></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * value: [ * { * updateId: { * provider: String * name: String * version: String * } * deviceCount: int * } * ] * nextLink: String * } * }</pre> * * @param groupId Group identity. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return the best available updates for a group and a count of how many devices need each update. */ @Generated @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<BinaryData> listBestUpdatesForGroup(String groupId, RequestOptions requestOptions) { return this.serviceClient.listBestUpdatesForGroup(groupId, requestOptions); } /** * Gets a list of deployments for a group. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>filter</td><td>String</td><td>No</td><td>Restricts the set of deployments returned. 
You can filter on update Provider, Name and Version property.</td></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * value: [ * { * deploymentId: String * startDateTime: String * updateId: { * provider: String * name: String * version: String * } * groupId: String * isCanceled: Boolean * isRetried: Boolean * } * ] * nextLink: String * } * }</pre> * * @param groupId Group identity. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return a list of deployments for a group. */ @Generated @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<BinaryData> listDeploymentsForGroup(String groupId, RequestOptions requestOptions) { return this.serviceClient.listDeploymentsForGroup(groupId, requestOptions); } /** * Gets the properties of a deployment. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * deploymentId: String * startDateTime: String * updateId: { * provider: String * name: String * version: String * } * groupId: String * isCanceled: Boolean * isRetried: Boolean * } * }</pre> * * @param groupId Group identity. * @param deploymentId Deployment identifier. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return the properties of a deployment along with {@link Response}. 
*/ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> getDeploymentWithResponse( String groupId, String deploymentId, RequestOptions requestOptions) { return this.serviceClient.getDeploymentWithResponse(groupId, deploymentId, requestOptions); } /** * Creates or updates a deployment. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Request Body Schema</strong> * * <pre>{@code * { * deploymentId: String * startDateTime: String * updateId: { * provider: String * name: String * version: String * } * groupId: String * isCanceled: Boolean * isRetried: Boolean * } * }</pre> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * deploymentId: String * startDateTime: String * updateId: { * provider: String * name: String * version: String * } * groupId: String * isCanceled: Boolean * isRetried: Boolean * } * }</pre> * * @param deploymentId Deployment identifier. * @param groupId Group identity. * @param deployment The deployment properties. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return deployment metadata along with {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> createOrUpdateDeploymentWithResponse( String deploymentId, String groupId, BinaryData deployment, RequestOptions requestOptions) { return this.serviceClient.createOrUpdateDeploymentWithResponse( deploymentId, groupId, deployment, requestOptions); } /** * Deletes a deployment. 
* * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * @param groupId Group identity. * @param deploymentId Deployment identifier. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return the {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> deleteDeploymentWithResponse( String groupId, String deploymentId, RequestOptions requestOptions) { return this.serviceClient.deleteDeploymentWithResponse(groupId, deploymentId, requestOptions); } /** * Gets the status of a deployment including a breakdown of how many devices in the deployment are in progress, * completed, or failed. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * deploymentState: String(Active/Inactive/Canceled) * totalDevices: Integer * devicesInProgressCount: Integer * devicesCompletedFailedCount: Integer * devicesCompletedSucceededCount: Integer * devicesCanceledCount: Integer * } * }</pre> * * @param groupId Group identity. * @param deploymentId Deployment identifier. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return the status of a deployment including a breakdown of how many devices in the deployment are in progress, * completed, or failed along with {@link Response}. 
*/ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> getDeploymentStatusWithResponse( String groupId, String deploymentId, RequestOptions requestOptions) { return this.serviceClient.getDeploymentStatusWithResponse(groupId, deploymentId, requestOptions); } /** * Gets a list of devices in a deployment along with their state. Useful for getting a list of failed devices. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>filter</td><td>String</td><td>No</td><td>Restricts the set of deployment device states returned. You can filter on deviceId and moduleId and/or deviceState.</td></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * value: [ * { * deviceId: String * moduleId: String * retryCount: int * movedOnToNewDeployment: boolean * deviceState: String(Succeeded/InProgress/Failed/Canceled/Incompatible) * } * ] * nextLink: String * } * }</pre> * * @param groupId Group identity. * @param deploymentId Deployment identifier. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return a list of devices in a deployment along with their state. */ @Generated @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<BinaryData> listDeploymentDevices( String groupId, String deploymentId, RequestOptions requestOptions) { return this.serviceClient.listDeploymentDevices(groupId, deploymentId, requestOptions); } /** * Retrieve operation status. 
* * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Header Parameters</strong> * * <table border="1"> * <caption>Header Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>ifNoneMatch</td><td>String</td><td>No</td><td>Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value.</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * operationId: String * status: String(Undefined/NotStarted/Running/Succeeded/Failed) * error: { * code: String * message: String * target: String * details: [ * (recursive schema, see above) * ] * innererror: { * code: String * message: String * errorDetail: String * innerError: (recursive schema, see innerError above) * } * occurredDateTime: String * } * traceId: String * lastActionDateTime: String * createdDateTime: String * etag: String * } * }</pre> * * @param operationId Operation identifier. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return operation metadata along with {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> getOperationWithResponse(String operationId, RequestOptions requestOptions) { return this.serviceClient.getOperationWithResponse(operationId, requestOptions); } /** * Get a list of all device import operations. Completed operations are kept for 7 days before auto-deleted. 
* * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>filter</td><td>String</td><td>No</td><td>Restricts the set of operations returned. Only one specific filter is supported: "status eq 'NotStarted' or status eq 'Running'"</td></tr> * <tr><td>top</td><td>String</td><td>No</td><td>Specifies a non-negative integer n that limits the number of items returned from a collection. The service returns the number of available items up to but not greater than the specified value n.</td></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * value: [ * { * operationId: String * status: String(Undefined/NotStarted/Running/Succeeded/Failed) * error: { * code: String * message: String * target: String * details: [ * (recursive schema, see above) * ] * innererror: { * code: String * message: String * errorDetail: String * innerError: (recursive schema, see innerError above) * } * occurredDateTime: String * } * traceId: String * lastActionDateTime: String * createdDateTime: String * etag: String * } * ] * nextLink: String * } * }</pre> * * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return a list of all device import operations. */ @Generated @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<BinaryData> listOperations(RequestOptions requestOptions) { return this.serviceClient.listOperations(requestOptions); } /** * Start the device diagnostics log collection operation on specified devices. 
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Request Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     operationId: String
     *     deviceList: [
     *         {
     *             deviceId: String
     *             moduleId: String
     *         }
     *     ]
     *     description: String
     *     createdDateTime: String
     *     lastActionDateTime: String
     *     status: String(Undefined/NotStarted/Running/Succeeded/Failed)
     * }
     * }</pre>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     operationId: String
     *     deviceList: [
     *         {
     *             deviceId: String
     *             moduleId: String
     *         }
     *     ]
     *     description: String
     *     createdDateTime: String
     *     lastActionDateTime: String
     *     status: String(Undefined/NotStarted/Running/Succeeded/Failed)
     * }
     * }</pre>
     *
     * @param operationId Operation identifier.
     * @param logCollectionRequest The log collection request body (see Request Body Schema above).
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return diagnostics request body along with {@link Response}.
     */
    @Generated
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> collectLogsWithResponse(
            String operationId, BinaryData logCollectionRequest, RequestOptions requestOptions) {
        return this.serviceClient.collectLogsWithResponse(operationId, logCollectionRequest, requestOptions);
    }

    /**
     * Get the device diagnostics log collection operation.
* * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * operationId: String * deviceList: [ * { * deviceId: String * moduleId: String * } * ] * description: String * createdDateTime: String * lastActionDateTime: String * status: String(Undefined/NotStarted/Running/Succeeded/Failed) * } * }</pre> * * @param operationId Operation identifier. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return the device diagnostics log collection operation along with {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> getLogCollectionOperationWithResponse( String operationId, RequestOptions requestOptions) { return this.serviceClient.getLogCollectionOperationWithResponse(operationId, requestOptions); } /** * Get all device diagnostics log collection operations. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * value: [ * { * operationId: String * deviceList: [ * { * deviceId: String * moduleId: String * } * ] * description: String * createdDateTime: String * lastActionDateTime: String * status: String(Undefined/NotStarted/Running/Succeeded/Failed) * } * ] * nextLink: String * } * }</pre> * * @param requestOptions The options to configure the HTTP request before HTTP client sends it. 
* @throws HttpResponseException thrown if the request is rejected by server. * @return all device diagnostics log collection operations. */ @Generated @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<BinaryData> listLogCollectionOperations(RequestOptions requestOptions) { return this.serviceClient.listLogCollectionOperations(requestOptions); } /** * Get device diagnostics log collection operation with detailed status. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * operationId: String * createdDateTime: String * lastActionDateTime: String * status: String(Undefined/NotStarted/Running/Succeeded/Failed) * deviceStatus: [ * { * deviceId: String * moduleId: String * status: String(Undefined/NotStarted/Running/Succeeded/Failed) * resultCode: String * extendedResultCode: String * logLocation: String * } * ] * description: String * } * }</pre> * * @param operationId Operation identifier. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return device diagnostics log collection operation with detailed status along with {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> getLogCollectionOperationDetailedStatusWithResponse( String operationId, RequestOptions requestOptions) { return this.serviceClient.getLogCollectionOperationDetailedStatusWithResponse(operationId, requestOptions); } /** * Stops a deployment. 
* * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>action</td><td>String</td><td>Yes</td><td>Cancel deployment action.</td></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * deploymentId: String * startDateTime: String * updateId: { * provider: String * name: String * version: String * } * groupId: String * isCanceled: Boolean * isRetried: Boolean * } * }</pre> * * @param groupId Group identity. * @param deploymentId Deployment identifier. * @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return deployment metadata along with {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> stopDeploymentWithResponse( String groupId, String deploymentId, RequestOptions requestOptions) { return this.serviceClient.stopDeploymentWithResponse(groupId, deploymentId, requestOptions); } /** * Retries a deployment with failed devices. * * <p><strong>Query Parameters</strong> * * <table border="1"> * <caption>Query Parameters</caption> * <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr> * <tr><td>action</td><td>String</td><td>Yes</td><td>Retry deployment action.</td></tr> * <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr> * </table> * * <p><strong>Response Body Schema</strong> * * <pre>{@code * { * deploymentId: String * startDateTime: String * updateId: { * provider: String * name: String * version: String * } * groupId: String * isCanceled: Boolean * isRetried: Boolean * } * }</pre> * * @param groupId Group identity. * @param deploymentId Deployment identifier. 
* @param requestOptions The options to configure the HTTP request before HTTP client sends it. * @throws HttpResponseException thrown if the request is rejected by server. * @return deployment metadata along with {@link Response}. */ @Generated @ServiceMethod(returns = ReturnType.SINGLE) public Response<BinaryData> retryDeploymentWithResponse( String groupId, String deploymentId, RequestOptions requestOptions) { return this.serviceClient.retryDeploymentWithResponse(groupId, deploymentId, requestOptions); } }
package demo.java.v2c06.InternalFrameTest;

import java.awt.*;
import java.awt.event.*;
import java.beans.*;
import javax.swing.*;

/**
 * This program demonstrates the use of internal frames.
 * @version 1.11 2007-08-01
 * @author Cay Horstmann
 */
public class InternalFrameTest
{
    public static void main(String[] args)
    {
        // Build and show the UI on the event dispatch thread.
        EventQueue.invokeLater(new Runnable()
        {
            public void run()
            {
                JFrame frame = new DesktopFrame();
                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.setVisible(true);
            }
        });
    }
}

/**
 * This desktop frame contains editor panes that show HTML documents.
 */
class DesktopFrame extends JFrame
{
    public DesktopFrame()
    {
        setTitle("InternalFrameTest");
        setSize(DEFAULT_WIDTH, DEFAULT_HEIGHT);

        desktop = new JDesktopPane();
        add(desktop, BorderLayout.CENTER);

        // set up menus

        JMenuBar menuBar = new JMenuBar();
        setJMenuBar(menuBar);
        JMenu fileMenu = new JMenu("File");
        menuBar.add(fileMenu);

        JMenuItem openItem = new JMenuItem("New");
        openItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent event)
            {
                // Each "New" shows the next planet image, cycling through the list.
                createInternalFrame(new JLabel(new ImageIcon(planets[counter] + ".gif")),
                        planets[counter]);
                counter = (counter + 1) % planets.length;
            }
        });
        fileMenu.add(openItem);

        JMenuItem exitItem = new JMenuItem("Exit");
        exitItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent event)
            {
                System.exit(0);
            }
        });
        fileMenu.add(exitItem);

        JMenu windowMenu = new JMenu("Window");
        menuBar.add(windowMenu);

        JMenuItem nextItem = new JMenuItem("Next");
        nextItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent event)
            {
                selectNextWindow();
            }
        });
        windowMenu.add(nextItem);

        JMenuItem cascadeItem = new JMenuItem("Cascade");
        cascadeItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent event)
            {
                cascadeWindows();
            }
        });
        windowMenu.add(cascadeItem);

        JMenuItem tileItem = new JMenuItem("Tile");
        tileItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent event)
            {
                tileWindows();
            }
        });
        windowMenu.add(tileItem);

        final JCheckBoxMenuItem dragOutlineItem = new JCheckBoxMenuItem("Drag Outline");
        dragOutlineItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent event)
            {
                // Outline dragging only paints a frame outline while moving --
                // cheaper than live (full-content) dragging.
                desktop.setDragMode(dragOutlineItem.isSelected()
                        ? JDesktopPane.OUTLINE_DRAG_MODE
                        : JDesktopPane.LIVE_DRAG_MODE);
            }
        });
        windowMenu.add(dragOutlineItem);
    }

    /**
     * Creates an internal frame on the desktop.
     * @param c the component to display in the internal frame
     * @param t the title of the internal frame.
     */
    public void createInternalFrame(Component c, String t)
    {
        final JInternalFrame iframe = new JInternalFrame(t,
                true, // resizable
                true, // closable
                true, // maximizable
                true); // iconifiable

        iframe.add(c, BorderLayout.CENTER);
        desktop.add(iframe);

        iframe.setFrameIcon(new ImageIcon("document.gif"));

        // add listener to confirm frame closing
        iframe.addVetoableChangeListener(new VetoableChangeListener()
        {
            public void vetoableChange(PropertyChangeEvent event) throws PropertyVetoException
            {
                String name = event.getPropertyName();
                Object value = event.getNewValue();

                // we only want to check attempts to close a frame
                if (name.equals("closed") && value.equals(true))
                {
                    // ask user if it is ok to close
                    int result = JOptionPane.showInternalConfirmDialog(iframe, "OK to close?",
                            "Select an Option", JOptionPane.YES_NO_OPTION);

                    // if the user doesn't agree, veto the close
                    if (result != JOptionPane.YES_OPTION)
                        throw new PropertyVetoException("User canceled close", event);
                }
            }
        });

        // position frame
        int width = desktop.getWidth() / 2;
        int height = desktop.getHeight() / 2;
        // setBounds replaces the deprecated Component.reshape(...)
        iframe.setBounds(nextFrameX, nextFrameY, width, height);
        // setVisible(true) replaces the deprecated Component.show()
        iframe.setVisible(true);

        // select the frame--might be vetoed
        try
        {
            iframe.setSelected(true);
        }
        catch (PropertyVetoException e)
        {
            // selection was vetoed by another frame; the new frame simply stays unselected
        }

        // title-bar height; used as the cascade offset for the next frame
        frameDistance = iframe.getHeight() - iframe.getContentPane().getHeight();

        // compute placement for next frame

        nextFrameX += frameDistance;
        nextFrameY += frameDistance;
        if (nextFrameX + width > desktop.getWidth()) nextFrameX = 0;
        if (nextFrameY + height > desktop.getHeight()) nextFrameY = 0;
    }

    /**
     * Cascades the non-iconified internal frames of the desktop.
     */
    public void cascadeWindows()
    {
        int x = 0;
        int y = 0;
        int width = desktop.getWidth() / 2;
        int height = desktop.getHeight() / 2;

        for (JInternalFrame frame : desktop.getAllFrames())
        {
            if (!frame.isIcon())
            {
                try
                {
                    // try to make maximized frames resizable; this might be vetoed
                    frame.setMaximum(false);
                    // setBounds replaces the deprecated reshape(...)
                    frame.setBounds(x, y, width, height);

                    x += frameDistance;
                    y += frameDistance;
                    // wrap around at the desktop edge
                    if (x + width > desktop.getWidth()) x = 0;
                    if (y + height > desktop.getHeight()) y = 0;
                }
                catch (PropertyVetoException e)
                {
                    // un-maximize was vetoed; leave this frame where it is
                }
            }
        }
    }

    /**
     * Tiles the non-iconified internal frames of the desktop.
     */
    public void tileWindows()
    {
        // count frames that aren't iconized
        int frameCount = 0;
        for (JInternalFrame frame : desktop.getAllFrames())
            if (!frame.isIcon()) frameCount++;
        if (frameCount == 0) return;

        int rows = (int) Math.sqrt(frameCount);
        int cols = frameCount / rows;
        int extra = frameCount % rows;
        // number of columns with an extra row

        int width = desktop.getWidth() / cols;
        int height = desktop.getHeight() / rows;
        int r = 0;
        int c = 0;
        for (JInternalFrame frame : desktop.getAllFrames())
        {
            if (!frame.isIcon())
            {
                try
                {
                    frame.setMaximum(false);
                    // setBounds replaces the deprecated reshape(...)
                    frame.setBounds(c * width, r * height, width, height);
                    r++;
                    if (r == rows)
                    {
                        r = 0;
                        c++;
                        if (c == cols - extra)
                        {
                            // start adding an extra row
                            rows++;
                            height = desktop.getHeight() / rows;
                        }
                    }
                }
                catch (PropertyVetoException e)
                {
                    // un-maximize was vetoed; skip repositioning this frame
                }
            }
        }
    }

    /**
     * Brings the next non-iconified internal frame to the front.
     */
    public void selectNextWindow()
    {
        JInternalFrame[] frames = desktop.getAllFrames();
        for (int i = 0; i < frames.length; i++)
        {
            if (frames[i].isSelected())
            {
                // find next frame that isn't an icon and can be selected
                int next = (i + 1) % frames.length;
                while (next != i)
                {
                    if (!frames[next].isIcon())
                    {
                        try
                        {
                            // all other frames are icons or veto selection
                            frames[next].setSelected(true);
                            frames[next].toFront();
                            frames[i].toBack();
                            return;
                        }
                        catch (PropertyVetoException e)
                        {
                            // selection vetoed; keep looking for a selectable frame
                        }
                    }
                    next = (next + 1) % frames.length;
                }
            }
        }
    }

    private JDesktopPane desktop;
    private int nextFrameX;     // x position of the next internal frame
    private int nextFrameY;     // y position of the next internal frame
    private int frameDistance;  // cascade offset between successive frames
    private int counter;        // index of the next planet image to show
    private static final String[] planets = { "Mercury", "Venus", "Earth", "Mars", "Jupiter",
            "Saturn", "Uranus", "Neptune", "Pluto", };
    private static final int DEFAULT_WIDTH = 600;
    private static final int DEFAULT_HEIGHT = 400;
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguousUnderConstruction; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.junit.Test; import java.io.IOException; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Mockito.*; /** * Verify that TestCommitBlockSynchronization is idempotent. 
*/
public class TestCommitBlockSynchronization {
  // Fixed identifiers shared by every test case; values are arbitrary but
  // genStamp must match the recovery ID set up in makeNameSystemSpy.
  private static final long blockId = 100;
  private static final long length = 200;
  private static final long genStamp = 300;

  /**
   * Builds a Mockito spy of an FSNamesystem that is wired up just enough for
   * commitBlockSynchronization() to run: the given file appears undeleted (it
   * gets a parent directory and an inode-map entry), its last block is an
   * under-construction block with recovery initialized at {@code genStamp},
   * and file-closing plus edit-log calls are stubbed out.
   *
   * NOTE(review): the doReturn(...) stubbing order matters for Mockito spies;
   * do not reorder these calls.
   */
  private FSNamesystem makeNameSystemSpy(Block block, INodeFile file)
      throws IOException {
    Configuration conf = new Configuration();
    FSImage image = new FSImage(conf);
    final DatanodeStorageInfo[] targets = {};

    FSNamesystem namesystem = new FSNamesystem(conf, image);
    namesystem.setImageLoaded(true);

    // set file's parent as root and put the file to inodeMap, so
    // FSNamesystem's isFileDeleted() method will return false on this file
    if (file.getParent() == null) {
      INodeDirectory mparent = mock(INodeDirectory.class);
      INodeDirectory parent = new INodeDirectory(mparent.getId(), new byte[0],
          mparent.getPermissionStatus(), mparent.getAccessTime());
      parent.setLocalName(new byte[0]);
      parent.addChild(file);
      file.setParent(parent);
    }
    namesystem.dir.getINodeMap().put(file);

    FSNamesystem namesystemSpy = spy(namesystem);
    // The file's last block: under construction, with block recovery already
    // initialized at genStamp (this becomes the expected recovery ID).
    BlockInfoContiguousUnderConstruction blockInfo = new BlockInfoContiguousUnderConstruction(
        block, (short) 1, HdfsServerConstants.BlockUCState.UNDER_CONSTRUCTION, targets);
    blockInfo.setBlockCollection(file);
    blockInfo.setGenerationStamp(genStamp);
    blockInfo.initializeBlockRecovery(genStamp);
    doReturn(true).when(file).removeLastBlock(any(Block.class));
    doReturn(true).when(file).isUnderConstruction();

    doReturn(blockInfo).when(namesystemSpy).getStoredBlock(any(Block.class));
    doReturn(blockInfo).when(file).getLastBlock();
    // Skip the real close-file path and edit logging; they are not under test.
    doReturn("").when(namesystemSpy).closeFileCommitBlocks(
        any(INodeFile.class), any(BlockInfoContiguous.class));
    doReturn(mock(FSEditLog.class)).when(namesystemSpy).getEditLog();

    return namesystemSpy;
  }

  /** Returns a bare Mockito mock of an under-construction file. */
  private INodeFile mockFileUnderConstruction() {
    INodeFile file = mock(INodeFile.class);
    return file;
  }

  /**
   * commitBlockSynchronization() must be idempotent: repeating the call with
   * identical arguments -- both while the block is under construction and
   * after it has been 'completed' -- must not throw.
   */
  @Test
  public void testCommitBlockSynchronization() throws IOException {
    INodeFile file = mockFileUnderConstruction();
    Block block = new Block(blockId, length, genStamp);
    FSNamesystem namesystemSpy = makeNameSystemSpy(block, file);
    DatanodeID[] newTargets = new DatanodeID[0];

    ExtendedBlock lastBlock = new ExtendedBlock();
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, false,
        false, newTargets, null);

    // Repeat the call to make sure it does not throw
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, false,
        false, newTargets, null);

    // Simulate 'completing' the block.
    BlockInfoContiguous completedBlockInfo = new BlockInfoContiguous(block, (short) 1);
    completedBlockInfo.setBlockCollection(file);
    completedBlockInfo.setGenerationStamp(genStamp);
    doReturn(completedBlockInfo).when(namesystemSpy)
        .getStoredBlock(any(Block.class));
    doReturn(completedBlockInfo).when(file).getLastBlock();

    // Repeat the call to make sure it does not throw
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, false,
        false, newTargets, null);
  }

  /**
   * A call whose generation stamp does not match the block's recovery ID must
   * be rejected with an IOException.
   */
  @Test
  public void testCommitBlockSynchronization2() throws IOException {
    INodeFile file = mockFileUnderConstruction();
    Block block = new Block(blockId, length, genStamp);
    FSNamesystem namesystemSpy = makeNameSystemSpy(block, file);
    DatanodeID[] newTargets = new DatanodeID[0];

    ExtendedBlock lastBlock = new ExtendedBlock();
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, false,
        false, newTargets, null);

    // Make sure the call fails if the generation stamp does not match
    // the block recovery ID.
    try {
      namesystemSpy.commitBlockSynchronization(
          lastBlock, genStamp - 1, length, false,
          false, newTargets, null);
      fail("Failed to get expected IOException on generation stamp/" +
          "recovery ID mismatch");
    } catch (IOException ioe) {
      // Expected exception.
    }
  }

  /**
   * Idempotence with deleteblock=true: repeating the call after the last
   * block has already been removed from the file must not throw.
   */
  @Test
  public void testCommitBlockSynchronizationWithDelete() throws IOException {
    INodeFile file = mockFileUnderConstruction();
    Block block = new Block(blockId, length, genStamp);
    FSNamesystem namesystemSpy = makeNameSystemSpy(block, file);
    DatanodeID[] newTargets = new DatanodeID[0];

    ExtendedBlock lastBlock = new ExtendedBlock();
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, false,
        true, newTargets, null);

    // Simulate removing the last block from the file.
    doReturn(false).when(file).removeLastBlock(any(Block.class));

    // Repeat the call to make sure it does not throw
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, false,
        true, newTargets, null);
  }

  /**
   * Idempotence with closeFile=true: repeated calls, before and after the
   * block is 'completed', must succeed.
   */
  @Test
  public void testCommitBlockSynchronizationWithClose() throws IOException {
    INodeFile file = mockFileUnderConstruction();
    Block block = new Block(blockId, length, genStamp);
    FSNamesystem namesystemSpy = makeNameSystemSpy(block, file);
    DatanodeID[] newTargets = new DatanodeID[0];

    ExtendedBlock lastBlock = new ExtendedBlock();
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, true,
        false, newTargets, null);

    // Repeat the call to make sure it returns true
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, true,
        false, newTargets, null);

    BlockInfoContiguous completedBlockInfo = new BlockInfoContiguous(block, (short) 1);
    completedBlockInfo.setBlockCollection(file);
    completedBlockInfo.setGenerationStamp(genStamp);
    doReturn(completedBlockInfo).when(namesystemSpy)
        .getStoredBlock(any(Block.class));
    doReturn(completedBlockInfo).when(file).getLastBlock();

    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, true,
        false, newTargets, null);
  }

  /**
   * closeFile=true with a target datanode that is not registered with the
   * namesystem: the call (and its repetition) must still succeed.
   */
  @Test
  public void testCommitBlockSynchronizationWithCloseAndNonExistantTarget()
      throws IOException {
    INodeFile file = mockFileUnderConstruction();
    Block block = new Block(blockId, length, genStamp);
    FSNamesystem namesystemSpy = makeNameSystemSpy(block, file);
    DatanodeID[] newTargets = new DatanodeID[]{
        new DatanodeID("0.0.0.0", "nonexistantHost", "1", 0, 0, 0, 0)};

    ExtendedBlock lastBlock = new ExtendedBlock();
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, true,
        false, newTargets, null);

    // Repeat the call to make sure it returns true
    namesystemSpy.commitBlockSynchronization(
        lastBlock, genStamp, length, true,
        false, newTargets, null);
  }
}
/*
 * Copyright (c) 1996, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.misc;

import static java.lang.Thread.State.*;
import java.util.Properties;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class VM {

    /* The following methods used to be native methods that instruct
     * the VM to selectively suspend certain threads in low-memory
     * situations. They are inherently dangerous and not implementable
     * on native threads. We removed them in JDK 1.2. The skeletons
     * remain so that existing applications that use these methods
     * will still work.
     */
    private static boolean suspended = false;

    /** @deprecated */
    @Deprecated
    public static boolean threadsSuspended() {
        return suspended;
    }

    public static boolean allowThreadSuspension(ThreadGroup g, boolean b) {
        return g.allowThreadSuspension(b);
    }

    /** @deprecated */
    @Deprecated
    public static boolean suspendThreads() {
        suspended = true;
        return true;
    }

    // Causes any suspended threadgroups to be resumed.
    /** @deprecated */
    @Deprecated
    public static void unsuspendThreads() {
        suspended = false;
    }

    // Causes threadgroups no longer marked suspendable to be resumed.
    /** @deprecated */
    @Deprecated
    public static void unsuspendSomeThreads() {
    }

    /* Deprecated fields and methods -- Memory advice not supported in 1.2 */

    /** @deprecated */
    @Deprecated
    public static final int STATE_GREEN = 1;

    /** @deprecated */
    @Deprecated
    public static final int STATE_YELLOW = 2;

    /** @deprecated */
    @Deprecated
    public static final int STATE_RED = 3;

    /** @deprecated */
    @Deprecated
    public static final int getState() {
        return STATE_GREEN;
    }

    /** @deprecated */
    @Deprecated
    public static void registerVMNotification(VMNotification n) { }

    /** @deprecated */
    @Deprecated
    public static void asChange(int as_old, int as_new) { }

    /** @deprecated */
    @Deprecated
    public static void asChange_otherthread(int as_old, int as_new) { }

    /*
     * Not supported in 1.2 because these will have to be exported as
     * JVM functions, and we are not sure we want do that. Leaving
     * here so it can be easily resurrected -- just remove the //
     * comments.
     */

    /**
     * Resume Java profiling.  All profiling data is added to any
     * earlier profiling, unless <code>resetJavaProfiler</code> is
     * called in between.  If profiling was not started from the
     * command line, <code>resumeJavaProfiler</code> will start it.
     * <p>
     *
     * NOTE: Profiling must be enabled from the command line for a
     * java.prof report to be automatically generated on exit; if not,
     * writeJavaProfilerReport must be invoked to write a report.
     *
     * @see     resetJavaProfiler
     * @see     writeJavaProfilerReport
     */
    // public native static void resumeJavaProfiler();

    /**
     * Suspend Java profiling.
     */
    // public native static void suspendJavaProfiler();

    /**
     * Initialize Java profiling.  Any accumulated profiling
     * information is discarded.
     */
    // public native static void resetJavaProfiler();

    /**
     * Write the current profiling contents to the file "java.prof".
     * If the file already exists, it will be overwritten.
     */
    // public native static void writeJavaProfilerReport();


    private static volatile boolean booted = false;

    // Invoked by System.initializeSystemClass just before returning.
    // Subsystems that are invoked during initialization can check this
    // property in order to avoid doing things that should wait until the
    // application class loader has been set up.
    //
    public static void booted() {
        booted = true;
    }

    public static boolean isBooted() {
        return booted;
    }

    // A user-settable upper limit on the maximum amount of allocatable direct
    // buffer memory.  This value may be changed during VM initialization if
    // "java" is launched with "-XX:MaxDirectMemorySize=<size>".
    //
    // The initial value of this field is arbitrary; during JRE initialization
    // it will be reset to the value specified on the command line, if any,
    // otherwise to Runtime.getRuntime.maxDirectMemory().
    //
    private static long directMemory = 64 * 1024 * 1024;

    // Returns the maximum amount of allocatable direct buffer memory.
    // The directMemory variable is initialized during system initialization
    // in the saveAndRemoveProperties method.
    //
    public static long maxDirectMemory() {
        return directMemory;
    }

    // User-controllable flag that determines if direct buffers should be page
    // aligned. The "-XX:+PageAlignDirectMemory" option can be used to force
    // buffers, allocated by ByteBuffer.allocateDirect, to be page aligned.
    private static boolean pageAlignDirectMemory;

    // Returns {@code true} if the direct buffers should be page aligned. This
    // variable is initialized by saveAndRemoveProperties.
    public static boolean isDirectMemoryPageAligned() {
        return pageAlignDirectMemory;
    }

    // A user-settable boolean to determine whether ClassLoader.loadClass should
    // accept array syntax.  This value may be changed during VM initialization
    // via the system property "sun.lang.ClassLoader.allowArraySyntax".
    //
    // The default for 1.5 is "true", array syntax is allowed.  In 1.6, the
    // default will be "false".  The presence of this system property to
    // control array syntax allows applications the ability to preview this new
    // behaviour.
    //
    private static boolean defaultAllowArraySyntax = false;
    private static boolean allowArraySyntax = defaultAllowArraySyntax;

    // The allowArraySyntax boolean is initialized during system initialization
    // in the saveAndRemoveProperties method.
    //
    // It is initialized based on the value of the system property
    // "sun.lang.ClassLoader.allowArraySyntax".  If the system property is not
    // provided, the default for 1.5 is "true".  In 1.6, the default will be
    // "false".  If the system property is provided, then the value of
    // allowArraySyntax will be equal to "true" if Boolean.parseBoolean()
    // returns "true".   Otherwise, the field will be set to "false".
    //
    public static boolean allowArraySyntax() {
        return allowArraySyntax;
    }

    /**
     * Returns the system property of the specified key saved at
     * system initialization time.  This method should only be used
     * for the system properties that are not changed during runtime.
     * It accesses a private copy of the system properties so
     * that user's locking of the system properties object will not
     * cause the library to deadlock.
     *
     * Note that the saved system properties do not include
     * the ones set by sun.misc.Version.init().
     */
    public static String getSavedProperty(String key) {
        if (savedProps.isEmpty())
            throw new IllegalStateException("Should be non-empty if initialized");

        return savedProps.getProperty(key);
    }

    // TODO: the Property Management needs to be refactored and
    // the appropriate prop keys need to be accessible to the
    // calling classes to avoid duplication of keys.
    private static final Properties savedProps = new Properties();

    // Save a private copy of the system properties and remove
    // the system properties that are not intended for public access.
    //
    // This method can only be invoked during system initialization.
    public static void saveAndRemoveProperties(Properties props) {
        if (booted)
            throw new IllegalStateException("System initialization has completed");

        savedProps.putAll(props);

        // Set the maximum amount of direct memory.  This value is controlled
        // by the vm option -XX:MaxDirectMemorySize=<size>.
        // The maximum amount of allocatable direct buffer memory (in bytes)
        // from the system property sun.nio.MaxDirectMemorySize set by the VM.
        // The system property will be removed.
        String s = (String)props.remove("sun.nio.MaxDirectMemorySize");
        if (s != null) {
            if (s.equals("-1")) {
                // -XX:MaxDirectMemorySize not given, take default
                directMemory = Runtime.getRuntime().maxMemory();
            } else {
                long l = Long.parseLong(s);
                if (l > -1)
                    directMemory = l;
            }
        }

        // Check if direct buffers should be page aligned
        s = (String)props.remove("sun.nio.PageAlignDirectMemory");
        if ("true".equals(s))
            pageAlignDirectMemory = true;

        // Set a boolean to determine whether ClassLoader.loadClass accepts
        // array syntax.  This value is controlled by the system property
        // "sun.lang.ClassLoader.allowArraySyntax".
        s = props.getProperty("sun.lang.ClassLoader.allowArraySyntax");
        allowArraySyntax = (s == null
                                ? defaultAllowArraySyntax
                                : Boolean.parseBoolean(s));

        // Remove other private system properties
        // used by java.lang.Integer.IntegerCache
        props.remove("java.lang.Integer.IntegerCache.high");

        // used by java.util.zip.ZipFile
        props.remove("sun.zip.disableMemoryMapping");

        // used by sun.launcher.LauncherHelper
        props.remove("sun.java.launcher.diag");
    }

    // Initialize any miscellenous operating system settings that need to be
    // set for the class libraries.
    //
    public static void initializeOSEnvironment() {
        if (!booted) {
            OSEnvironment.initialize();
        }
    }

    /* Current count of objects pending for finalization */
    private static volatile int finalRefCount = 0;

    /* Peak count of objects pending for finalization */
    private static volatile int peakFinalRefCount = 0;

    /*
     * Gets the number of objects pending for finalization.
     *
     * @return the number of objects pending for finalization.
     */
    public static int getFinalRefCount() {
        return finalRefCount;
    }

    /*
     * Gets the peak number of objects pending for finalization.
     *
     * @return the peak number of objects pending for finalization.
     */
    public static int getPeakFinalRefCount() {
        return peakFinalRefCount;
    }

    /*
     * Add <tt>n</tt> to the objects pending for finalization count.
     *
     * @param n an integer value to be added to the objects pending
     * for finalization count
     */
    public static void addFinalRefCount(int n) {
        // The caller must hold lock to synchronize the update.
        // (the fields are volatile for visibility only; += is not atomic)

        finalRefCount += n;
        if (finalRefCount > peakFinalRefCount) {
            peakFinalRefCount = finalRefCount;
        }
    }

    /**
     * Returns Thread.State for the given threadStatus
     */
    public static Thread.State toThreadState(int threadStatus) {
        if ((threadStatus & JVMTI_THREAD_STATE_RUNNABLE) != 0) {
            return RUNNABLE;
        } else if ((threadStatus & JVMTI_THREAD_STATE_BLOCKED_ON_MONITOR_ENTER) != 0) {
            return BLOCKED;
        } else if ((threadStatus & JVMTI_THREAD_STATE_WAITING_INDEFINITELY) != 0) {
            return WAITING;
        } else if ((threadStatus & JVMTI_THREAD_STATE_WAITING_WITH_TIMEOUT) != 0) {
            return TIMED_WAITING;
        } else if ((threadStatus & JVMTI_THREAD_STATE_TERMINATED) != 0) {
            return TERMINATED;
        } else if ((threadStatus & JVMTI_THREAD_STATE_ALIVE) == 0) {
            return NEW;
        } else {
            // alive but none of the specific bits set: report RUNNABLE
            return RUNNABLE;
        }
    }

    /* The threadStatus field is set by the VM at state transition
     * in the hotspot implementation. Its value is set according to
     * the JVM TI specification GetThreadState function.
     */
    private final static int JVMTI_THREAD_STATE_ALIVE = 0x0001;
    private final static int JVMTI_THREAD_STATE_TERMINATED = 0x0002;
    private final static int JVMTI_THREAD_STATE_RUNNABLE = 0x0004;
    private final static int JVMTI_THREAD_STATE_BLOCKED_ON_MONITOR_ENTER = 0x0400;
    private final static int JVMTI_THREAD_STATE_WAITING_INDEFINITELY = 0x0010;
    private final static int JVMTI_THREAD_STATE_WAITING_WITH_TIMEOUT = 0x0020;

    static {
        initialize();
    }
    private native static void initialize();
}
package com.atlassian.maven.plugins.amps.codegen.prompter.common.web;

import java.util.Arrays;
import java.util.List;

import com.atlassian.maven.plugins.amps.codegen.prompter.AbstractModulePrompter;
import com.atlassian.maven.plugins.amps.codegen.prompter.AbstractPrompterTest;
import com.atlassian.maven.plugins.amps.codegen.prompter.PluginModulePrompter;
import com.atlassian.plugins.codegen.modules.common.Condition;
import com.atlassian.plugins.codegen.modules.common.Conditional;
import com.atlassian.plugins.codegen.modules.common.Conditions;
import com.atlassian.plugins.codegen.modules.common.Resource;
import com.atlassian.plugins.codegen.modules.common.web.WebResourceProperties;
import com.atlassian.plugins.codegen.modules.common.web.WebResourceTransformation;

import org.apache.commons.lang.StringUtils;
import org.codehaus.plexus.components.interactivity.Prompter;
import org.codehaus.plexus.components.interactivity.PrompterException;
import org.junit.Before;
import org.junit.Test;

import static com.atlassian.maven.plugins.amps.codegen.prompter.AbstractModulePrompter.MODULE_DESCRIP_PROMPT;
import static com.atlassian.maven.plugins.amps.codegen.prompter.AbstractModulePrompter.MODULE_KEY_PROMPT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests {@link WebResourcePrompter}: scripts an interactive prompt session with a
 * mocked Plexus {@link Prompter} and verifies the {@link WebResourceProperties}
 * built from the answers (both the basic flow and the advanced flow with
 * resources, dependencies, contexts, transformations and conditions).
 *
 * @since 3.6
 */
public class WebResourcePrompterTest extends AbstractPrompterTest
{
    public static final String MODULE_NAME = "My Web Resource";
    public static final String MODULE_KEY = "my-web-resource";
    public static final String DESCRIPTION = "The My Web Resource Plugin";
    public static final String I18N_NAME_KEY = "my-web-resource.name";
    public static final String I18N_DESCRIPTION_KEY = "my-web-resource.description";

    public static final String ADV_MODULE_NAME = "My Awesome Plugin";
    public static final String ADV_MODULE_KEY = "awesome-module";
    public static final String ADV_DESCRIPTION = "The Awesomest Plugin Ever";
    public static final String ADV_I18N_NAME_KEY = "awesome-plugin.name";
    public static final String ADV_I18N_DESCRIPTION_KEY = "pluginus-awesomeous.description";

    public static final String RESOURCE_NAME = "resourceCSS";
    public static final String RESOURCE_NAME_PATTERN = "templates/*.vm";
    public static final String DOWNLOAD_TYPE = "download";
    public static final String VELOCITY_TYPE = "velocity";
    public static final String RESOURCE_CSS_PATH = "templates/resource.css";
    public static final String RESOURCE_VM_PATH = "templates/resource.vm";
    public static final String PARAM_KEY = "paramKey";
    public static final String PARAM_VAL = "paramVal";
    public static final String DEPENDENCY = "web.resources:ajs";
    public static final String CUSTOM_CONTEXT = "my.context";
    public static final String TRANS_EXTENSION = "css";
    public static final String TRANS_KEY = "template";
    public static final String CONDITIONS_TYPE = "AND";
    public static final String CUSTOM_CONDITION = "com.atlassian.plugins.web.CustomCondition";

    Prompter prompter;

    @Before
    public void setup()
    {
        prompter = mock(Prompter.class);
    }

    /**
     * Basic flow: two resources are entered, advanced setup is declined, and the
     * derived key / description / i18n keys keep their defaults.
     */
    @Test
    public void basicPropertiesAreValid() throws PrompterException
    {
        when(prompter.prompt("Enter Plugin Module Name", "My Web Resource")).thenReturn(MODULE_NAME);
        when(prompter.prompt("Enter Resource Name (leave blank to use namePattern)")).thenReturn(RESOURCE_NAME)
                .thenReturn("");
        when(prompter.prompt("Enter Resource Name Pattern")).thenReturn(RESOURCE_NAME_PATTERN);
        when(prompter.prompt("Enter Resource Type", "download")).thenReturn(DOWNLOAD_TYPE)
                .thenReturn(VELOCITY_TYPE);
        when(prompter.prompt("Enter Location (path to resource file)")).thenReturn(RESOURCE_CSS_PATH)
                .thenReturn(RESOURCE_VM_PATH);
        when(prompter.prompt("Add Resource Parameter?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y")
                .thenReturn("N");
        when(prompter.prompt("params:\nparamKey->paramVal\nAdd Resource Parameter?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");
        when(prompter.prompt("param name")).thenReturn(PARAM_KEY);
        when(prompter.prompt("param value")).thenReturn(PARAM_VAL);
        when(prompter.prompt("Add Resource", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y")
                .thenReturn("N");
        when(prompter.prompt("Show Advanced Setup?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");
        when(prompter.prompt("Include Example Code?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");

        WebResourcePrompter modulePrompter = new WebResourcePrompter(prompter);
        modulePrompter.setUseAnsiColor(false);
        WebResourceProperties props = modulePrompter.getModulePropertiesFromInput(moduleLocation);

        assertEquals("wrong module name", MODULE_NAME, props.getModuleName());
        assertEquals("wrong module key", MODULE_KEY, props.getModuleKey());
        assertEquals("wrong description", DESCRIPTION, props.getDescription());
        assertEquals("wrong i18n name key", I18N_NAME_KEY, props.getNameI18nKey());
        assertEquals("wrong i18n desc key", I18N_DESCRIPTION_KEY, props.getDescriptionI18nKey());
    }

    /**
     * Advanced flow: overrides key/description/i18n keys, then adds a dependency,
     * two contexts (one built-in, one custom), a transformation, and an inverted
     * AND-condition with one parameter; every collected value is asserted.
     */
    @Test
    public void advancedPropertiesAreValid() throws PrompterException
    {
        when(prompter.prompt("Enter Plugin Module Name", "My Web Resource")).thenReturn(MODULE_NAME);
        when(prompter.prompt("Enter Resource Name (leave blank to use namePattern)")).thenReturn(RESOURCE_NAME)
                .thenReturn("");
        when(prompter.prompt("Enter Resource Name Pattern")).thenReturn(RESOURCE_NAME_PATTERN);
        when(prompter.prompt("Enter Resource Type", "download")).thenReturn(DOWNLOAD_TYPE)
                .thenReturn(VELOCITY_TYPE);
        when(prompter.prompt("Enter Location (path to resource file)")).thenReturn(RESOURCE_CSS_PATH)
                .thenReturn(RESOURCE_VM_PATH);
        when(prompter.prompt("Add Resource Parameter?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y")
                .thenReturn("N");
        when(prompter.prompt("params:\nparamKey->paramVal\nAdd Resource Parameter?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");
        when(prompter.prompt("param name")).thenReturn(PARAM_KEY);
        when(prompter.prompt("param value")).thenReturn(PARAM_VAL);
        when(prompter.prompt("Add Resource", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y")
                .thenReturn("N");

        when(prompter.prompt("Show Advanced Setup?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y");
        when(prompter.prompt(MODULE_KEY_PROMPT, MODULE_KEY)).thenReturn(ADV_MODULE_KEY);
        when(prompter.prompt(MODULE_DESCRIP_PROMPT, DESCRIPTION)).thenReturn(ADV_DESCRIPTION);
        when(prompter.prompt("i18n Name Key", I18N_NAME_KEY)).thenReturn(ADV_I18N_NAME_KEY);
        when(prompter.prompt("i18n Description Key", I18N_DESCRIPTION_KEY)).thenReturn(ADV_I18N_DESCRIPTION_KEY);

        when(prompter.prompt("Add Dependency?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y");
        when(prompter.prompt("Enter Dependency")).thenReturn(DEPENDENCY);
        when(prompter.prompt("values:\nweb.resources:ajs\nAdd Dependency?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");

        // Second context prompt has one fewer option because "atl.admin" was
        // already picked the first time around.
        when(prompter.prompt("Add Web Resource Context?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y")
                .thenReturn("Y")
                .thenReturn("N");
        when(prompter.prompt("Choose A Context\n1: atl.general\n2: atl.admin\n3: atl.userprofile\n4: Custom Context\nChoose a number: ", Arrays.asList("1", "2", "3", "4"), "1")).thenReturn("2");
        when(prompter.prompt("Choose A Context\n1: atl.general\n2: atl.userprofile\n3: Custom Context\nChoose a number: ", Arrays.asList("1", "2", "3"), "1")).thenReturn("3");
        when(prompter.prompt("Enter Context")).thenReturn(CUSTOM_CONTEXT);

        when(prompter.prompt("Add Web Resource Transformation?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y")
                .thenReturn("N");
        when(prompter.prompt("File Extension")).thenReturn(TRANS_EXTENSION);
        when(prompter.prompt("Transformer Key")).thenReturn(TRANS_KEY);
        when(prompter.prompt("Add Transformer Key?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");

        when(prompter.prompt("Add Conditions?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y")
                .thenReturn("N");
        when(prompter.prompt("Condition Type", PluginModulePrompter.ANDOR_ANSWERS, "AND")).thenReturn(CONDITIONS_TYPE);
        when(prompter.prompt("Enter Fully Qualified Condition Class", AbstractModulePrompter.DEFAULT_BASE_PACKAGE + ".web.condition.MyCondition")).thenReturn(CUSTOM_CONDITION);
        when(prompter.prompt("Add Condition?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");
        when(prompter.prompt("Add Condition Parameter?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y")
                .thenReturn("N");
        when(prompter.prompt("params:\nparamKey->paramVal\nAdd Condition Parameter?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");
        when(prompter.prompt("Invert Condition?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("Y");

        when(prompter.prompt("Include Example Code?", PluginModulePrompter.YN_ANSWERS, "N")).thenReturn("N");

        WebResourcePrompter modulePrompter = new WebResourcePrompter(prompter);
        modulePrompter.setUseAnsiColor(false);
        WebResourceProperties props = modulePrompter.getModulePropertiesFromInput(moduleLocation);

        assertEquals("wrong adv module name", MODULE_NAME, props.getModuleName());
        assertEquals("wrong adv module key", ADV_MODULE_KEY, props.getModuleKey());
        assertEquals("wrong adv description", ADV_DESCRIPTION, props.getDescription());
        assertEquals("wrong adv i18n name key", ADV_I18N_NAME_KEY, props.getNameI18nKey());
        assertEquals("wrong adv i18n desc key", ADV_I18N_DESCRIPTION_KEY, props.getDescriptionI18nKey());

        //resources
        List<Resource> resources = props.getResources();
        assertTrue("resources not found", !resources.isEmpty());
        assertEquals("wrong number of resources", 2, resources.size());

        Resource cssResource = resources.get(0);
        Resource vmResource = resources.get(1);

        assertEquals("wrong css resource name", RESOURCE_NAME, cssResource.getName());
        assertTrue("css name pattern found when name is set", StringUtils.isBlank(cssResource.getNamePattern()));
        assertEquals("wrong css resource type", DOWNLOAD_TYPE, cssResource.getType());
        assertEquals("wrong css resource location", RESOURCE_CSS_PATH, cssResource.getLocation());
        assertEquals("wrong number of css resource params", 1, cssResource.getParams()
                .size());
        assertTrue("css resource param key not found", cssResource.getParams()
                .containsKey(PARAM_KEY));
        assertEquals("wrong css resource param value", PARAM_VAL, cssResource.getParams()
                .get(PARAM_KEY));

        assertTrue("vm name found when name pattern is set", StringUtils.isBlank(vmResource.getName()));
        assertEquals("wrong vm resource name pattern", RESOURCE_NAME_PATTERN, vmResource.getNamePattern());
        assertEquals("wrong vm resource type", VELOCITY_TYPE, vmResource.getType());
        assertEquals("wrong vm resource location", RESOURCE_VM_PATH, vmResource.getLocation());
        assertEquals("wrong number of vm resource params", 0, vmResource.getParams()
                .size());

        //dependencies
        List<String> dependencies = props.getDependencies();
        assertEquals("wrong number of dependencies", 1, dependencies.size());
        // typo fix: message previously read "wronf dependency value"
        assertEquals("wrong dependency value", DEPENDENCY, dependencies.get(0));

        //contexts
        List<String> contexts = props.getContexts();
        assertEquals("wrong number of contexts", 2, contexts.size());
        assertTrue("admin context not found", contexts.contains("atl.admin"));
        assertTrue("custom context not found", contexts.contains(CUSTOM_CONTEXT));

        //transformations
        List<WebResourceTransformation> transformations = props.getTransformations();
        assertEquals("wrong number of transformations", 1, transformations.size());

        WebResourceTransformation transformation = transformations.get(0);
        assertEquals("wrong transformation extension", TRANS_EXTENSION, transformation.getExtension());

        List<String> keys = transformation.getTransformerKeys();
        // typo fixes: messages previously read "transfromer" / "worng"
        assertEquals("wrong number of transformer keys", 1, keys.size());
        assertEquals("wrong transformer key", TRANS_KEY, keys.get(0));

        //conditions
        List<Conditional> conditionals = props.getConditions();
        assertEquals("wrong number of conditionals", 1, conditionals.size());

        Conditional conditional = conditionals.get(0);
        assertTrue("first conditional is not Conditions", (conditional instanceof Conditions));

        Conditions conditions = (Conditions) conditional;
        assertEquals("wrong conditions type", CONDITIONS_TYPE, conditions.getType());

        List<Conditional> nestedConditions = conditions.getConditions();
        // typo fix: message previously read "conditiomals"
        assertEquals("wrong number of nested conditionals", 1, nestedConditions.size());

        Conditional nestedConditional = nestedConditions.get(0);
        assertTrue("nested conditional is not a Condition", (nestedConditional instanceof Condition));

        Condition condition = (Condition) nestedConditional;
        assertEquals("wrong number of condition params", 1, condition.getParams()
                .size());
        assertTrue("condition param key not found", condition.getParams()
                .containsKey(PARAM_KEY));
        assertEquals("wrong condition param value", PARAM_VAL, condition.getParams()
                .get(PARAM_KEY));
        assertTrue("condition should be inverted", condition.isInvert());
    }
}
package hu.bme.mit.inf.mdsd.one.app.statechart.futsal_report_generatorimpl;

import hu.bme.mit.inf.mdsd.one.app.statechart.TimeEvent;
import hu.bme.mit.inf.mdsd.one.app.statechart.ITimerService;

// NOTE(review): this looks like statechart-generator output (YAKINDU-style) —
// do not hand-edit logic; regenerate from the statechart model instead.
// The class continues past the end of this chunk.
public class Futsal_report_generatorStatemachine
		implements
			IFutsal_report_generatorStatemachine {

	// One TimeEvent per timed state; the second constructor argument is the
	// event's index into the timeEvents[] flag array below.
	private final TimeEvent futsal_report_generator_main_region__1st_Halftime_time_event_0 = new TimeEvent(
			true, 0);
	private final TimeEvent futsal_report_generator_main_region__2nd_Halftime_time_event_0 = new TimeEvent(
			true, 1);
	private final TimeEvent futsal_report_generator_main_region__1st_Extratime_time_event_0 = new TimeEvent(
			true, 2);
	private final TimeEvent futsal_report_generator_main_region__2nd_Extratime_time_event_0 = new TimeEvent(
			true, 3);
	private final TimeEvent futsal_report_generator_main_region_Breaktime_time_event_0 = new TimeEvent(
			true, 4);
	private final TimeEvent futsal_report_generator_main_region__1st_Visitor_Timeout_time_event_0 = new TimeEvent(
			true, 5);
	private final TimeEvent futsal_report_generator_main_region__1st_Home_Timeout_time_event_0 = new TimeEvent(
			true, 6);
	private final TimeEvent futsal_report_generator_main_region__2nd_Visitor_Timeout_time_event_0 = new TimeEvent(
			true, 7);
	private final TimeEvent futsal_report_generator_main_region__2nd_Home_Timeout_time_event_0 = new TimeEvent(
			true, 8);

	// Raised-flag per time event; set by onTimeEventRaised, cleared by clearEvents.
	private final boolean[] timeEvents = new boolean[9];

	// Interface scope "timer": in-events, variables, and the operation callback
	// used to play a sound on transitions.
	private final class SCITimerImpl implements SCITimer {

		private SCITimerOperationCallback operationCallback;

		public void setSCITimerOperationCallback(
				SCITimerOperationCallback operationCallback) {
			this.operationCallback = operationCallback;
		}

		// In-event flags: raiseX() latches the event until clearEvents().
		private boolean pause;

		public void raisePause() {
			pause = true;
		}

		private boolean continue_ID;

		public void raiseContinue() {
			continue_ID = true;
		}

		private boolean stop;

		public void raiseStop() {
			stop = true;
		}

		private boolean end_breaktime;

		public void raiseEnd_breaktime() {
			end_breaktime = true;
		}

		private boolean home_timeout;

		public void raiseHome_timeout() {
			home_timeout = true;
		}

		private boolean visitor_timeout;

		public void raiseVisitor_timeout() {
			visitor_timeout = true;
		}

		// Scope variables (durations, counters and status flags).
		// NOTE(review): units appear to be seconds (ht_long etc. default to 60
		// in enter() and the timers fire with period 1) — confirm against model.
		private int ht_long;

		public int getHt_long() {
			return ht_long;
		}

		public void setHt_long(int value) {
			this.ht_long = value;
		}

		private int bt_long;

		public int getBt_long() {
			return bt_long;
		}

		public void setBt_long(int value) {
			this.bt_long = value;
		}

		private int et_long;

		public int getEt_long() {
			return et_long;
		}

		public void setEt_long(int value) {
			this.et_long = value;
		}

		private int to_long;

		public int getTo_long() {
			return to_long;
		}

		public void setTo_long(int value) {
			this.to_long = value;
		}

		private boolean pause_v;

		public boolean getPause_v() {
			return pause_v;
		}

		public void setPause_v(boolean value) {
			this.pause_v = value;
		}

		private boolean stop_v;

		public boolean getStop_v() {
			return stop_v;
		}

		public void setStop_v(boolean value) {
			this.stop_v = value;
		}

		private int tick;

		public int getTick() {
			return tick;
		}

		public void setTick(int value) {
			this.tick = value;
		}

		private int breaktime_tick;

		public int getBreaktime_tick() {
			return breaktime_tick;
		}

		public void setBreaktime_tick(int value) {
			this.breaktime_tick = value;
		}

		private int timeout_tick;

		public int getTimeout_tick() {
			return timeout_tick;
		}

		public void setTimeout_tick(int value) {
			this.timeout_tick = value;
		}

		private boolean home_timeout_enabled;

		public boolean getHome_timeout_enabled() {
			return home_timeout_enabled;
		}

		public void setHome_timeout_enabled(boolean value) {
			this.home_timeout_enabled = value;
		}

		private boolean visitor_timeout_enabled;

		public boolean getVisitor_timeout_enabled() {
			return visitor_timeout_enabled;
		}

		public void setVisitor_timeout_enabled(boolean value) {
			this.visitor_timeout_enabled = value;
		}

		// Resets all latched in-events at the end of a run-to-completion step.
		public void clearEvents() {
			pause = false;
			continue_ID = false;
			stop = false;
			end_breaktime = false;
			home_timeout = false;
			visitor_timeout = false;
		}
	}

	private SCITimerImpl sCITimer;

	// Interface scope "fouls": foul counters per team and done-flags.
	private final class SCIFoulsImpl implements SCIFouls {

		private boolean continue_ID;

		public void raiseContinue() {
			continue_ID = true;
		}

		private int home_counter;

		public int getHome_counter() {
			return home_counter;
		}

		public void setHome_counter(int value) {
			this.home_counter = value;
		}

		private int visitors_counter;

		public int getVisitors_counter() {
			return visitors_counter;
		}

		public void setVisitors_counter(int value) {
			this.visitors_counter = value;
		}

		private boolean home_done;

		public boolean getHome_done() {
			return home_done;
		}

		public void setHome_done(boolean value) {
			this.home_done = value;
		}

		private boolean visitor_done;

		public boolean getVisitor_done() {
			return visitor_done;
		}

		public void setVisitor_done(boolean value) {
			this.visitor_done = value;
		}

		public void clearEvents() {
			continue_ID = false;
		}
	}

	private SCIFoulsImpl sCIFouls;

	// One constant per state; $NullState$ marks an empty slot in stateVector.
	// (Identifier spellings, including "Waitng", are generated — keep as is.)
	public enum State {
		main_region__1st_Halftime, main_region__2nd_Halftime, main_region__final_, main_region__1st_Extratime, main_region__2nd_Extratime, main_region_Breaktime, main_region_End_Match, main_region_End_2nd_Extratime, main_region__1st_Visitor_Timeout, main_region__1st_Home_Timeout, main_region_Init, main_region__2nd_Visitor_Timeout, main_region__2nd_Home_Timeout, main_region_Waiting_For_the_Start_of_1st_Halftime, main_region_Waiting_for_the_Start_of_Breaktime, main_region_Waitng_For_the_Start_of_2nd_Halftime, main_region_Waiting_For_the_Start_of_2nd_Extratime, fouls_Counting_Fouls, fouls_Doing_some_noise_to_Home, fouls_Doing_some_noise_to_Visitor, $NullState$
	};

	// Two orthogonal regions: slot 0 = main_region, slot 1 = fouls.
	private final State[] stateVector = new State[2];

	private int nextStateIndex;

	private ITimerService timerService;

	private long cycleStartTime;

	// Wires each time event back to this statemachine so timer callbacks land
	// in onTimeEventRaised().
	public Futsal_report_generatorStatemachine() {

		sCITimer = new SCITimerImpl();
		sCIFouls = new SCIFoulsImpl();

		futsal_report_generator_main_region__1st_Halftime_time_event_0
				.setStatemachine(this);
		futsal_report_generator_main_region__2nd_Halftime_time_event_0
				.setStatemachine(this);
		futsal_report_generator_main_region__1st_Extratime_time_event_0
				.setStatemachine(this);
		futsal_report_generator_main_region__2nd_Extratime_time_event_0
				.setStatemachine(this);
		futsal_report_generator_main_region_Breaktime_time_event_0
				.setStatemachine(this);
		futsal_report_generator_main_region__1st_Visitor_Timeout_time_event_0
				.setStatemachine(this);
		futsal_report_generator_main_region__1st_Home_Timeout_time_event_0
				.setStatemachine(this);
		futsal_report_generator_main_region__2nd_Visitor_Timeout_time_event_0
				.setStatemachine(this);
		futsal_report_generator_main_region__2nd_Home_Timeout_time_event_0
				.setStatemachine(this);
	}

	// Resets both region slots and all scope variables; requires the timer
	// service to have been injected first.
	public void init() {
		if (timerService == null) {
			throw new IllegalStateException("TimerService not set.");
		}

		for (int i = 0; i < 2; i++) {
			stateVector[i] = State.$NullState$;
		}

		clearEvents();
		clearOutEvents();

		sCITimer.ht_long = 0;

		sCITimer.bt_long = 0;

		sCITimer.et_long = 0;

		sCITimer.to_long = 0;

		sCITimer.pause_v = false;

		sCITimer.stop_v = false;

		sCITimer.tick = 0;

		sCITimer.breaktime_tick = 0;

		sCITimer.timeout_tick = 0;

		sCITimer.home_timeout_enabled = false;

		sCITimer.visitor_timeout_enabled = false;

		sCIFouls.home_counter = 0;

		sCIFouls.visitors_counter = 0;

		sCIFouls.home_done = false;

		sCIFouls.visitor_done = false;
	}

	// Enters the statechart: runs entry actions, seeds default durations (60)
	// and activates the initial state of each region.
	public void enter() {
		if (timerService == null) {
			throw new IllegalStateException("TimerService not set.");
		}

		cycleStartTime = timerService.getSystemTimeMillis();

		entryAction();

		sCITimer.home_timeout_enabled = true;

		sCITimer.visitor_timeout_enabled = true;

		sCITimer.raisePause();

		sCITimer.pause_v = true;

		sCITimer.stop_v = false;

		sCIFouls.home_counter = 0;

		sCIFouls.visitors_counter = 0;

		sCIFouls.home_done = false;

		sCIFouls.visitor_done = false;

		sCITimer.bt_long = 60;

		sCITimer.et_long = 60;

		sCITimer.ht_long = 60;

		sCITimer.to_long = 60;

		nextStateIndex = 0;
		stateVector[0] = State.main_region_Init;

		nextStateIndex = 1;
		stateVector[1] = State.fouls_Counting_Fouls;
	}

	// Exits the statechart: runs each active state's exit actions (cancelling
	// any running timer for that state) and finally the chart's exitAction().
	public void exit() {
		switch (stateVector[0]) {
			case main_region__1st_Halftime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__1st_Halftime_time_event_0);
				break;

			case main_region__2nd_Halftime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__2nd_Halftime_time_event_0);

				sCITimer.pause_v = true;
				break;

			case main_region__final_ :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;
				break;

			case main_region__1st_Extratime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__1st_Extratime_time_event_0);

				sCITimer.pause_v = true;

				sCITimer.operationCallback.playSound();
				break;

			case main_region__2nd_Extratime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__2nd_Extratime_time_event_0);

				sCITimer.operationCallback.playSound();
				break;

			case main_region_Breaktime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region_Breaktime_time_event_0);

				// Re-arm per-half timeouts and reset foul counters for the new half.
				sCITimer.home_timeout_enabled = true;

				sCITimer.visitor_timeout_enabled = true;

				sCIFouls.home_counter = 0;

				sCIFouls.visitors_counter = 0;

				sCITimer.operationCallback.playSound();

				sCIFouls.home_done = false;

				sCIFouls.visitor_done = false;
				break;

			case main_region_End_Match :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;
				break;

			case main_region_End_2nd_Extratime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				sCITimer.tick = 2 * sCITimer.ht_long + 1 * sCITimer.et_long;

				sCITimer.raiseStop();

				sCITimer.stop_v = true;
				break;

			case main_region__1st_Visitor_Timeout :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__1st_Visitor_Timeout_time_event_0);

				sCITimer.operationCallback.playSound();
				break;

			case main_region__1st_Home_Timeout :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__1st_Home_Timeout_time_event_0);

				sCITimer.operationCallback.playSound();
				break;

			case main_region_Init :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;
				break;

			case main_region__2nd_Visitor_Timeout :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__2nd_Visitor_Timeout_time_event_0);

				sCITimer.operationCallback.playSound();
				break;

			case main_region__2nd_Home_Timeout :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__2nd_Home_Timeout_time_event_0);

				sCITimer.operationCallback.playSound();
				break;

			case main_region_Waiting_For_the_Start_of_1st_Halftime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				sCITimer.tick = 0;
				break;

			case main_region_Waiting_for_the_Start_of_Breaktime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;
				break;

			case main_region_Waitng_For_the_Start_of_2nd_Halftime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;
				break;

			case main_region_Waiting_For_the_Start_of_2nd_Extratime :
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;
				break;

			default :
				break;
		}

		switch (stateVector[1]) {
			case fouls_Counting_Fouls :
				nextStateIndex = 1;
				stateVector[1] = State.$NullState$;
				break;

			case fouls_Doing_some_noise_to_Home :
				nextStateIndex = 1;
				stateVector[1] = State.$NullState$;

				sCIFouls.home_done = true;
				break;

			case fouls_Doing_some_noise_to_Visitor :
				nextStateIndex = 1;
				stateVector[1] = State.$NullState$;

				sCIFouls.visitor_done = true;
				break;

			default :
				break;
		}

		exitAction();
	}

	// Clears all latched events (scope events and time events).
	protected void clearEvents() {
		sCITimer.clearEvents();
		sCIFouls.clearEvents();

		for (int i = 0; i < timeEvents.length; i++) {
			timeEvents[i] = false;
		}
	}

	// No out-events declared in this chart.
	protected void clearOutEvents() {
	}

	// Checks whether the given state is active in its region's slot.
	public boolean isStateActive(State state) {
		switch (state) {
			case main_region__1st_Halftime :
				return stateVector[0] == State.main_region__1st_Halftime;
			case main_region__2nd_Halftime :
				return stateVector[0] == State.main_region__2nd_Halftime;
			case main_region__final_ :
				return stateVector[0] == State.main_region__final_;
			case main_region__1st_Extratime :
				return stateVector[0] == State.main_region__1st_Extratime;
			case main_region__2nd_Extratime :
				return stateVector[0] == State.main_region__2nd_Extratime;
			case main_region_Breaktime :
				return stateVector[0] == State.main_region_Breaktime;
			case main_region_End_Match :
				return stateVector[0] == State.main_region_End_Match;
			case main_region_End_2nd_Extratime :
				return stateVector[0] == State.main_region_End_2nd_Extratime;
			case main_region__1st_Visitor_Timeout :
				return stateVector[0] == State.main_region__1st_Visitor_Timeout;
			case main_region__1st_Home_Timeout :
				return stateVector[0] == State.main_region__1st_Home_Timeout;
			case main_region_Init :
				return stateVector[0] == State.main_region_Init;
			case main_region__2nd_Visitor_Timeout :
				return stateVector[0] == State.main_region__2nd_Visitor_Timeout;
			case main_region__2nd_Home_Timeout :
				return stateVector[0] == State.main_region__2nd_Home_Timeout;
			case main_region_Waiting_For_the_Start_of_1st_Halftime :
				return stateVector[0] == State.main_region_Waiting_For_the_Start_of_1st_Halftime;
			case main_region_Waiting_for_the_Start_of_Breaktime :
				return stateVector[0] == State.main_region_Waiting_for_the_Start_of_Breaktime;
			case main_region_Waitng_For_the_Start_of_2nd_Halftime :
				return stateVector[0] == State.main_region_Waitng_For_the_Start_of_2nd_Halftime;
			case main_region_Waiting_For_the_Start_of_2nd_Extratime :
				return stateVector[0] == State.main_region_Waiting_For_the_Start_of_2nd_Extratime;
			case fouls_Counting_Fouls :
				return stateVector[1] == State.fouls_Counting_Fouls;
			case fouls_Doing_some_noise_to_Home :
				return stateVector[1] == State.fouls_Doing_some_noise_to_Home;
			case fouls_Doing_some_noise_to_Visitor :
				return stateVector[1] == State.fouls_Doing_some_noise_to_Visitor;
			default :
				return false;
		}
	}

	public void setTimerService(ITimerService timerService) {
		this.timerService = timerService;
	}

	public ITimerService getTimerService() {
		return timerService;
	}

	// Callback from the timer service: latch the fired time event by index.
	public void onTimeEventRaised(TimeEvent timeEvent) {
		timeEvents[timeEvent.getIndex()] = true;
	}

	public SCITimer getSCITimer() {
		return sCITimer;
	}

	public SCIFouls getSCIFouls() {
		return sCIFouls;
	}

	/* Entry action for statechart 'futsal_report_generator'. */
	private void entryAction() {
	}

	/* Exit action for state 'futsal_report_generator'. */
	private void exitAction() {
	}

	/* The reactions of state 1st Halftime.
	 * Priority: halftime elapsed > visitor timeout > home timeout > clock tick. */
	private void reactMain_region_1st_Halftime() {
		if (sCITimer.tick >= 1 * sCITimer.ht_long) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.resetTimer(
							futsal_report_generator_main_region__1st_Halftime_time_event_0);

			sCITimer.operationCallback.playSound();

			// Clamp the clock to exactly the end of the first half.
			sCITimer.tick = 1 * sCITimer.ht_long;

			nextStateIndex = 0;
			stateVector[0] = State.main_region_Waiting_for_the_Start_of_Breaktime;
		} else {
			if (sCITimer.visitor_timeout && sCITimer.visitor_timeout_enabled) {
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__1st_Halftime_time_event_0);

				getTimerService()
						.setTimer(
								futsal_report_generator_main_region__1st_Visitor_Timeout_time_event_0,
								1, cycleStartTime);

				// Each team gets one timeout per half.
				sCITimer.visitor_timeout_enabled = false;

				sCITimer.timeout_tick = 0;

				nextStateIndex = 0;
				stateVector[0] = State.main_region__1st_Visitor_Timeout;
			} else {
				if (sCITimer.home_timeout && sCITimer.home_timeout_enabled) {
					nextStateIndex = 0;
					stateVector[0] = State.$NullState$;

					getTimerService()
							.resetTimer(
									futsal_report_generator_main_region__1st_Halftime_time_event_0);

					getTimerService()
							.setTimer(
									futsal_report_generator_main_region__1st_Home_Timeout_time_event_0,
									1, cycleStartTime);

					sCITimer.home_timeout_enabled = false;

					sCITimer.timeout_tick = 0;

					nextStateIndex = 0;
					stateVector[0] = State.main_region__1st_Home_Timeout;
				} else {
					if (timeEvents[futsal_report_generator_main_region__1st_Halftime_time_event_0
							.getIndex()]) {
						sCITimer.tick += 1;
					}
				}
			}
		}
	}

	/* The reactions of state 2nd Halftime.
	 * Priority: match time elapsed > home timeout > visitor timeout > tick. */
	private void reactMain_region_2nd_Halftime() {
		if (sCITimer.tick >= 2 * sCITimer.ht_long) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.resetTimer(
							futsal_report_generator_main_region__2nd_Halftime_time_event_0);

			sCITimer.pause_v = true;

			sCITimer.tick = 2 * sCITimer.ht_long;

			sCITimer.operationCallback.playSound();

			nextStateIndex = 0;
			stateVector[0] = State.main_region_End_Match;
		} else {
			if (sCITimer.home_timeout && sCITimer.home_timeout_enabled) {
				nextStateIndex = 0;
				stateVector[0] = State.$NullState$;

				getTimerService()
						.resetTimer(
								futsal_report_generator_main_region__2nd_Halftime_time_event_0);

				sCITimer.pause_v = true;

				getTimerService()
						.setTimer(
								futsal_report_generator_main_region__2nd_Home_Timeout_time_event_0,
								1, cycleStartTime);

				sCITimer.home_timeout_enabled = false;

				sCITimer.timeout_tick = 0;

				nextStateIndex = 0;
				stateVector[0] = State.main_region__2nd_Home_Timeout;
			} else {
				if (sCITimer.visitor_timeout
						&& sCITimer.visitor_timeout_enabled) {
					nextStateIndex = 0;
					stateVector[0] = State.$NullState$;

					getTimerService()
							.resetTimer(
									futsal_report_generator_main_region__2nd_Halftime_time_event_0);

					sCITimer.pause_v = true;

					getTimerService()
							.setTimer(
									futsal_report_generator_main_region__2nd_Visitor_Timeout_time_event_0,
									1, cycleStartTime);

					sCITimer.visitor_timeout_enabled = false;

					sCITimer.timeout_tick = 0;

					nextStateIndex = 0;
					stateVector[0] = State.main_region__2nd_Visitor_Timeout;
				} else {
					if (timeEvents[futsal_report_generator_main_region__2nd_Halftime_time_event_0
							.getIndex()]) {
						sCITimer.tick += 1;
					}
				}
			}
		}
	}

	/* The reactions of state null. */
	private void reactMain_region__final_0() {
	}

	/* The reactions of state 1st Extratime. */
	private void reactMain_region_1st_Extratime() {
		if (sCITimer.tick >= 2 * sCITimer.ht_long + 1 * sCITimer.et_long) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.resetTimer(
							futsal_report_generator_main_region__1st_Extratime_time_event_0);

			sCITimer.pause_v = true;

			sCITimer.operationCallback.playSound();

			sCITimer.tick = 2 * sCITimer.ht_long + 1 * sCITimer.et_long;

			nextStateIndex = 0;
			stateVector[0] = State.main_region_Waiting_For_the_Start_of_2nd_Extratime;
		} else {
			if (timeEvents[futsal_report_generator_main_region__1st_Extratime_time_event_0
					.getIndex()]) {
				sCITimer.tick += 1;
			}
		}
	}

	/* The reactions of state 2nd Extratime. */
	private void reactMain_region_2nd_Extratime() {
		if (sCITimer.tick >= 2 * sCITimer.ht_long + 2 * sCITimer.et_long) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.resetTimer(
							futsal_report_generator_main_region__2nd_Extratime_time_event_0);

			sCITimer.operationCallback.playSound();

			nextStateIndex = 0;
			stateVector[0] = State.main_region_End_2nd_Extratime;
		} else {
			if (timeEvents[futsal_report_generator_main_region__2nd_Extratime_time_event_0
					.getIndex()]) {
				sCITimer.tick += 1;
			}
		}
	}

	/* The reactions of state Breaktime.
	 * Leaves on elapsed break time OR an explicit end_breaktime event. */
	private void reactMain_region_Breaktime() {
		if (sCITimer.breaktime_tick >= sCITimer.bt_long
				|| sCITimer.end_breaktime) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService().resetTimer(
					futsal_report_generator_main_region_Breaktime_time_event_0);

			sCITimer.home_timeout_enabled = true;

			sCITimer.visitor_timeout_enabled = true;

			sCIFouls.home_counter = 0;

			sCIFouls.visitors_counter = 0;

			sCITimer.operationCallback.playSound();

			sCIFouls.home_done = false;

			sCIFouls.visitor_done = false;

			nextStateIndex = 0;
			stateVector[0] = State.main_region_Waitng_For_the_Start_of_2nd_Halftime;
		} else {
			if (timeEvents[futsal_report_generator_main_region_Breaktime_time_event_0
					.getIndex()]) {
				sCITimer.breaktime_tick += 1;
			}
		}
	}

	/* The reactions of state End Match.
	 * A continue event starts the first extra time. */
	private void reactMain_region_End_Match() {
		if (sCITimer.continue_ID) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.setTimer(
							futsal_report_generator_main_region__1st_Extratime_time_event_0,
							1, cycleStartTime);

			nextStateIndex = 0;
			stateVector[0] = State.main_region__1st_Extratime;
		}
	}

	/* The reactions of state End 2nd Extratime. */
	private void reactMain_region_End_2nd_Extratime() {
	}

	/* The reactions of state 1st Visitor Timeout.
	 * Returns to the 1st halftime once the timeout duration elapses. */
	private void reactMain_region_1st_Visitor_Timeout() {
		if (sCITimer.timeout_tick >= sCITimer.to_long) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.resetTimer(
							futsal_report_generator_main_region__1st_Visitor_Timeout_time_event_0);

			sCITimer.operationCallback.playSound();

			getTimerService()
					.setTimer(
							futsal_report_generator_main_region__1st_Halftime_time_event_0,
							1, cycleStartTime);

			nextStateIndex = 0;
			stateVector[0] = State.main_region__1st_Halftime;
		} else {
			if (timeEvents[futsal_report_generator_main_region__1st_Visitor_Timeout_time_event_0
					.getIndex()]) {
				sCITimer.timeout_tick += 1;
			}
		}
	}

	/* The reactions of state 1st Home Timeout. */
	private void reactMain_region_1st_Home_Timeout() {
		if (sCITimer.timeout_tick >= sCITimer.to_long) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.resetTimer(
							futsal_report_generator_main_region__1st_Home_Timeout_time_event_0);

			sCITimer.operationCallback.playSound();

			getTimerService()
					.setTimer(
							futsal_report_generator_main_region__1st_Halftime_time_event_0,
							1, cycleStartTime);

			nextStateIndex = 0;
			stateVector[0] = State.main_region__1st_Halftime;
		} else {
			if (timeEvents[futsal_report_generator_main_region__1st_Home_Timeout_time_event_0
					.getIndex()]) {
				sCITimer.timeout_tick += 1;
			}
		}
	}

	/* The reactions of state Init.
	 * A continue event arms the match for its first halftime. */
	private void reactMain_region_Init() {
		if (sCITimer.continue_ID) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			nextStateIndex = 0;
			stateVector[0] = State.main_region_Waiting_For_the_Start_of_1st_Halftime;
		}
	}

	/* The reactions of state 2nd Visitor Timeout. */
	private void reactMain_region_2nd_Visitor_Timeout() {
		if (sCITimer.timeout_tick >= sCITimer.to_long) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.resetTimer(
							futsal_report_generator_main_region__2nd_Visitor_Timeout_time_event_0);

			sCITimer.operationCallback.playSound();

			getTimerService()
					.setTimer(
							futsal_report_generator_main_region__2nd_Halftime_time_event_0,
							1, cycleStartTime);

			nextStateIndex = 0;
			stateVector[0] = State.main_region__2nd_Halftime;
		} else {
			if (timeEvents[futsal_report_generator_main_region__2nd_Visitor_Timeout_time_event_0
					.getIndex()]) {
				sCITimer.timeout_tick += 1;
			}
		}
	}

	/* The reactions of state 2nd Home Timeout. */
	private void reactMain_region_2nd_Home_Timeout() {
		if (sCITimer.timeout_tick >= sCITimer.to_long) {
			nextStateIndex = 0;
			stateVector[0] = State.$NullState$;

			getTimerService()
					.resetTimer(
							futsal_report_generator_main_region__2nd_Home_Timeout_time_event_0);

			sCITimer.operationCallback.playSound();

			getTimerService()
					.setTimer(
							futsal_report_generator_main_region__2nd_Halftime_time_event_0,
							1, cycleStartTime);

			nextStateIndex = 0;
			stateVector[0] = State.main_region__2nd_Halftime;
		} else {
			if (timeEvents[futsal_report_generator_main_region__2nd_Home_Timeout_time_event_0
					.getIndex()]) {
				sCITimer.timeout_tick += 1;
			}
		}
	}

	/* The reactions of state Waiting For the Start of 1st Halftime.
*/ private void reactMain_region_Waiting_For_the_Start_of_1st_Halftime() { if (sCITimer.continue_ID) { nextStateIndex = 0; stateVector[0] = State.$NullState$; sCITimer.tick = 0; getTimerService() .setTimer( futsal_report_generator_main_region__1st_Halftime_time_event_0, 1, cycleStartTime); nextStateIndex = 0; stateVector[0] = State.main_region__1st_Halftime; } } /* The reactions of state Waiting for the Start of Breaktime. */ private void reactMain_region_Waiting_for_the_Start_of_Breaktime() { if (sCITimer.continue_ID) { nextStateIndex = 0; stateVector[0] = State.$NullState$; getTimerService().setTimer( futsal_report_generator_main_region_Breaktime_time_event_0, 1, cycleStartTime); sCITimer.home_timeout_enabled = false; sCITimer.visitor_timeout_enabled = false; nextStateIndex = 0; stateVector[0] = State.main_region_Breaktime; } } /* The reactions of state Waitng For the Start of 2nd Halftime. */ private void reactMain_region_Waitng_For_the_Start_of_2nd_Halftime() { if (sCITimer.continue_ID) { nextStateIndex = 0; stateVector[0] = State.$NullState$; getTimerService() .setTimer( futsal_report_generator_main_region__2nd_Halftime_time_event_0, 1, cycleStartTime); nextStateIndex = 0; stateVector[0] = State.main_region__2nd_Halftime; } } /* The reactions of state Waiting For the Start of 2nd Extratime. */ private void reactMain_region_Waiting_For_the_Start_of_2nd_Extratime() { if (sCITimer.continue_ID) { nextStateIndex = 0; stateVector[0] = State.$NullState$; getTimerService() .setTimer( futsal_report_generator_main_region__2nd_Extratime_time_event_0, 1, cycleStartTime); sCITimer.tick = 2 * sCITimer.ht_long + 1 * sCITimer.et_long; nextStateIndex = 0; stateVector[0] = State.main_region__2nd_Extratime; } } /* The reactions of state Counting Fouls. 
*/
	private void reactFouls_Counting_Fouls() {
		// Home side is checked first and therefore takes priority when both
		// teams reach 5 fouls in the same cycle; each side is signalled at
		// most once per halftime (home_done / visitor_done latches).
		if (sCIFouls.home_counter == 5 && !sCIFouls.home_done) {
			nextStateIndex = 1;
			stateVector[1] = State.$NullState$;
			sCITimer.operationCallback.playSound();
			nextStateIndex = 1;
			stateVector[1] = State.fouls_Doing_some_noise_to_Home;
		} else {
			if (sCIFouls.visitors_counter == 5 && !sCIFouls.visitor_done) {
				nextStateIndex = 1;
				stateVector[1] = State.$NullState$;
				sCITimer.operationCallback.playSound();
				nextStateIndex = 1;
				stateVector[1] = State.fouls_Doing_some_noise_to_Visitor;
			}
		}
	}
	/* The reactions of state Doing some noise to Home. */
	private void reactFouls_Doing_some_noise_to_Home() {
		// Waits for acknowledgement (continue_ID), latches home_done so the
		// home signal does not retrigger, then returns to Counting Fouls.
		if (sCIFouls.continue_ID) {
			nextStateIndex = 1;
			stateVector[1] = State.$NullState$;
			sCIFouls.home_done = true;
			nextStateIndex = 1;
			stateVector[1] = State.fouls_Counting_Fouls;
		}
	}
	/* The reactions of state Doing some noise to Visitor. */
	private void reactFouls_Doing_some_noise_to_Visitor() {
		// Same as the home variant, latching visitor_done instead.
		if (sCIFouls.continue_ID) {
			nextStateIndex = 1;
			stateVector[1] = State.$NullState$;
			sCIFouls.visitor_done = true;
			nextStateIndex = 1;
			stateVector[1] = State.fouls_Counting_Fouls;
		}
	}

	/*
	 * Runs one statechart cycle: records the cycle start time, then
	 * dispatches the react method of the currently active state in every
	 * orthogonal region (vector index 0 = main_region, 1 = fouls).
	 */
	public void runCycle() {
		cycleStartTime = timerService.getSystemTimeMillis();
		clearOutEvents();
		for (nextStateIndex = 0; nextStateIndex < stateVector.length; nextStateIndex++) {
			switch (stateVector[nextStateIndex]) {
				case main_region__1st_Halftime :
					reactMain_region_1st_Halftime();
					break;
				case main_region__2nd_Halftime :
					reactMain_region_2nd_Halftime();
					break;
				case main_region__final_ :
					reactMain_region__final_0();
					break;
				case main_region__1st_Extratime :
					reactMain_region_1st_Extratime();
					break;
				case main_region__2nd_Extratime :
					reactMain_region_2nd_Extratime();
					break;
				case main_region_Breaktime :
					reactMain_region_Breaktime();
					break;
				case main_region_End_Match :
					reactMain_region_End_Match();
					break;
				case main_region_End_2nd_Extratime :
					reactMain_region_End_2nd_Extratime();
					break;
				case main_region__1st_Visitor_Timeout :
					reactMain_region_1st_Visitor_Timeout();
					break;
				case main_region__1st_Home_Timeout :
					reactMain_region_1st_Home_Timeout();
					break;
				case main_region_Init :
					reactMain_region_Init();
					break;
				case main_region__2nd_Visitor_Timeout :
					reactMain_region_2nd_Visitor_Timeout();
					break;
				case main_region__2nd_Home_Timeout :
					reactMain_region_2nd_Home_Timeout();
					break;
				case main_region_Waiting_For_the_Start_of_1st_Halftime :
					reactMain_region_Waiting_For_the_Start_of_1st_Halftime();
					break;
				case main_region_Waiting_for_the_Start_of_Breaktime :
					reactMain_region_Waiting_for_the_Start_of_Breaktime();
					break;
				case main_region_Waitng_For_the_Start_of_2nd_Halftime :
					reactMain_region_Waitng_For_the_Start_of_2nd_Halftime();
					break;
				case main_region_Waiting_For_the_Start_of_2nd_Extratime :
					reactMain_region_Waiting_For_the_Start_of_2nd_Extratime();
					break;
				case fouls_Counting_Fouls :
					reactFouls_Counting_Fouls();
					break;
				case fouls_Doing_some_noise_to_Home :
					reactFouls_Doing_some_noise_to_Home();
					break;
				case fouls_Doing_some_noise_to_Visitor :
					reactFouls_Doing_some_noise_to_Visitor();
					break;
				default :
					// $NullState$
			}
		}
		clearEvents();
	}
}
// Copyright (c) 1999 Brian Wellington (bwelling@xbill.org) // Portions Copyright (c) 1999 Network Associates, Inc. package org.xbill.DNS; import java.io.*; import java.lang.reflect.*; import java.util.*; import org.xbill.DNS.utils.*; /** * The base class that all records are derived from. * * @author Brian Wellington */ abstract public class Record implements Cloneable { protected Name name; protected short type, dclass; protected int ttl; protected int wireLength = -1; protected Record() {} Record(Name _name, short _type, short _dclass, int _ttl) { name = _name; type = _type; dclass = _dclass; ttl = _ttl; } private static Class toClass(short type) throws ClassNotFoundException { String s = Record.class.toString(); /* * Remove "class " from the beginning, and "Record" from the end. * Then construct the new class name. */ return Class.forName(s.substring(6, s.length() - 6) + Type.string(type) + "Record"); } private static Record newRecord(Name name, short type, short dclass, int ttl, int length, DataByteInputStream in, Compression c) throws IOException { Record rec; try { Class rrclass; Constructor m; rrclass = toClass(type); m = rrclass.getDeclaredConstructor(new Class [] { Name.class, Short.TYPE, Integer.TYPE, Integer.TYPE, DataByteInputStream.class, Compression.class }); rec = (Record) m.newInstance(new Object [] { name, new Short(dclass), new Integer(ttl), new Integer(length), in, c }); return rec; } catch (ClassNotFoundException e) { rec = new UNKRecord(name, type, dclass, ttl, length, in, c); rec.wireLength = length; return rec; } catch (InvocationTargetException e) { if (Options.check("verbose")) { System.err.println("new record: " + e); System.err.println(e.getTargetException()); } return null; } catch (Exception e) { if (Options.check("verbose")) System.err.println("new record: " + e); return null; } } /** * Creates a new record, with the given parameters. 
* @return An object of a type extending Record */ public static Record newRecord(Name name, short type, short dclass, int ttl, int length, byte [] data) { DataByteInputStream dbs; if (data != null) dbs = new DataByteInputStream(data); else dbs = null; try { return newRecord(name, type, dclass, ttl, length, dbs, null); } catch (IOException e) { return null; } } /** * Creates a new empty record, with the given parameters. * @return An object of a type extending Record */ public static Record newRecord(Name name, short type, short dclass, int ttl) { return newRecord(name, type, dclass, ttl, 0, null); } /** * Creates a new empty record, with the given parameters. This method is * designed to create records that will be added to the QUERY section * of a message. * @return An object of a type extending Record */ public static Record newRecord(Name name, short type, short dclass) { return newRecord(name, type, dclass, 0, 0, null); } static Record fromWire(DataByteInputStream in, int section, Compression c) throws IOException { short type, dclass; int ttl; short length; Name name; Record rec; int start, datastart; start = in.getPos(); name = new Name(in, c); type = in.readShort(); dclass = in.readShort(); if (section == Section.QUESTION) return newRecord(name, type, dclass); ttl = in.readInt(); length = in.readShort(); datastart = in.getPos(); rec = newRecord(name, type, dclass, ttl, length, in, c); if (in.getPos() - datastart != length) throw new IOException("Invalid record length"); rec.wireLength = in.getPos() - start; return rec; } /** * Builds a Record from DNS uncompressed wire format. 
 */
public static Record
fromWire(byte [] b, int section) throws IOException {
	DataByteInputStream in = new DataByteInputStream(b);
	return fromWire(in, section, null);
}

/*
 * Writes this record to a stream, compressing names via c.  QUESTION
 * section entries stop after name/type/class (no TTL or rdata).
 */
void
toWire(DataByteOutputStream out, int section, Compression c)
throws IOException
{
	int start = out.getPos();
	name.toWire(out, c);
	out.writeShort(type);
	out.writeShort(dclass);
	if (section == Section.QUESTION)
		return;
	out.writeInt(ttl);
	int lengthPosition = out.getPos();
	out.writeShort(0); /* until we know better */
	rrToWire(out, c);
	// Back-patch the real RDLENGTH now that the rdata has been written.
	out.writeShortAt(out.getPos() - lengthPosition - 2, lengthPosition);
	wireLength = out.getPos() - start;
}

/**
 * Converts a Record into DNS uncompressed wire format.
 */
public byte []
toWire(int section) throws IOException {
	DataByteOutputStream out = new DataByteOutputStream();
	toWire(out, section, null);
	return out.toByteArray();
}

/*
 * Canonical form: always writes TTL and rdata, never compresses, and
 * lowercases names via toWireCanonical (used for DNSSEC-style digests).
 */
void
toWireCanonical(DataByteOutputStream out) throws IOException {
	name.toWireCanonical(out);
	out.writeShort(type);
	out.writeShort(dclass);
	out.writeInt(ttl);
	int lengthPosition = out.getPos();
	out.writeShort(0); /* until we know better */
	rrToWireCanonical(out);
	out.writeShortAt(out.getPos() - lengthPosition - 2, lengthPosition);
}

/**
 * Converts a Record into canonical DNS uncompressed wire format (all names are
 * converted to lowercase).
 */
public byte []
toWireCanonical() throws IOException {
	DataByteOutputStream out = new DataByteOutputStream();
	toWireCanonical(out);
	return out.toByteArray();
}

/*
 * Formats the data-independent prefix of the presentation form:
 * name, TTL, class (unless suppressed), and type.
 */
StringBuffer
toStringNoData() {
	StringBuffer sb = new StringBuffer();
	sb.append(name);
	sb.append("\t");
	if (Options.check("BINDTTL"))
		sb.append(TTL.format(ttl));
	else
		sb.append(ttl);
	sb.append(" ");
	if (dclass != DClass.IN || !Options.check("noPrintIN")) {
		sb.append(DClass.string(dclass));
		sb.append(" ");
	}
	sb.append(Type.string(type));
	sb.append("\t\t");
	return sb;
}

/**
 * Converts a Record into a String representation
 */
public String
toString() {
	StringBuffer sb = toStringNoData();
	sb.append("<unknown format>");
	return sb.toString();
}

/**
 * Builds a new Record from its textual representation.  Mirrors the
 * wire-format factory: the type-specific class is looked up reflectively,
 * unknown types fall back to UNKRecord, and other failures return null.
 */
public static Record
fromString(Name name, short type, short dclass, int ttl,
	   MyStringTokenizer st, Name origin)
throws IOException
{
	Record rec;
	try {
		Class rrclass;
		Constructor m;

		rrclass = toClass(type);
		m = rrclass.getDeclaredConstructor(new Class [] {
						Name.class, Short.TYPE,
						Integer.TYPE,
						MyStringTokenizer.class,
						Name.class, });
		rec = (Record) m.newInstance(new Object [] {
						name, new Short(dclass),
						new Integer(ttl), st,
						origin });
		return rec;
	}
	catch (ClassNotFoundException e) {
		rec = new UNKRecord(name, type, dclass, ttl, st, origin);
		return rec;
	}
	catch (InvocationTargetException e) {
		if (Options.check("verbose")) {
			System.err.println("from text: " + e);
			System.err.println(e.getTargetException());
		}
		return null;
	}
	catch (Exception e) {
		if (Options.check("verbose"))
			System.err.println("from text: " + e);
		return null;
	}
}

/**
 * Returns the record's name
 * @see Name
 */
public Name
getName() {
	return name;
}

/**
 * Returns record's type
 * @see Type
 */
public short
getType() {
	return type;
}

/**
 * Returns the type of RRset that this record would belong to.  For all types
 * except SIGRecord, this is equivalent to getType().
 * @return The type of record, if not SIGRecord.
If the type is SIGRecord, * the type covered is returned. * @see Type * @see RRset * @see SIGRecord */ public short getRRsetType() { return type; } /** * Returns the record's class */ public short getDClass() { return dclass; } /** * Returns the record's TTL */ public int getTTL() { return ttl; } /** * Returns the length of this record in wire format, based on the last time * this record was parsed from data or converted to data. The wire format * may or may not be compressed * @return The last known length, or -1 if the record has never been in wire * format */ public short getWireLength() { return (short) wireLength; } /** * Converts the type-specific RR to wire format - must be overriden */ abstract void rrToWire(DataByteOutputStream out, Compression c) throws IOException; /** * Converts the type-specific RR to canonical wire format - must be overriden * if the type-specific RR data includes a Name * @see Name */ void rrToWireCanonical(DataByteOutputStream out) throws IOException { rrToWire(out, null); } /** * Determines if two Records are identical */ public boolean equals(Object arg) { if (arg == null || !(arg instanceof Record)) return false; Record r = (Record) arg; try { byte [] array1 = toWire(Section.ANSWER); byte [] array2 = r.toWire(Section.ANSWER); if (array1.length != array2.length) return false; for (int i = 0; i < array1.length; i++) if (array1[i] != array2[i]) return false; return true; } catch (IOException e) { return false; } } /** * Generates a hash code based on the Record's data */ public int hashCode() { try { byte [] array1 = toWire(Section.ANSWER); return array1.hashCode(); } catch (IOException e) { return 0; } } /** * Creates a new record identical to the current record, but with a different * name. This is most useful for replacing the name of a wildcard record. */ public Record withName(Name name) { Record rec = null; try { rec = (Record) clone(); } catch (CloneNotSupportedException e) { } rec.name = name; return rec; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.runtime.controlprogram.parfor.opt; import java.io.IOException; import java.util.ArrayList; import org.apache.sysml.conf.ConfigurationManager; import org.apache.sysml.conf.DMLConfig; import org.apache.sysml.hops.Hop; import org.apache.sysml.hops.HopsException; import org.apache.sysml.hops.IndexingOp; import org.apache.sysml.hops.OptimizerUtils; import org.apache.sysml.hops.Hop.VisitStatus; import org.apache.sysml.hops.recompile.Recompiler; import org.apache.sysml.lops.LopProperties; import org.apache.sysml.lops.Lop; import org.apache.sysml.lops.LopsException; import org.apache.sysml.lops.compile.Dag; import org.apache.sysml.parser.DMLProgram; import org.apache.sysml.parser.DMLTranslator; import org.apache.sysml.parser.ForStatement; import org.apache.sysml.parser.ForStatementBlock; import org.apache.sysml.parser.IfStatement; import org.apache.sysml.parser.IfStatementBlock; import org.apache.sysml.parser.StatementBlock; import org.apache.sysml.parser.WhileStatement; import org.apache.sysml.parser.WhileStatementBlock; import org.apache.sysml.runtime.DMLRuntimeException; import org.apache.sysml.runtime.DMLUnsupportedOperationException; import 
org.apache.sysml.runtime.controlprogram.ForProgramBlock; import org.apache.sysml.runtime.controlprogram.IfProgramBlock; import org.apache.sysml.runtime.controlprogram.LocalVariableMap; import org.apache.sysml.runtime.controlprogram.ParForProgramBlock; import org.apache.sysml.runtime.controlprogram.Program; import org.apache.sysml.runtime.controlprogram.ProgramBlock; import org.apache.sysml.runtime.controlprogram.WhileProgramBlock; import org.apache.sysml.runtime.controlprogram.context.ExecutionContext; import org.apache.sysml.runtime.controlprogram.parfor.ProgramConverter; import org.apache.sysml.runtime.controlprogram.parfor.opt.OptNode.NodeType; import org.apache.sysml.runtime.instructions.Instruction; import org.apache.sysml.runtime.instructions.cp.ArithmeticBinaryCPInstruction; import org.apache.sysml.runtime.instructions.cp.Data; import org.apache.sysml.runtime.instructions.cp.FunctionCallCPInstruction; import org.apache.sysml.runtime.instructions.cp.ScalarObject; /** * */ public class ProgramRecompiler { /** * * @param rtprog * @param sbs * @return * @throws IOException * @throws DMLUnsupportedOperationException * @throws DMLRuntimeException * @throws LopsException * @throws HopsException */ public static ArrayList<ProgramBlock> generatePartitialRuntimeProgram(Program rtprog, ArrayList<StatementBlock> sbs) throws LopsException, DMLRuntimeException, DMLUnsupportedOperationException, IOException, HopsException { ArrayList<ProgramBlock> ret = new ArrayList<ProgramBlock>(); DMLConfig config = ConfigurationManager.getConfig(); //construct lops from hops if not existing DMLTranslator dmlt = new DMLTranslator(sbs.get(0).getDMLProg()); for( StatementBlock sb : sbs ) { dmlt.constructLops(sb); } //construct runtime program from lops for( StatementBlock sb : sbs ) { DMLProgram prog = sb.getDMLProg(); ret.add( prog.createRuntimeProgramBlock(rtprog, sb, config) ); } return ret; } /** * NOTE: if force is set, we set and recompile the respective indexing hops; * otherwise, 
we release the forced exec type and recompile again. Hence, * any changes can be exactly reverted with the same access behavior. * * @param sb * @param pb * @param var * @param ec * @param force * @throws DMLUnsupportedOperationException * @throws DMLRuntimeException */ public static void rFindAndRecompileIndexingHOP( StatementBlock sb, ProgramBlock pb, String var, ExecutionContext ec, boolean force ) throws DMLUnsupportedOperationException, DMLRuntimeException { if( pb instanceof IfProgramBlock && sb instanceof IfStatementBlock ) { IfProgramBlock ipb = (IfProgramBlock) pb; IfStatementBlock isb = (IfStatementBlock) sb; IfStatement is = (IfStatement) sb.getStatement(0); //process if condition if( isb.getPredicateHops()!=null ) ipb.setPredicate( rFindAndRecompileIndexingHOP(isb.getPredicateHops(),ipb.getPredicate(),var,ec,force) ); //process if branch int len = is.getIfBody().size(); for( int i=0; i<ipb.getChildBlocksIfBody().size() && i<len; i++ ) { ProgramBlock lpb = ipb.getChildBlocksIfBody().get(i); StatementBlock lsb = is.getIfBody().get(i); rFindAndRecompileIndexingHOP(lsb,lpb,var,ec,force); } //process else branch if( ipb.getChildBlocksElseBody() != null ) { int len2 = is.getElseBody().size(); for( int i=0; i<ipb.getChildBlocksElseBody().size() && i<len2; i++ ) { ProgramBlock lpb = ipb.getChildBlocksElseBody().get(i); StatementBlock lsb = is.getElseBody().get(i); rFindAndRecompileIndexingHOP(lsb,lpb,var,ec,force); } } } else if( pb instanceof WhileProgramBlock && sb instanceof WhileStatementBlock ) { WhileProgramBlock wpb = (WhileProgramBlock) pb; WhileStatementBlock wsb = (WhileStatementBlock) sb; WhileStatement ws = (WhileStatement) sb.getStatement(0); //process while condition if( wsb.getPredicateHops()!=null ) wpb.setPredicate( rFindAndRecompileIndexingHOP(wsb.getPredicateHops(),wpb.getPredicate(),var,ec,force) ); //process body int len = ws.getBody().size(); //robustness for potentially added problem blocks for( int i=0; i<wpb.getChildBlocks().size() && 
i<len; i++ ) { ProgramBlock lpb = wpb.getChildBlocks().get(i); StatementBlock lsb = ws.getBody().get(i); rFindAndRecompileIndexingHOP(lsb,lpb,var,ec, force); } } else if( pb instanceof ForProgramBlock && sb instanceof ForStatementBlock ) //for or parfor { ForProgramBlock fpb = (ForProgramBlock) pb; ForStatementBlock fsb = (ForStatementBlock)sb; ForStatement fs = (ForStatement) fsb.getStatement(0); if( fsb.getFromHops()!=null ) fpb.setFromInstructions( rFindAndRecompileIndexingHOP(fsb.getFromHops(),fpb.getFromInstructions(),var,ec,force) ); if( fsb.getToHops()!=null ) fpb.setToInstructions( rFindAndRecompileIndexingHOP(fsb.getToHops(),fpb.getToInstructions(),var,ec,force) ); if( fsb.getIncrementHops()!=null ) fpb.setIncrementInstructions( rFindAndRecompileIndexingHOP(fsb.getIncrementHops(),fpb.getIncrementInstructions(),var,ec,force) ); //process body int len = fs.getBody().size(); //robustness for potentially added problem blocks for( int i=0; i<fpb.getChildBlocks().size() && i<len; i++ ) { ProgramBlock lpb = fpb.getChildBlocks().get(i); StatementBlock lsb = fs.getBody().get(i); rFindAndRecompileIndexingHOP(lsb,lpb,var,ec, force); } } else //last level program block { try { //process actual hops boolean ret = false; Hop.resetVisitStatus(sb.get_hops()); if( force ) { //set forced execution type for( Hop h : sb.get_hops() ) ret |= rFindAndSetCPIndexingHOP(h, var); } else { //release forced execution type for( Hop h : sb.get_hops() ) ret |= rFindAndReleaseIndexingHOP(h, var); } //recompilation on-demand if( ret ) { //construct new instructions ArrayList<Instruction> newInst = Recompiler.recompileHopsDag(sb, sb.get_hops(), ec.getVariables(), null, true, 0); pb.setInstructions( newInst ); } } catch(Exception ex) { throw new DMLRuntimeException(ex); } } } /** * * @param prog * @param parforSB * @param vars * @return * @throws DMLUnsupportedOperationException * @throws DMLRuntimeException */ public static LocalVariableMap getReusableScalarVariables( DMLProgram prog, 
StatementBlock parforSB, LocalVariableMap vars ) throws DMLUnsupportedOperationException, DMLRuntimeException { LocalVariableMap constVars = new LocalVariableMap(); for( String varname : vars.keySet() ) { Data dat = vars.get(varname); if( dat instanceof ScalarObject //scalar && isApplicableForReuseVariable(prog, parforSB, varname) ) //constant { constVars.put(varname, dat); } } return constVars; } public static void replaceConstantScalarVariables( StatementBlock sb, LocalVariableMap vars ) throws DMLUnsupportedOperationException, DMLRuntimeException, HopsException { if( sb instanceof IfStatementBlock ) { IfStatementBlock isb = (IfStatementBlock) sb; IfStatement is = (IfStatement) sb.getStatement(0); replacePredicateLiterals(isb.getPredicateHops(), vars); for( StatementBlock lsb : is.getIfBody() ) replaceConstantScalarVariables(lsb, vars); for( StatementBlock lsb : is.getElseBody() ) replaceConstantScalarVariables(lsb, vars); } else if( sb instanceof WhileStatementBlock ) { WhileStatementBlock wsb = (WhileStatementBlock) sb; WhileStatement ws = (WhileStatement) sb.getStatement(0); replacePredicateLiterals(wsb.getPredicateHops(), vars); for( StatementBlock lsb : ws.getBody() ) replaceConstantScalarVariables(lsb, vars); } else if( sb instanceof ForStatementBlock ) //for or parfor { ForStatementBlock fsb = (ForStatementBlock)sb; ForStatement fs = (ForStatement) fsb.getStatement(0); replacePredicateLiterals(fsb.getFromHops(), vars); replacePredicateLiterals(fsb.getToHops(), vars); replacePredicateLiterals(fsb.getIncrementHops(), vars); for( StatementBlock lsb : fs.getBody() ) replaceConstantScalarVariables(lsb, vars); } else //last level block { ArrayList<Hop> hops = sb.get_hops(); if( hops != null ) { //replace constant literals Hop.resetVisitStatus(hops); for( Hop hopRoot : hops ) Recompiler.rReplaceLiterals( hopRoot, vars ); } } } /** * * @param pred * @param vars * @throws DMLRuntimeException */ private static void replacePredicateLiterals( Hop pred, 
LocalVariableMap vars )
	throws DMLRuntimeException
{
	if( pred != null ){
		pred.resetVisitStatus();
		Recompiler.rReplaceLiterals(pred, vars);
	}
}

/**
 * This function determines if an parfor input variable is guaranteed to be read-only
 * across multiple invocations of parfor optimization (e.g., in a surrounding while loop).
 * In case of invariant variables we can reuse partitioned matrices and propagate constants
 * for better size estimation.
 *
 * @param prog the DML program
 * @param parforSB the parfor statement block of interest
 * @param var variable name
 * @return true if the variable is invariant w.r.t. the parfor block
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */
public static boolean isApplicableForReuseVariable( DMLProgram prog, StatementBlock parforSB, String var )
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	boolean ret = false;

	for( StatementBlock sb : prog.getStatementBlocks() )
		ret |= isApplicableForReuseVariable(sb, parforSB, var);

	return ret;
}

/**
 * Recursive helper: reports true only if the given subtree contains the
 * parfor block AND no block on the path updates the variable.
 *
 * @param sb current statement block (possibly nested)
 * @param parforSB the parfor statement block of interest
 * @param var variable name
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */
private static boolean isApplicableForReuseVariable( StatementBlock sb, StatementBlock parforSB, String var )
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	boolean ret = false;

	if( sb instanceof IfStatementBlock )
	{
		IfStatement is = (IfStatement) sb.getStatement(0);
		for( StatementBlock lsb : is.getIfBody() )
			ret |= isApplicableForReuseVariable(lsb, parforSB, var);
		for( StatementBlock lsb : is.getElseBody() )
			ret |= isApplicableForReuseVariable(lsb, parforSB, var);
	}
	else if( sb instanceof WhileStatementBlock )
	{
		WhileStatement ws = (WhileStatement) sb.getStatement(0);
		for( StatementBlock lsb : ws.getBody() )
			ret |= isApplicableForReuseVariable(lsb, parforSB, var);
	}
	else if( sb instanceof ForStatementBlock ) //for or parfor
	{
		ForStatementBlock fsb = (ForStatementBlock)sb;
		ForStatement fs = (ForStatement) fsb.getStatement(0);
		if( fsb == parforSB )
		{
			//found parfor statement
			ret = true;
		}
		else
		{
			for( StatementBlock lsb : fs.getBody() )
				ret |= isApplicableForReuseVariable(lsb, parforSB, var);
		}
	}

	// Only invariant if the enclosing block does not update the variable.
	return ret && !sb.variablesUpdated().containsVariable(var);
}

/**
 * Checks recursively whether the given program block (or any of its child
 * blocks) contains at least one function call instruction.
 *
 * NOTE(review): unlike rFindAndRecompileIndexingHOP, the else-body of an
 * IfProgramBlock is iterated here without a null guard — presumably
 * getChildBlocksElseBody() always returns a (possibly empty) list; confirm.
 *
 * @param pb program block
 * @return true if a FunctionCallCPInstruction is found
 */
public static boolean containsAtLeastOneFunction( ProgramBlock pb )
{
	if( pb instanceof IfProgramBlock )
	{
		IfProgramBlock ipb = (IfProgramBlock) pb;
		for( ProgramBlock lpb : ipb.getChildBlocksIfBody() )
			if( containsAtLeastOneFunction(lpb) )
				return true;
		for( ProgramBlock lpb : ipb.getChildBlocksElseBody() )
			if( containsAtLeastOneFunction(lpb) )
				return true;
	}
	else if( pb instanceof WhileProgramBlock )
	{
		WhileProgramBlock wpb = (WhileProgramBlock) pb;
		for( ProgramBlock lpb : wpb.getChildBlocks() )
			if( containsAtLeastOneFunction(lpb) )
				return true;
	}
	else if( pb instanceof ForProgramBlock ) //incl parfor
	{
		ForProgramBlock fpb = (ForProgramBlock) pb;
		for( ProgramBlock lpb : fpb.getChildBlocks() )
			if( containsAtLeastOneFunction(lpb) )
				return true;
	}
	else
	{
		if( pb.getInstructions() != null )
			for( Instruction inst : pb.getInstructions() )
				if( inst instanceof FunctionCallCPInstruction )
					return true;
	}

	return false;
}

/**
 * Forces (or releases) the CP execution type of indexing operations on the
 * given variable inside one predicate hop DAG and, if anything changed,
 * recompiles and returns a new instruction list; otherwise returns the
 * input instructions unchanged.
 *
 * @param hop predicate hop root
 * @param in current instruction list for the predicate
 * @param var variable name whose indexing ops are targeted
 * @param ec execution context providing the variable map
 * @param force true to force CP, false to release the forced type
 * @return new (or unchanged) instruction list
 * @throws DMLRuntimeException
 */
private static ArrayList<Instruction> rFindAndRecompileIndexingHOP( Hop hop, ArrayList<Instruction> in, String var, ExecutionContext ec, boolean force )
	throws DMLRuntimeException
{
	ArrayList<Instruction> tmp = in;

	try
	{
		boolean ret = false;
		hop.resetVisitStatus();

		if( force ) //set forced execution type
			ret = rFindAndSetCPIndexingHOP(hop, var);
		else //release forced execution type
			ret = rFindAndReleaseIndexingHOP(hop, var);

		//recompilation on-demand
		if( ret )
		{
			//construct new instructions
			tmp = Recompiler.recompileHopsDag(hop, ec.getVariables(), null, true, 0);
		}
	}
	catch(Exception ex)
	{
		throw new DMLRuntimeException(ex);
	}

	return tmp;
}

/**
 * Marks indexing operations that read the given variable for forced CP
 * (or CP_FILE) execution; returns true if at least one hop was changed
 * and a recompile is therefore required.
 *
 * @param hop current hop (DAG traversed via visit status)
 * @param var variable name
 * @return true if any indexing hop was modified
 */
private static boolean rFindAndSetCPIndexingHOP(Hop hop, String var)
{
	boolean ret = false;

	if( hop.getVisited() == VisitStatus.DONE )
		return ret;
ArrayList<Hop> in = hop.getInput(); if( hop instanceof IndexingOp ) { String inMatrix = hop.getInput().get(0).getName(); if( inMatrix.equals(var) ) { //NOTE: mem estimate of RIX, set to output size by parfor optmizer //(rowblock/colblock only applied if in total less than two blocks, // hence always mem_est<mem_budget) if( hop.getMemEstimate() < OptimizerUtils.getLocalMemBudget() ) hop.setForcedExecType( LopProperties.ExecType.CP ); else hop.setForcedExecType( LopProperties.ExecType.CP_FILE ); ret = true; } } //recursive search if( in != null ) for( Hop hin : in ) ret |= rFindAndSetCPIndexingHOP(hin,var); hop.setVisited(VisitStatus.DONE); return ret; } private static boolean rFindAndReleaseIndexingHOP(Hop hop, String var) { boolean ret = false; if( hop.getVisited() == VisitStatus.DONE ) return ret; ArrayList<Hop> in = hop.getInput(); if( hop instanceof IndexingOp ) { String inMatrix = hop.getInput().get(0).getName(); if( inMatrix.equals(var) ) { hop.setForcedExecType(null); hop.clearMemEstimate(); ret = true; } } //recursive search if( in != null ) for( Hop hin : in ) ret |= rFindAndReleaseIndexingHOP(hin,var); hop.setVisited(VisitStatus.DONE); return ret; } /////// // additional general-purpose functionalities /** * * @param iterVar * @param offset * @return * @throws DMLRuntimeException * @throws DMLUnsupportedOperationException */ protected static ArrayList<Instruction> createNestedParallelismToInstructionSet(String iterVar, String offset) throws DMLRuntimeException, DMLUnsupportedOperationException { //create instruction string StringBuilder sb = new StringBuilder("CP"+Lop.OPERAND_DELIMITOR+"+"+Lop.OPERAND_DELIMITOR); sb.append(iterVar); sb.append(Lop.DATATYPE_PREFIX+"SCALAR"+Lop.VALUETYPE_PREFIX+"INT"+Lop.OPERAND_DELIMITOR); sb.append(offset); sb.append(Lop.DATATYPE_PREFIX+"SCALAR"+Lop.VALUETYPE_PREFIX+"INT"+Lop.OPERAND_DELIMITOR); sb.append(iterVar); sb.append(Lop.DATATYPE_PREFIX+"SCALAR"+Lop.VALUETYPE_PREFIX+"INT"); String str = sb.toString(); //create 
instruction set ArrayList<Instruction> tmp = new ArrayList<Instruction>(); Instruction inst = ArithmeticBinaryCPInstruction.parseInstruction(str); tmp.add(inst); return tmp; } ///////////////////////////////// // experimental functionality ////////// /** * * @param n * @throws DMLRuntimeException */ protected static void recompilePartialPlan( OptNode n ) throws DMLRuntimeException { //NOTE: need to recompile complete programblock because (1) many to many relationships //between hops and instructions and (2) due to changed internal variable names try { //get parent program and statement block OptTreePlanMappingAbstract map = OptTreeConverter.getAbstractPlanMapping(); long pid = map.getMappedParentID(n.getID()); Object[] o = map.getMappedProg(pid); StatementBlock sbOld = (StatementBlock) o[0]; ProgramBlock pbOld = (ProgramBlock) o[1]; //get changed node and set type appropriately Hop hop = (Hop) map.getMappedHop(n.getID()); hop.setForcedExecType(n.getExecType().toLopsExecType()); hop.setLops(null); //to enable fresh construction //get all hops of statement and construct new instructions Dag<Lop> dag = new Dag<Lop>(); for( Hop hops : sbOld.get_hops() ) { hops.resetVisitStatus(); Recompiler.rClearLops(hops); Lop lops = hops.constructLops(); lops.addToDag(dag); } //construct new instructions ArrayList<Instruction> newInst = dag.getJobs(sbOld, ConfigurationManager.getConfig()); //exchange instructions pbOld.getInstructions().clear(); pbOld.getInstructions().addAll(newInst); } catch(Exception ex) { throw new DMLRuntimeException(ex); } } /** * NOTE: need to recompile complete programblock because (1) many to many relationships * between hops and instructions and (2) due to changed internal variable names * * @param n * @return * @throws DMLRuntimeException */ protected static ProgramBlock recompile( OptNode n ) throws DMLRuntimeException { ProgramBlock pbNew = null; try { if( n.getNodeType() == NodeType.HOP ) { //get parent program and statement block 
OptTreePlanMappingAbstract map = OptTreeConverter.getAbstractPlanMapping(); long pid = map.getMappedParentID(n.getID()); Object[] o = map.getMappedProg(pid); StatementBlock sbOld = (StatementBlock) o[0]; ProgramBlock pbOld = (ProgramBlock) o[1]; LopProperties.ExecType oldtype = null; //get changed node and set type appropriately Hop hop = (Hop) map.getMappedHop(n.getID()); hop.setForcedExecType(n.getExecType().toLopsExecType()); hop.setLops(null); //to enable fresh construction //get all hops of statement and construct new lops Dag<Lop> dag = new Dag<Lop>(); for( Hop hops : sbOld.get_hops() ) { hops.resetVisitStatus(); Recompiler.rClearLops(hops); Lop lops = hops.constructLops(); lops.addToDag(dag); } //construct new instructions ArrayList<Instruction> newInst = dag.getJobs(sbOld, ConfigurationManager.getConfig()); //exchange instructions pbNew = new ProgramBlock(pbOld.getProgram()); pbNew.setInstructions(newInst); //reset type global repository hop.setForcedExecType(oldtype); } else if( n.getNodeType() == NodeType.PARFOR ) { //no recompilation required OptTreePlanMappingAbstract map = OptTreeConverter.getAbstractPlanMapping(); ParForProgramBlock pb = (ParForProgramBlock)map.getMappedProg(n.getID())[1]; pbNew = ProgramConverter.createShallowCopyParForProgramBlock(pb, pb.getProgram()); ((ParForProgramBlock)pbNew).setExecMode(n.getExecType().toParForExecMode()); } else { throw new DMLRuntimeException("Unexpected node type."); } } catch(Exception ex) { throw new DMLRuntimeException(ex); } return pbNew; } /** * * @param hlNodeID * @param pbNew * @throws DMLRuntimeException */ protected static void exchangeProgram(long hlNodeID, ProgramBlock pbNew) throws DMLRuntimeException { OptTreePlanMappingAbstract map = OptTreeConverter.getAbstractPlanMapping(); OptNode node = map.getOptNode(hlNodeID); if( node.getNodeType() == NodeType.HOP ) { long pid = map.getMappedParentID(hlNodeID); Object[] o = map.getMappedProg(pid); ProgramBlock pbOld = (ProgramBlock) o[1]; //exchange 
instructions (save version) pbOld.getInstructions().clear(); pbOld.getInstructions().addAll( pbNew.getInstructions() ); } else if( node.getNodeType() == NodeType.PARFOR ) { ParForProgramBlock pbOld = (ParForProgramBlock) map.getMappedProg(node.getID())[1]; pbOld.setExecMode(((ParForProgramBlock)pbNew).getExecMode()); //TODO extend as required } else { throw new DMLRuntimeException("Unexpected node type: "+node.getNodeType()); } } }